| commit (stringlengths 40-40) | old_file (stringlengths 4-118) | new_file (stringlengths 4-118) | old_contents (stringlengths 0-2.94k) | new_contents (stringlengths 1-4.43k) | subject (stringlengths 15-444) | message (stringlengths 16-3.45k) | lang (stringclasses, 1 value) | license (stringclasses, 13 values) | repos (stringlengths 5-43.2k) | prompt (stringlengths 17-4.58k) | response (stringlengths 1-4.43k) | prompt_tagged (stringlengths 58-4.62k) | response_tagged (stringlengths 1-4.43k) | text (stringlengths 132-7.29k) | text_tagged (stringlengths 173-7.33k) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
dd5b4f0b091963c197e36ea20b3ce4249e7fadc4
|
tests/test_ffi.py
|
tests/test_ffi.py
|
import six
from xcffib.ffi import ffi, bytes_to_cdata
def test_bytes_to_cdata():
bs = six.b('these are some bytes')
assert bs == bytes(ffi.buffer(bytes_to_cdata(bs), len(bs)))
|
import six
from xcffib.ffi import ffi, bytes_to_cdata
def test_bytes_to_cdata():
bs = six.b('these are some bytes')
assert bs == ffi.buffer(bytes_to_cdata(bs), len(bs))[:]
|
Fix pypy by using alternate cffi minibuffer syntax
|
Fix pypy by using alternate cffi minibuffer syntax
|
Python
|
apache-2.0
|
tych0/xcffib
|
import six
from xcffib.ffi import ffi, bytes_to_cdata
def test_bytes_to_cdata():
bs = six.b('these are some bytes')
assert bs == bytes(ffi.buffer(bytes_to_cdata(bs), len(bs)))
Fix pypy by using alternate cffi minibuffer syntax
|
import six
from xcffib.ffi import ffi, bytes_to_cdata
def test_bytes_to_cdata():
bs = six.b('these are some bytes')
assert bs == ffi.buffer(bytes_to_cdata(bs), len(bs))[:]
|
<commit_before>import six
from xcffib.ffi import ffi, bytes_to_cdata
def test_bytes_to_cdata():
bs = six.b('these are some bytes')
assert bs == bytes(ffi.buffer(bytes_to_cdata(bs), len(bs)))
<commit_msg>Fix pypy by using alternate cffi minibuffer syntax<commit_after>
|
import six
from xcffib.ffi import ffi, bytes_to_cdata
def test_bytes_to_cdata():
bs = six.b('these are some bytes')
assert bs == ffi.buffer(bytes_to_cdata(bs), len(bs))[:]
|
import six
from xcffib.ffi import ffi, bytes_to_cdata
def test_bytes_to_cdata():
bs = six.b('these are some bytes')
assert bs == bytes(ffi.buffer(bytes_to_cdata(bs), len(bs)))
Fix pypy by using alternate cffi minibuffer syntaximport six
from xcffib.ffi import ffi, bytes_to_cdata
def test_bytes_to_cdata():
bs = six.b('these are some bytes')
assert bs == ffi.buffer(bytes_to_cdata(bs), len(bs))[:]
|
<commit_before>import six
from xcffib.ffi import ffi, bytes_to_cdata
def test_bytes_to_cdata():
bs = six.b('these are some bytes')
assert bs == bytes(ffi.buffer(bytes_to_cdata(bs), len(bs)))
<commit_msg>Fix pypy by using alternate cffi minibuffer syntax<commit_after>import six
from xcffib.ffi import ffi, bytes_to_cdata
def test_bytes_to_cdata():
bs = six.b('these are some bytes')
assert bs == ffi.buffer(bytes_to_cdata(bs), len(bs))[:]
|
f3e076dae625cebee779757abd030f4b6c08167d
|
src/engine/SCons/Tool/MSVCCommon/netframework.py
|
src/engine/SCons/Tool/MSVCCommon/netframework.py
|
import os
from common import read_reg, debug
_FRAMEWORKDIR_HKEY_ROOT = r'Software\Microsoft\.NETFramework\InstallRoot'
def find_framework_root():
# XXX: find it from environment (FrameworkDir)
try:
froot = read_reg(_FRAMEWORKDIR_HKEY_ROOT)
debug("Found framework install root in registry: %s" % froot)
except WindowsError, e:
debug("Could not read reg key %s" % _FRAMEWORKDIR_HKEY_ROOT)
return None
if not os.path.exists(froot):
debug("%s not found on fs" % froot)
return None
return froot
|
import os
import re
from common import read_reg, debug
_FRAMEWORKDIR_HKEY_ROOT = r'Software\Microsoft\.NETFramework\InstallRoot'
def find_framework_root():
# XXX: find it from environment (FrameworkDir)
try:
froot = read_reg(_FRAMEWORKDIR_HKEY_ROOT)
debug("Found framework install root in registry: %s" % froot)
except WindowsError, e:
debug("Could not read reg key %s" % _FRAMEWORKDIR_HKEY_ROOT)
return None
if not os.path.exists(froot):
debug("%s not found on fs" % froot)
return None
return froot
def query_versions():
froot = find_framework_root()
if froot:
os.listdir(froot)
l = re.compile('v[0-9]+.*')
versions = filter(lambda e, l=l: l.match(e), contents)
def versrt(a,b):
# since version numbers aren't really floats...
aa = a[1:]
bb = b[1:]
aal = string.split(aa, '.')
bbl = string.split(bb, '.')
# sequence comparison in python is lexicographical
# which is exactly what we want.
# Note we sort backwards so the highest version is first.
return cmp(bbl,aal)
versions.sort(versrt)
else:
versions = []
return versions
|
Add a function to query available .net frameworks.
|
Add a function to query available .net frameworks.
|
Python
|
mit
|
azatoth/scons,azatoth/scons,azatoth/scons,azatoth/scons,azatoth/scons
|
import os
from common import read_reg, debug
_FRAMEWORKDIR_HKEY_ROOT = r'Software\Microsoft\.NETFramework\InstallRoot'
def find_framework_root():
# XXX: find it from environment (FrameworkDir)
try:
froot = read_reg(_FRAMEWORKDIR_HKEY_ROOT)
debug("Found framework install root in registry: %s" % froot)
except WindowsError, e:
debug("Could not read reg key %s" % _FRAMEWORKDIR_HKEY_ROOT)
return None
if not os.path.exists(froot):
debug("%s not found on fs" % froot)
return None
return froot
Add a function to query available .net frameworks.
|
import os
import re
from common import read_reg, debug
_FRAMEWORKDIR_HKEY_ROOT = r'Software\Microsoft\.NETFramework\InstallRoot'
def find_framework_root():
# XXX: find it from environment (FrameworkDir)
try:
froot = read_reg(_FRAMEWORKDIR_HKEY_ROOT)
debug("Found framework install root in registry: %s" % froot)
except WindowsError, e:
debug("Could not read reg key %s" % _FRAMEWORKDIR_HKEY_ROOT)
return None
if not os.path.exists(froot):
debug("%s not found on fs" % froot)
return None
return froot
def query_versions():
froot = find_framework_root()
if froot:
os.listdir(froot)
l = re.compile('v[0-9]+.*')
versions = filter(lambda e, l=l: l.match(e), contents)
def versrt(a,b):
# since version numbers aren't really floats...
aa = a[1:]
bb = b[1:]
aal = string.split(aa, '.')
bbl = string.split(bb, '.')
# sequence comparison in python is lexicographical
# which is exactly what we want.
# Note we sort backwards so the highest version is first.
return cmp(bbl,aal)
versions.sort(versrt)
else:
versions = []
return versions
|
<commit_before>import os
from common import read_reg, debug
_FRAMEWORKDIR_HKEY_ROOT = r'Software\Microsoft\.NETFramework\InstallRoot'
def find_framework_root():
# XXX: find it from environment (FrameworkDir)
try:
froot = read_reg(_FRAMEWORKDIR_HKEY_ROOT)
debug("Found framework install root in registry: %s" % froot)
except WindowsError, e:
debug("Could not read reg key %s" % _FRAMEWORKDIR_HKEY_ROOT)
return None
if not os.path.exists(froot):
debug("%s not found on fs" % froot)
return None
return froot
<commit_msg>Add a function to query available .net frameworks.<commit_after>
|
import os
import re
from common import read_reg, debug
_FRAMEWORKDIR_HKEY_ROOT = r'Software\Microsoft\.NETFramework\InstallRoot'
def find_framework_root():
# XXX: find it from environment (FrameworkDir)
try:
froot = read_reg(_FRAMEWORKDIR_HKEY_ROOT)
debug("Found framework install root in registry: %s" % froot)
except WindowsError, e:
debug("Could not read reg key %s" % _FRAMEWORKDIR_HKEY_ROOT)
return None
if not os.path.exists(froot):
debug("%s not found on fs" % froot)
return None
return froot
def query_versions():
froot = find_framework_root()
if froot:
os.listdir(froot)
l = re.compile('v[0-9]+.*')
versions = filter(lambda e, l=l: l.match(e), contents)
def versrt(a,b):
# since version numbers aren't really floats...
aa = a[1:]
bb = b[1:]
aal = string.split(aa, '.')
bbl = string.split(bb, '.')
# sequence comparison in python is lexicographical
# which is exactly what we want.
# Note we sort backwards so the highest version is first.
return cmp(bbl,aal)
versions.sort(versrt)
else:
versions = []
return versions
|
import os
from common import read_reg, debug
_FRAMEWORKDIR_HKEY_ROOT = r'Software\Microsoft\.NETFramework\InstallRoot'
def find_framework_root():
# XXX: find it from environment (FrameworkDir)
try:
froot = read_reg(_FRAMEWORKDIR_HKEY_ROOT)
debug("Found framework install root in registry: %s" % froot)
except WindowsError, e:
debug("Could not read reg key %s" % _FRAMEWORKDIR_HKEY_ROOT)
return None
if not os.path.exists(froot):
debug("%s not found on fs" % froot)
return None
return froot
Add a function to query available .net frameworks.import os
import re
from common import read_reg, debug
_FRAMEWORKDIR_HKEY_ROOT = r'Software\Microsoft\.NETFramework\InstallRoot'
def find_framework_root():
# XXX: find it from environment (FrameworkDir)
try:
froot = read_reg(_FRAMEWORKDIR_HKEY_ROOT)
debug("Found framework install root in registry: %s" % froot)
except WindowsError, e:
debug("Could not read reg key %s" % _FRAMEWORKDIR_HKEY_ROOT)
return None
if not os.path.exists(froot):
debug("%s not found on fs" % froot)
return None
return froot
def query_versions():
froot = find_framework_root()
if froot:
os.listdir(froot)
l = re.compile('v[0-9]+.*')
versions = filter(lambda e, l=l: l.match(e), contents)
def versrt(a,b):
# since version numbers aren't really floats...
aa = a[1:]
bb = b[1:]
aal = string.split(aa, '.')
bbl = string.split(bb, '.')
# sequence comparison in python is lexicographical
# which is exactly what we want.
# Note we sort backwards so the highest version is first.
return cmp(bbl,aal)
versions.sort(versrt)
else:
versions = []
return versions
|
<commit_before>import os
from common import read_reg, debug
_FRAMEWORKDIR_HKEY_ROOT = r'Software\Microsoft\.NETFramework\InstallRoot'
def find_framework_root():
# XXX: find it from environment (FrameworkDir)
try:
froot = read_reg(_FRAMEWORKDIR_HKEY_ROOT)
debug("Found framework install root in registry: %s" % froot)
except WindowsError, e:
debug("Could not read reg key %s" % _FRAMEWORKDIR_HKEY_ROOT)
return None
if not os.path.exists(froot):
debug("%s not found on fs" % froot)
return None
return froot
<commit_msg>Add a function to query available .net frameworks.<commit_after>import os
import re
from common import read_reg, debug
_FRAMEWORKDIR_HKEY_ROOT = r'Software\Microsoft\.NETFramework\InstallRoot'
def find_framework_root():
# XXX: find it from environment (FrameworkDir)
try:
froot = read_reg(_FRAMEWORKDIR_HKEY_ROOT)
debug("Found framework install root in registry: %s" % froot)
except WindowsError, e:
debug("Could not read reg key %s" % _FRAMEWORKDIR_HKEY_ROOT)
return None
if not os.path.exists(froot):
debug("%s not found on fs" % froot)
return None
return froot
def query_versions():
froot = find_framework_root()
if froot:
os.listdir(froot)
l = re.compile('v[0-9]+.*')
versions = filter(lambda e, l=l: l.match(e), contents)
def versrt(a,b):
# since version numbers aren't really floats...
aa = a[1:]
bb = b[1:]
aal = string.split(aa, '.')
bbl = string.split(bb, '.')
# sequence comparison in python is lexicographical
# which is exactly what we want.
# Note we sort backwards so the highest version is first.
return cmp(bbl,aal)
versions.sort(versrt)
else:
versions = []
return versions
|
c46ff58eb52f5610c858940c8159dfce7b73dc7b
|
panoptes_client/avatar.py
|
panoptes_client/avatar.py
|
from panoptes_client.panoptes import (
Panoptes,
PanoptesObject,
LinkResolver,
)
from panoptes_client.project import Project
class Avatar(PanoptesObject):
_api_slug = 'avatar'
_link_slug = 'avatars'
_edit_attributes = ()
@classmethod
def http_get(cls, path, params={}, headers={}):
project = params.pop('project')
# print()
# print(Project.url(project.id))
# print()
avatar_response = Panoptes.client().get(
Project.url(project.id) + cls.url(path),
params,
headers,
)
print(avatar_response.raw)
return avatar_response
LinkResolver.register(Avatar)
LinkResolver.register(Avatar, 'avatar')
|
from panoptes_client.panoptes import (
Panoptes,
PanoptesObject,
LinkResolver,
)
from panoptes_client.project import Project
class ProjectAvatar(PanoptesObject):
_api_slug = 'avatar'
_link_slug = 'avatars'
_edit_attributes = ()
@classmethod
def http_get(cls, path, params={}, headers={}):
project = params.pop('project')
# print()
# print(Project.url(project.id))
# print()
avatar_response = Panoptes.client().get(
Project.url(project.id) + cls.url(path),
params,
headers,
)
print(avatar_response.raw)
return avatar_response
LinkResolver.register(Avatar)
LinkResolver.register(Avatar, 'avatar')
|
Rename the 'Avatar' class to 'ProjectAvatar'
|
Rename the 'Avatar' class to 'ProjectAvatar'
|
Python
|
apache-2.0
|
zooniverse/panoptes-python-client
|
from panoptes_client.panoptes import (
Panoptes,
PanoptesObject,
LinkResolver,
)
from panoptes_client.project import Project
class Avatar(PanoptesObject):
_api_slug = 'avatar'
_link_slug = 'avatars'
_edit_attributes = ()
@classmethod
def http_get(cls, path, params={}, headers={}):
project = params.pop('project')
# print()
# print(Project.url(project.id))
# print()
avatar_response = Panoptes.client().get(
Project.url(project.id) + cls.url(path),
params,
headers,
)
print(avatar_response.raw)
return avatar_response
LinkResolver.register(Avatar)
LinkResolver.register(Avatar, 'avatar')
Rename the 'Avatar' class to 'ProjectAvatar'
|
from panoptes_client.panoptes import (
Panoptes,
PanoptesObject,
LinkResolver,
)
from panoptes_client.project import Project
class ProjectAvatar(PanoptesObject):
_api_slug = 'avatar'
_link_slug = 'avatars'
_edit_attributes = ()
@classmethod
def http_get(cls, path, params={}, headers={}):
project = params.pop('project')
# print()
# print(Project.url(project.id))
# print()
avatar_response = Panoptes.client().get(
Project.url(project.id) + cls.url(path),
params,
headers,
)
print(avatar_response.raw)
return avatar_response
LinkResolver.register(Avatar)
LinkResolver.register(Avatar, 'avatar')
|
<commit_before>from panoptes_client.panoptes import (
Panoptes,
PanoptesObject,
LinkResolver,
)
from panoptes_client.project import Project
class Avatar(PanoptesObject):
_api_slug = 'avatar'
_link_slug = 'avatars'
_edit_attributes = ()
@classmethod
def http_get(cls, path, params={}, headers={}):
project = params.pop('project')
# print()
# print(Project.url(project.id))
# print()
avatar_response = Panoptes.client().get(
Project.url(project.id) + cls.url(path),
params,
headers,
)
print(avatar_response.raw)
return avatar_response
LinkResolver.register(Avatar)
LinkResolver.register(Avatar, 'avatar')
<commit_msg>Rename the 'Avatar' class to 'ProjectAvatar'<commit_after>
|
from panoptes_client.panoptes import (
Panoptes,
PanoptesObject,
LinkResolver,
)
from panoptes_client.project import Project
class ProjectAvatar(PanoptesObject):
_api_slug = 'avatar'
_link_slug = 'avatars'
_edit_attributes = ()
@classmethod
def http_get(cls, path, params={}, headers={}):
project = params.pop('project')
# print()
# print(Project.url(project.id))
# print()
avatar_response = Panoptes.client().get(
Project.url(project.id) + cls.url(path),
params,
headers,
)
print(avatar_response.raw)
return avatar_response
LinkResolver.register(Avatar)
LinkResolver.register(Avatar, 'avatar')
|
from panoptes_client.panoptes import (
Panoptes,
PanoptesObject,
LinkResolver,
)
from panoptes_client.project import Project
class Avatar(PanoptesObject):
_api_slug = 'avatar'
_link_slug = 'avatars'
_edit_attributes = ()
@classmethod
def http_get(cls, path, params={}, headers={}):
project = params.pop('project')
# print()
# print(Project.url(project.id))
# print()
avatar_response = Panoptes.client().get(
Project.url(project.id) + cls.url(path),
params,
headers,
)
print(avatar_response.raw)
return avatar_response
LinkResolver.register(Avatar)
LinkResolver.register(Avatar, 'avatar')
Rename the 'Avatar' class to 'ProjectAvatar'from panoptes_client.panoptes import (
Panoptes,
PanoptesObject,
LinkResolver,
)
from panoptes_client.project import Project
class ProjectAvatar(PanoptesObject):
_api_slug = 'avatar'
_link_slug = 'avatars'
_edit_attributes = ()
@classmethod
def http_get(cls, path, params={}, headers={}):
project = params.pop('project')
# print()
# print(Project.url(project.id))
# print()
avatar_response = Panoptes.client().get(
Project.url(project.id) + cls.url(path),
params,
headers,
)
print(avatar_response.raw)
return avatar_response
LinkResolver.register(Avatar)
LinkResolver.register(Avatar, 'avatar')
|
<commit_before>from panoptes_client.panoptes import (
Panoptes,
PanoptesObject,
LinkResolver,
)
from panoptes_client.project import Project
class Avatar(PanoptesObject):
_api_slug = 'avatar'
_link_slug = 'avatars'
_edit_attributes = ()
@classmethod
def http_get(cls, path, params={}, headers={}):
project = params.pop('project')
# print()
# print(Project.url(project.id))
# print()
avatar_response = Panoptes.client().get(
Project.url(project.id) + cls.url(path),
params,
headers,
)
print(avatar_response.raw)
return avatar_response
LinkResolver.register(Avatar)
LinkResolver.register(Avatar, 'avatar')
<commit_msg>Rename the 'Avatar' class to 'ProjectAvatar'<commit_after>from panoptes_client.panoptes import (
Panoptes,
PanoptesObject,
LinkResolver,
)
from panoptes_client.project import Project
class ProjectAvatar(PanoptesObject):
_api_slug = 'avatar'
_link_slug = 'avatars'
_edit_attributes = ()
@classmethod
def http_get(cls, path, params={}, headers={}):
project = params.pop('project')
# print()
# print(Project.url(project.id))
# print()
avatar_response = Panoptes.client().get(
Project.url(project.id) + cls.url(path),
params,
headers,
)
print(avatar_response.raw)
return avatar_response
LinkResolver.register(Avatar)
LinkResolver.register(Avatar, 'avatar')
|
27b059a0e478d0c7a71c9695f5861eb28b905e7c
|
tomviz/python/setup.py
|
tomviz/python/setup.py
|
from setuptools import setup, find_packages
setup(
name='tomviz-pipeline',
version='0.0.1',
description='Tomviz python external pipeline execution infrastructure.',
author='Kitware, Inc.',
author_email='kitware@kitware.com',
url='https://www.tomviz.org/',
license='BSD 3-Clause',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: BSD 3-Clause',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5'
],
packages=find_packages(),
install_requires=['tqdm', 'h5py', 'numpy==1.16.4', 'click', 'scipy'],
extras_require={
'interactive': ['jsonpatch', 'marshmallow'],
'itk': ['itk'],
'pyfftw': ['pyfftw']
},
entry_points={
'console_scripts': [
'tomviz-pipeline = tomviz.cli:main'
]
}
)
|
from setuptools import setup, find_packages
setup(
name='tomviz-pipeline',
version='0.0.1',
description='Tomviz python external pipeline execution infrastructure.',
author='Kitware, Inc.',
author_email='kitware@kitware.com',
url='https://www.tomviz.org/',
license='BSD 3-Clause',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: BSD 3-Clause',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5'
],
packages=find_packages(),
install_requires=['tqdm', 'h5py', 'numpy==1.16.4', 'click', 'scipy'],
extras_require={
'interactive': ['jsonpatch@https://github.com/cjh1/python-json-patch/archive/tomviz.zip', 'marshmallow'],
'itk': ['itk'],
'pyfftw': ['pyfftw']
},
entry_points={
'console_scripts': [
'tomviz-pipeline = tomviz.cli:main'
]
}
)
|
Use PEP 508 URL dependencies to specify patched jsonpatch
|
Use PEP 508 URL dependencies to specify patched jsonpatch
Signed-off-by: Chris Harris <a361e89d1eba6c570561222d75facbbf7aaeeafe@kitware.com>
|
Python
|
bsd-3-clause
|
OpenChemistry/tomviz,OpenChemistry/tomviz,OpenChemistry/tomviz,OpenChemistry/tomviz
|
from setuptools import setup, find_packages
setup(
name='tomviz-pipeline',
version='0.0.1',
description='Tomviz python external pipeline execution infrastructure.',
author='Kitware, Inc.',
author_email='kitware@kitware.com',
url='https://www.tomviz.org/',
license='BSD 3-Clause',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: BSD 3-Clause',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5'
],
packages=find_packages(),
install_requires=['tqdm', 'h5py', 'numpy==1.16.4', 'click', 'scipy'],
extras_require={
'interactive': ['jsonpatch', 'marshmallow'],
'itk': ['itk'],
'pyfftw': ['pyfftw']
},
entry_points={
'console_scripts': [
'tomviz-pipeline = tomviz.cli:main'
]
}
)
Use PEP 508 URL dependencies to specify patched jsonpatch
Signed-off-by: Chris Harris <a361e89d1eba6c570561222d75facbbf7aaeeafe@kitware.com>
|
from setuptools import setup, find_packages
setup(
name='tomviz-pipeline',
version='0.0.1',
description='Tomviz python external pipeline execution infrastructure.',
author='Kitware, Inc.',
author_email='kitware@kitware.com',
url='https://www.tomviz.org/',
license='BSD 3-Clause',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: BSD 3-Clause',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5'
],
packages=find_packages(),
install_requires=['tqdm', 'h5py', 'numpy==1.16.4', 'click', 'scipy'],
extras_require={
'interactive': ['jsonpatch@https://github.com/cjh1/python-json-patch/archive/tomviz.zip', 'marshmallow'],
'itk': ['itk'],
'pyfftw': ['pyfftw']
},
entry_points={
'console_scripts': [
'tomviz-pipeline = tomviz.cli:main'
]
}
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='tomviz-pipeline',
version='0.0.1',
description='Tomviz python external pipeline execution infrastructure.',
author='Kitware, Inc.',
author_email='kitware@kitware.com',
url='https://www.tomviz.org/',
license='BSD 3-Clause',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: BSD 3-Clause',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5'
],
packages=find_packages(),
install_requires=['tqdm', 'h5py', 'numpy==1.16.4', 'click', 'scipy'],
extras_require={
'interactive': ['jsonpatch', 'marshmallow'],
'itk': ['itk'],
'pyfftw': ['pyfftw']
},
entry_points={
'console_scripts': [
'tomviz-pipeline = tomviz.cli:main'
]
}
)
<commit_msg>Use PEP 508 URL dependencies to specify patched jsonpatch
Signed-off-by: Chris Harris <a361e89d1eba6c570561222d75facbbf7aaeeafe@kitware.com><commit_after>
|
from setuptools import setup, find_packages
setup(
name='tomviz-pipeline',
version='0.0.1',
description='Tomviz python external pipeline execution infrastructure.',
author='Kitware, Inc.',
author_email='kitware@kitware.com',
url='https://www.tomviz.org/',
license='BSD 3-Clause',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: BSD 3-Clause',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5'
],
packages=find_packages(),
install_requires=['tqdm', 'h5py', 'numpy==1.16.4', 'click', 'scipy'],
extras_require={
'interactive': ['jsonpatch@https://github.com/cjh1/python-json-patch/archive/tomviz.zip', 'marshmallow'],
'itk': ['itk'],
'pyfftw': ['pyfftw']
},
entry_points={
'console_scripts': [
'tomviz-pipeline = tomviz.cli:main'
]
}
)
|
from setuptools import setup, find_packages
setup(
name='tomviz-pipeline',
version='0.0.1',
description='Tomviz python external pipeline execution infrastructure.',
author='Kitware, Inc.',
author_email='kitware@kitware.com',
url='https://www.tomviz.org/',
license='BSD 3-Clause',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: BSD 3-Clause',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5'
],
packages=find_packages(),
install_requires=['tqdm', 'h5py', 'numpy==1.16.4', 'click', 'scipy'],
extras_require={
'interactive': ['jsonpatch', 'marshmallow'],
'itk': ['itk'],
'pyfftw': ['pyfftw']
},
entry_points={
'console_scripts': [
'tomviz-pipeline = tomviz.cli:main'
]
}
)
Use PEP 508 URL dependencies to specify patched jsonpatch
Signed-off-by: Chris Harris <a361e89d1eba6c570561222d75facbbf7aaeeafe@kitware.com>from setuptools import setup, find_packages
setup(
name='tomviz-pipeline',
version='0.0.1',
description='Tomviz python external pipeline execution infrastructure.',
author='Kitware, Inc.',
author_email='kitware@kitware.com',
url='https://www.tomviz.org/',
license='BSD 3-Clause',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: BSD 3-Clause',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5'
],
packages=find_packages(),
install_requires=['tqdm', 'h5py', 'numpy==1.16.4', 'click', 'scipy'],
extras_require={
'interactive': ['jsonpatch@https://github.com/cjh1/python-json-patch/archive/tomviz.zip', 'marshmallow'],
'itk': ['itk'],
'pyfftw': ['pyfftw']
},
entry_points={
'console_scripts': [
'tomviz-pipeline = tomviz.cli:main'
]
}
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='tomviz-pipeline',
version='0.0.1',
description='Tomviz python external pipeline execution infrastructure.',
author='Kitware, Inc.',
author_email='kitware@kitware.com',
url='https://www.tomviz.org/',
license='BSD 3-Clause',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: BSD 3-Clause',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5'
],
packages=find_packages(),
install_requires=['tqdm', 'h5py', 'numpy==1.16.4', 'click', 'scipy'],
extras_require={
'interactive': ['jsonpatch', 'marshmallow'],
'itk': ['itk'],
'pyfftw': ['pyfftw']
},
entry_points={
'console_scripts': [
'tomviz-pipeline = tomviz.cli:main'
]
}
)
<commit_msg>Use PEP 508 URL dependencies to specify patched jsonpatch
Signed-off-by: Chris Harris <a361e89d1eba6c570561222d75facbbf7aaeeafe@kitware.com><commit_after>from setuptools import setup, find_packages
setup(
name='tomviz-pipeline',
version='0.0.1',
description='Tomviz python external pipeline execution infrastructure.',
author='Kitware, Inc.',
author_email='kitware@kitware.com',
url='https://www.tomviz.org/',
license='BSD 3-Clause',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: BSD 3-Clause',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5'
],
packages=find_packages(),
install_requires=['tqdm', 'h5py', 'numpy==1.16.4', 'click', 'scipy'],
extras_require={
'interactive': ['jsonpatch@https://github.com/cjh1/python-json-patch/archive/tomviz.zip', 'marshmallow'],
'itk': ['itk'],
'pyfftw': ['pyfftw']
},
entry_points={
'console_scripts': [
'tomviz-pipeline = tomviz.cli:main'
]
}
)
|
25b84238204d3970c72d0ac133b0ff59ae4696bd
|
social/models.py
|
social/models.py
|
from django.db import models
# Create your models here.
class User(models.Model):
username = models.CharField(max_length=20)
display_name = models.CharField(max_length=30)
password = models.TextField()
join_date = models.DateField('date joined')
def __str__(self):
return self.display_name
def set_password(self, new_password):
self.password = new_password
class Post(models.Model):
author = models.ForeignKey(User)
post_text = models.CharField(max_length=500)
post_time = models.DateTimeField('time posted')
def __str__(self):
return self.post_text
|
from django.contrib.auth.models import AbstractBaseUser
from django.db import models
# Create your models here.
class User(AbstractBaseUser):
username = models.CharField(max_length=20)
display_name = models.CharField(max_length=30)
join_date = models.DateField('date joined')
USERNAME_FIELD = 'username'
REQUIRED_FIELDS = []
def get_full_name(self):
return self.display_name
def get_short_name(self):
return self.display_name
def __str__(self):
return self.display_name
class Post(models.Model):
author = models.ForeignKey(User)
post_text = models.CharField(max_length=500)
post_time = models.DateTimeField('time posted')
def __str__(self):
return self.post_text
|
Implement User that extends Django's AbstractBaseUser.
|
Implement User that extends Django's AbstractBaseUser.
|
Python
|
mit
|
eyohansa/temu,eyohansa/temu,eyohansa/temu
|
from django.db import models
# Create your models here.
class User(models.Model):
username = models.CharField(max_length=20)
display_name = models.CharField(max_length=30)
password = models.TextField()
join_date = models.DateField('date joined')
def __str__(self):
return self.display_name
def set_password(self, new_password):
self.password = new_password
class Post(models.Model):
author = models.ForeignKey(User)
post_text = models.CharField(max_length=500)
post_time = models.DateTimeField('time posted')
def __str__(self):
return self.post_text
Implement User that extends Django's AbstractBaseUser.
|
from django.contrib.auth.models import AbstractBaseUser
from django.db import models
# Create your models here.
class User(AbstractBaseUser):
username = models.CharField(max_length=20)
display_name = models.CharField(max_length=30)
join_date = models.DateField('date joined')
USERNAME_FIELD = 'username'
REQUIRED_FIELDS = []
def get_full_name(self):
return self.display_name
def get_short_name(self):
return self.display_name
def __str__(self):
return self.display_name
class Post(models.Model):
author = models.ForeignKey(User)
post_text = models.CharField(max_length=500)
post_time = models.DateTimeField('time posted')
def __str__(self):
return self.post_text
|
<commit_before>from django.db import models
# Create your models here.
class User(models.Model):
username = models.CharField(max_length=20)
display_name = models.CharField(max_length=30)
password = models.TextField()
join_date = models.DateField('date joined')
def __str__(self):
return self.display_name
def set_password(self, new_password):
self.password = new_password
class Post(models.Model):
author = models.ForeignKey(User)
post_text = models.CharField(max_length=500)
post_time = models.DateTimeField('time posted')
def __str__(self):
return self.post_text
<commit_msg>Implement User that extends Django's AbstractBaseUser.<commit_after>
|
from django.contrib.auth.models import AbstractBaseUser
from django.db import models
# Create your models here.
class User(AbstractBaseUser):
username = models.CharField(max_length=20)
display_name = models.CharField(max_length=30)
join_date = models.DateField('date joined')
USERNAME_FIELD = 'username'
REQUIRED_FIELDS = []
def get_full_name(self):
return self.display_name
def get_short_name(self):
return self.display_name
def __str__(self):
return self.display_name
class Post(models.Model):
author = models.ForeignKey(User)
post_text = models.CharField(max_length=500)
post_time = models.DateTimeField('time posted')
def __str__(self):
return self.post_text
|
from django.db import models
# Create your models here.
class User(models.Model):
username = models.CharField(max_length=20)
display_name = models.CharField(max_length=30)
password = models.TextField()
join_date = models.DateField('date joined')
def __str__(self):
return self.display_name
def set_password(self, new_password):
self.password = new_password
class Post(models.Model):
author = models.ForeignKey(User)
post_text = models.CharField(max_length=500)
post_time = models.DateTimeField('time posted')
def __str__(self):
return self.post_text
Implement User that extends Django's AbstractBaseUser.from django.contrib.auth.models import AbstractBaseUser
from django.db import models
# Create your models here.
class User(AbstractBaseUser):
username = models.CharField(max_length=20)
display_name = models.CharField(max_length=30)
join_date = models.DateField('date joined')
USERNAME_FIELD = 'username'
REQUIRED_FIELDS = []
def get_full_name(self):
return self.display_name
def get_short_name(self):
return self.display_name
def __str__(self):
return self.display_name
class Post(models.Model):
author = models.ForeignKey(User)
post_text = models.CharField(max_length=500)
post_time = models.DateTimeField('time posted')
def __str__(self):
return self.post_text
|
<commit_before>from django.db import models
# Create your models here.
class User(models.Model):
username = models.CharField(max_length=20)
display_name = models.CharField(max_length=30)
password = models.TextField()
join_date = models.DateField('date joined')
def __str__(self):
return self.display_name
def set_password(self, new_password):
self.password = new_password
class Post(models.Model):
author = models.ForeignKey(User)
post_text = models.CharField(max_length=500)
post_time = models.DateTimeField('time posted')
def __str__(self):
return self.post_text
<commit_msg>Implement User that extends Django's AbstractBaseUser.<commit_after>from django.contrib.auth.models import AbstractBaseUser
from django.db import models
# Create your models here.
class User(AbstractBaseUser):
username = models.CharField(max_length=20)
display_name = models.CharField(max_length=30)
join_date = models.DateField('date joined')
USERNAME_FIELD = 'username'
REQUIRED_FIELDS = []
def get_full_name(self):
return self.display_name
def get_short_name(self):
return self.display_name
def __str__(self):
return self.display_name
class Post(models.Model):
author = models.ForeignKey(User)
post_text = models.CharField(max_length=500)
post_time = models.DateTimeField('time posted')
def __str__(self):
return self.post_text
|
fc85f8846c188992438c935b9ba1ff0394bbc866
|
deployment/cfn/utils/constants.py
|
deployment/cfn/utils/constants.py
|
EC2_INSTANCE_TYPES = [
't2.micro',
't2.small',
't2.medium'
]
RDS_INSTANCE_TYPES = [
'db.t2.micro'
]
ELASTICACHE_INSTANCE_TYPES = [
'cache.m1.small'
]
ALLOW_ALL_CIDR = '0.0.0.0/0'
VPC_CIDR = '10.0.0.0/16'
GRAPHITE = 2003
GRAPHITE_WEB = 8080
HTTP = 80
HTTPS = 443
KIBANA = 5601
POSTGRESQL = 5432
REDIS = 6379
RELP = 20514
SSH = 22
STATSITE = 8125
|
EC2_INSTANCE_TYPES = [
't2.micro',
't2.small',
't2.medium',
't2.large'
]
RDS_INSTANCE_TYPES = [
'db.t2.micro',
'db.t2.small',
'db.t2.medium',
'db.t2.large'
]
ELASTICACHE_INSTANCE_TYPES = [
'cache.m1.small'
]
ALLOW_ALL_CIDR = '0.0.0.0/0'
VPC_CIDR = '10.0.0.0/16'
GRAPHITE = 2003
GRAPHITE_WEB = 8080
HTTP = 80
HTTPS = 443
KIBANA = 5601
POSTGRESQL = 5432
REDIS = 6379
RELP = 20514
SSH = 22
STATSITE = 8125
|
Increase options for EC2 and RDS instance types
|
Increase options for EC2 and RDS instance types
Adding small through large for both.
|
Python
|
apache-2.0
|
project-icp/bee-pollinator-app,WikiWatershed/model-my-watershed,kdeloach/model-my-watershed,lliss/model-my-watershed,project-icp/bee-pollinator-app,lliss/model-my-watershed,WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed,WikiWatershed/model-my-watershed,lliss/model-my-watershed,kdeloach/model-my-watershed,lliss/model-my-watershed,kdeloach/model-my-watershed,lliss/model-my-watershed,project-icp/bee-pollinator-app,kdeloach/model-my-watershed,kdeloach/model-my-watershed,project-icp/bee-pollinator-app,WikiWatershed/model-my-watershed
|
EC2_INSTANCE_TYPES = [
't2.micro',
't2.small',
't2.medium'
]
RDS_INSTANCE_TYPES = [
'db.t2.micro'
]
ELASTICACHE_INSTANCE_TYPES = [
'cache.m1.small'
]
ALLOW_ALL_CIDR = '0.0.0.0/0'
VPC_CIDR = '10.0.0.0/16'
GRAPHITE = 2003
GRAPHITE_WEB = 8080
HTTP = 80
HTTPS = 443
KIBANA = 5601
POSTGRESQL = 5432
REDIS = 6379
RELP = 20514
SSH = 22
STATSITE = 8125
Increase options for EC2 and RDS instance types
Adding small through large for both.
|
EC2_INSTANCE_TYPES = [
't2.micro',
't2.small',
't2.medium',
't2.large'
]
RDS_INSTANCE_TYPES = [
'db.t2.micro',
'db.t2.small',
'db.t2.medium',
'db.t2.large'
]
ELASTICACHE_INSTANCE_TYPES = [
'cache.m1.small'
]
ALLOW_ALL_CIDR = '0.0.0.0/0'
VPC_CIDR = '10.0.0.0/16'
GRAPHITE = 2003
GRAPHITE_WEB = 8080
HTTP = 80
HTTPS = 443
KIBANA = 5601
POSTGRESQL = 5432
REDIS = 6379
RELP = 20514
SSH = 22
STATSITE = 8125
|
<commit_before>EC2_INSTANCE_TYPES = [
't2.micro',
't2.small',
't2.medium'
]
RDS_INSTANCE_TYPES = [
'db.t2.micro'
]
ELASTICACHE_INSTANCE_TYPES = [
'cache.m1.small'
]
ALLOW_ALL_CIDR = '0.0.0.0/0'
VPC_CIDR = '10.0.0.0/16'
GRAPHITE = 2003
GRAPHITE_WEB = 8080
HTTP = 80
HTTPS = 443
KIBANA = 5601
POSTGRESQL = 5432
REDIS = 6379
RELP = 20514
SSH = 22
STATSITE = 8125
<commit_msg>Increase options for EC2 and RDS instance types
Adding small through large for both.<commit_after>
|
EC2_INSTANCE_TYPES = [
't2.micro',
't2.small',
't2.medium',
't2.large'
]
RDS_INSTANCE_TYPES = [
'db.t2.micro',
'db.t2.small',
'db.t2.medium',
'db.t2.large'
]
ELASTICACHE_INSTANCE_TYPES = [
'cache.m1.small'
]
ALLOW_ALL_CIDR = '0.0.0.0/0'
VPC_CIDR = '10.0.0.0/16'
GRAPHITE = 2003
GRAPHITE_WEB = 8080
HTTP = 80
HTTPS = 443
KIBANA = 5601
POSTGRESQL = 5432
REDIS = 6379
RELP = 20514
SSH = 22
STATSITE = 8125
|
EC2_INSTANCE_TYPES = [
't2.micro',
't2.small',
't2.medium'
]
RDS_INSTANCE_TYPES = [
'db.t2.micro'
]
ELASTICACHE_INSTANCE_TYPES = [
'cache.m1.small'
]
ALLOW_ALL_CIDR = '0.0.0.0/0'
VPC_CIDR = '10.0.0.0/16'
GRAPHITE = 2003
GRAPHITE_WEB = 8080
HTTP = 80
HTTPS = 443
KIBANA = 5601
POSTGRESQL = 5432
REDIS = 6379
RELP = 20514
SSH = 22
STATSITE = 8125
Increase options for EC2 and RDS instance types
Adding small through large for both.EC2_INSTANCE_TYPES = [
't2.micro',
't2.small',
't2.medium',
't2.large'
]
RDS_INSTANCE_TYPES = [
'db.t2.micro',
'db.t2.small',
'db.t2.medium',
'db.t2.large'
]
ELASTICACHE_INSTANCE_TYPES = [
'cache.m1.small'
]
ALLOW_ALL_CIDR = '0.0.0.0/0'
VPC_CIDR = '10.0.0.0/16'
GRAPHITE = 2003
GRAPHITE_WEB = 8080
HTTP = 80
HTTPS = 443
KIBANA = 5601
POSTGRESQL = 5432
REDIS = 6379
RELP = 20514
SSH = 22
STATSITE = 8125
|
<commit_before>EC2_INSTANCE_TYPES = [
't2.micro',
't2.small',
't2.medium'
]
RDS_INSTANCE_TYPES = [
'db.t2.micro'
]
ELASTICACHE_INSTANCE_TYPES = [
'cache.m1.small'
]
ALLOW_ALL_CIDR = '0.0.0.0/0'
VPC_CIDR = '10.0.0.0/16'
GRAPHITE = 2003
GRAPHITE_WEB = 8080
HTTP = 80
HTTPS = 443
KIBANA = 5601
POSTGRESQL = 5432
REDIS = 6379
RELP = 20514
SSH = 22
STATSITE = 8125
<commit_msg>Increase options for EC2 and RDS instance types
Adding small through large for both.<commit_after>EC2_INSTANCE_TYPES = [
't2.micro',
't2.small',
't2.medium',
't2.large'
]
RDS_INSTANCE_TYPES = [
'db.t2.micro',
'db.t2.small',
'db.t2.medium',
'db.t2.large'
]
ELASTICACHE_INSTANCE_TYPES = [
'cache.m1.small'
]
ALLOW_ALL_CIDR = '0.0.0.0/0'
VPC_CIDR = '10.0.0.0/16'
GRAPHITE = 2003
GRAPHITE_WEB = 8080
HTTP = 80
HTTPS = 443
KIBANA = 5601
POSTGRESQL = 5432
REDIS = 6379
RELP = 20514
SSH = 22
STATSITE = 8125
|
f70eec24ef936db6318464da27dc9c619da339d3
|
scratch/asb/experiment_json_to_cbf_def.py
|
scratch/asb/experiment_json_to_cbf_def.py
|
from __future__ import division
# Script to convert the output from refine_quadrants to a header file
# Apply the header file to the cbfs with cxi.apply_metrology
# Note hardcoded distance of 105
from dials.util.command_line import Importer
from xfel.cftbx.detector.cspad_cbf_tbx import write_cspad_cbf, map_detector_to_basis_dict
importer = Importer(['refined_experiments.json'], check_format=False)
experiment = importer.experiments[0]
detector = experiment.detector
metro = map_detector_to_basis_dict(detector)
write_cspad_cbf(None, metro, 'cbf', None, 'quad_refined.def', None, 105, header_only=True)
print "Done"
|
from __future__ import division
# Script to convert the output from refine_quadrants to a header file
# Note hardcoded distance of 100 isn't relevant for just a cbf header
from dials.util.options import OptionParser
from dials.util.options import flatten_experiments
from xfel.cftbx.detector.cspad_cbf_tbx import write_cspad_cbf, map_detector_to_basis_dict
class Script(object):
def __init__(self):
# Create the parser
self.parser = OptionParser(
read_experiments=True)
def run(self):
params, options = self.parser.parse_args(show_diff_phil=True)
experiments = flatten_experiments(params.input.experiments)
detector = experiments[0].detector
metro = map_detector_to_basis_dict(detector)
write_cspad_cbf(None, metro, 'cbf', None, 'refined_detector.def', None, 100, header_only=True)
print "Done"
if __name__ == '__main__':
from dials.util import halraiser
try:
script = Script()
script.run()
except Exception as e:
halraiser(e)
|
Refactor for new OptionParser interface
|
Refactor for new OptionParser interface
|
Python
|
bsd-3-clause
|
dials/dials,dials/dials,dials/dials,dials/dials,dials/dials
|
from __future__ import division
# Script to convert the output from refine_quadrants to a header file
# Apply the header file to the cbfs with cxi.apply_metrology
# Note hardcoded distance of 105
from dials.util.command_line import Importer
from xfel.cftbx.detector.cspad_cbf_tbx import write_cspad_cbf, map_detector_to_basis_dict
importer = Importer(['refined_experiments.json'], check_format=False)
experiment = importer.experiments[0]
detector = experiment.detector
metro = map_detector_to_basis_dict(detector)
write_cspad_cbf(None, metro, 'cbf', None, 'quad_refined.def', None, 105, header_only=True)
print "Done"
Refactor for new OptionParser interface
|
from __future__ import division
# Script to convert the output from refine_quadrants to a header file
# Note hardcoded distance of 100 isn't relevant for just a cbf header
from dials.util.options import OptionParser
from dials.util.options import flatten_experiments
from xfel.cftbx.detector.cspad_cbf_tbx import write_cspad_cbf, map_detector_to_basis_dict
class Script(object):
def __init__(self):
# Create the parser
self.parser = OptionParser(
read_experiments=True)
def run(self):
params, options = self.parser.parse_args(show_diff_phil=True)
experiments = flatten_experiments(params.input.experiments)
detector = experiments[0].detector
metro = map_detector_to_basis_dict(detector)
write_cspad_cbf(None, metro, 'cbf', None, 'refined_detector.def', None, 100, header_only=True)
print "Done"
if __name__ == '__main__':
from dials.util import halraiser
try:
script = Script()
script.run()
except Exception as e:
halraiser(e)
|
<commit_before>from __future__ import division
# Script to convert the output from refine_quadrants to a header file
# Apply the header file to the cbfs with cxi.apply_metrology
# Note hardcoded distance of 105
from dials.util.command_line import Importer
from xfel.cftbx.detector.cspad_cbf_tbx import write_cspad_cbf, map_detector_to_basis_dict
importer = Importer(['refined_experiments.json'], check_format=False)
experiment = importer.experiments[0]
detector = experiment.detector
metro = map_detector_to_basis_dict(detector)
write_cspad_cbf(None, metro, 'cbf', None, 'quad_refined.def', None, 105, header_only=True)
print "Done"
<commit_msg>Refactor for new OptionParser interface<commit_after>
|
from __future__ import division
# Script to convert the output from refine_quadrants to a header file
# Note hardcoded distance of 100 isn't relevant for just a cbf header
from dials.util.options import OptionParser
from dials.util.options import flatten_experiments
from xfel.cftbx.detector.cspad_cbf_tbx import write_cspad_cbf, map_detector_to_basis_dict
class Script(object):
def __init__(self):
# Create the parser
self.parser = OptionParser(
read_experiments=True)
def run(self):
params, options = self.parser.parse_args(show_diff_phil=True)
experiments = flatten_experiments(params.input.experiments)
detector = experiments[0].detector
metro = map_detector_to_basis_dict(detector)
write_cspad_cbf(None, metro, 'cbf', None, 'refined_detector.def', None, 100, header_only=True)
print "Done"
if __name__ == '__main__':
from dials.util import halraiser
try:
script = Script()
script.run()
except Exception as e:
halraiser(e)
|
from __future__ import division
# Script to convert the output from refine_quadrants to a header file
# Apply the header file to the cbfs with cxi.apply_metrology
# Note hardcoded distance of 105
from dials.util.command_line import Importer
from xfel.cftbx.detector.cspad_cbf_tbx import write_cspad_cbf, map_detector_to_basis_dict
importer = Importer(['refined_experiments.json'], check_format=False)
experiment = importer.experiments[0]
detector = experiment.detector
metro = map_detector_to_basis_dict(detector)
write_cspad_cbf(None, metro, 'cbf', None, 'quad_refined.def', None, 105, header_only=True)
print "Done"
Refactor for new OptionParser interfacefrom __future__ import division
# Script to convert the output from refine_quadrants to a header file
# Note hardcoded distance of 100 isn't relevant for just a cbf header
from dials.util.options import OptionParser
from dials.util.options import flatten_experiments
from xfel.cftbx.detector.cspad_cbf_tbx import write_cspad_cbf, map_detector_to_basis_dict
class Script(object):
def __init__(self):
# Create the parser
self.parser = OptionParser(
read_experiments=True)
def run(self):
params, options = self.parser.parse_args(show_diff_phil=True)
experiments = flatten_experiments(params.input.experiments)
detector = experiments[0].detector
metro = map_detector_to_basis_dict(detector)
write_cspad_cbf(None, metro, 'cbf', None, 'refined_detector.def', None, 100, header_only=True)
print "Done"
if __name__ == '__main__':
from dials.util import halraiser
try:
script = Script()
script.run()
except Exception as e:
halraiser(e)
|
<commit_before>from __future__ import division
# Script to convert the output from refine_quadrants to a header file
# Apply the header file to the cbfs with cxi.apply_metrology
# Note hardcoded distance of 105
from dials.util.command_line import Importer
from xfel.cftbx.detector.cspad_cbf_tbx import write_cspad_cbf, map_detector_to_basis_dict
importer = Importer(['refined_experiments.json'], check_format=False)
experiment = importer.experiments[0]
detector = experiment.detector
metro = map_detector_to_basis_dict(detector)
write_cspad_cbf(None, metro, 'cbf', None, 'quad_refined.def', None, 105, header_only=True)
print "Done"
<commit_msg>Refactor for new OptionParser interface<commit_after>from __future__ import division
# Script to convert the output from refine_quadrants to a header file
# Note hardcoded distance of 100 isn't relevant for just a cbf header
from dials.util.options import OptionParser
from dials.util.options import flatten_experiments
from xfel.cftbx.detector.cspad_cbf_tbx import write_cspad_cbf, map_detector_to_basis_dict
class Script(object):
def __init__(self):
# Create the parser
self.parser = OptionParser(
read_experiments=True)
def run(self):
params, options = self.parser.parse_args(show_diff_phil=True)
experiments = flatten_experiments(params.input.experiments)
detector = experiments[0].detector
metro = map_detector_to_basis_dict(detector)
write_cspad_cbf(None, metro, 'cbf', None, 'refined_detector.def', None, 100, header_only=True)
print "Done"
if __name__ == '__main__':
from dials.util import halraiser
try:
script = Script()
script.run()
except Exception as e:
halraiser(e)
|
ed9635ab7ca086bb79a48daae8a390887b7bf78f
|
datadict/datadict_utils.py
|
datadict/datadict_utils.py
|
import pandas as pd
def load_datadict(filepath, trim_index=True, trim_all=False):
df = pd.read_csv(filepath, index_col=0)
if trim_index:
df.index = df.index.to_series().str.strip()
if trim_all:
df = df.applymap(lambda x: x.strip() if type(x) is str else x)
return df
def insert_rows_at(main_df, index_name, inserted_df, insert_before=False):
# Not checking if index exists because that will be apparent from error
# NOTE: This will not work with duplicate indices
pre_df = main_df.loc[:index_name]
post_df = main_df.loc[index_name:]
# Both pre_ and post_ contain the value at index_name, so one needs to
# drop it
if not insert_before:
pre_df = pre_df.drop(index_name)
else:
post_df = post_df.drop(index_name)
return pd.concat([pre_df, inserted_df, post_df],
axis=0)
|
import pandas as pd
def load_datadict(filepath, trim_index=True, trim_all=False):
df = pd.read_csv(filepath, index_col=0, dtype=object)
if trim_index:
df.index = df.index.to_series().str.strip()
if trim_all:
df = df.applymap(lambda x: x.strip() if type(x) is str else x)
return df
def insert_rows_at(main_df, index_name, inserted_df, insert_before=False):
# Not checking if index exists because that will be apparent from error
# NOTE: This will not work with duplicate indices
pre_df = main_df.loc[:index_name]
post_df = main_df.loc[index_name:]
# Both pre_ and post_ contain the value at index_name, so one needs to
# drop it
if not insert_before:
pre_df = pre_df.drop(index_name)
else:
post_df = post_df.drop(index_name)
return pd.concat([pre_df, inserted_df, post_df],
axis=0)
|
Read datadict file as-is, without type-guessing
|
Read datadict file as-is, without type-guessing
|
Python
|
bsd-3-clause
|
sibis-platform/ncanda-datacore,sibis-platform/ncanda-datacore,sibis-platform/ncanda-data-integration,sibis-platform/ncanda-datacore,sibis-platform/ncanda-data-integration
|
import pandas as pd
def load_datadict(filepath, trim_index=True, trim_all=False):
df = pd.read_csv(filepath, index_col=0)
if trim_index:
df.index = df.index.to_series().str.strip()
if trim_all:
df = df.applymap(lambda x: x.strip() if type(x) is str else x)
return df
def insert_rows_at(main_df, index_name, inserted_df, insert_before=False):
# Not checking if index exists because that will be apparent from error
# NOTE: This will not work with duplicate indices
pre_df = main_df.loc[:index_name]
post_df = main_df.loc[index_name:]
# Both pre_ and post_ contain the value at index_name, so one needs to
# drop it
if not insert_before:
pre_df = pre_df.drop(index_name)
else:
post_df = post_df.drop(index_name)
return pd.concat([pre_df, inserted_df, post_df],
axis=0)
Read datadict file as-is, without type-guessing
|
import pandas as pd
def load_datadict(filepath, trim_index=True, trim_all=False):
df = pd.read_csv(filepath, index_col=0, dtype=object)
if trim_index:
df.index = df.index.to_series().str.strip()
if trim_all:
df = df.applymap(lambda x: x.strip() if type(x) is str else x)
return df
def insert_rows_at(main_df, index_name, inserted_df, insert_before=False):
# Not checking if index exists because that will be apparent from error
# NOTE: This will not work with duplicate indices
pre_df = main_df.loc[:index_name]
post_df = main_df.loc[index_name:]
# Both pre_ and post_ contain the value at index_name, so one needs to
# drop it
if not insert_before:
pre_df = pre_df.drop(index_name)
else:
post_df = post_df.drop(index_name)
return pd.concat([pre_df, inserted_df, post_df],
axis=0)
|
<commit_before>import pandas as pd
def load_datadict(filepath, trim_index=True, trim_all=False):
df = pd.read_csv(filepath, index_col=0)
if trim_index:
df.index = df.index.to_series().str.strip()
if trim_all:
df = df.applymap(lambda x: x.strip() if type(x) is str else x)
return df
def insert_rows_at(main_df, index_name, inserted_df, insert_before=False):
# Not checking if index exists because that will be apparent from error
# NOTE: This will not work with duplicate indices
pre_df = main_df.loc[:index_name]
post_df = main_df.loc[index_name:]
# Both pre_ and post_ contain the value at index_name, so one needs to
# drop it
if not insert_before:
pre_df = pre_df.drop(index_name)
else:
post_df = post_df.drop(index_name)
return pd.concat([pre_df, inserted_df, post_df],
axis=0)
<commit_msg>Read datadict file as-is, without type-guessing<commit_after>
|
import pandas as pd
def load_datadict(filepath, trim_index=True, trim_all=False):
df = pd.read_csv(filepath, index_col=0, dtype=object)
if trim_index:
df.index = df.index.to_series().str.strip()
if trim_all:
df = df.applymap(lambda x: x.strip() if type(x) is str else x)
return df
def insert_rows_at(main_df, index_name, inserted_df, insert_before=False):
# Not checking if index exists because that will be apparent from error
# NOTE: This will not work with duplicate indices
pre_df = main_df.loc[:index_name]
post_df = main_df.loc[index_name:]
# Both pre_ and post_ contain the value at index_name, so one needs to
# drop it
if not insert_before:
pre_df = pre_df.drop(index_name)
else:
post_df = post_df.drop(index_name)
return pd.concat([pre_df, inserted_df, post_df],
axis=0)
|
import pandas as pd
def load_datadict(filepath, trim_index=True, trim_all=False):
df = pd.read_csv(filepath, index_col=0)
if trim_index:
df.index = df.index.to_series().str.strip()
if trim_all:
df = df.applymap(lambda x: x.strip() if type(x) is str else x)
return df
def insert_rows_at(main_df, index_name, inserted_df, insert_before=False):
# Not checking if index exists because that will be apparent from error
# NOTE: This will not work with duplicate indices
pre_df = main_df.loc[:index_name]
post_df = main_df.loc[index_name:]
# Both pre_ and post_ contain the value at index_name, so one needs to
# drop it
if not insert_before:
pre_df = pre_df.drop(index_name)
else:
post_df = post_df.drop(index_name)
return pd.concat([pre_df, inserted_df, post_df],
axis=0)
Read datadict file as-is, without type-guessingimport pandas as pd
def load_datadict(filepath, trim_index=True, trim_all=False):
df = pd.read_csv(filepath, index_col=0, dtype=object)
if trim_index:
df.index = df.index.to_series().str.strip()
if trim_all:
df = df.applymap(lambda x: x.strip() if type(x) is str else x)
return df
def insert_rows_at(main_df, index_name, inserted_df, insert_before=False):
# Not checking if index exists because that will be apparent from error
# NOTE: This will not work with duplicate indices
pre_df = main_df.loc[:index_name]
post_df = main_df.loc[index_name:]
# Both pre_ and post_ contain the value at index_name, so one needs to
# drop it
if not insert_before:
pre_df = pre_df.drop(index_name)
else:
post_df = post_df.drop(index_name)
return pd.concat([pre_df, inserted_df, post_df],
axis=0)
|
<commit_before>import pandas as pd
def load_datadict(filepath, trim_index=True, trim_all=False):
df = pd.read_csv(filepath, index_col=0)
if trim_index:
df.index = df.index.to_series().str.strip()
if trim_all:
df = df.applymap(lambda x: x.strip() if type(x) is str else x)
return df
def insert_rows_at(main_df, index_name, inserted_df, insert_before=False):
# Not checking if index exists because that will be apparent from error
# NOTE: This will not work with duplicate indices
pre_df = main_df.loc[:index_name]
post_df = main_df.loc[index_name:]
# Both pre_ and post_ contain the value at index_name, so one needs to
# drop it
if not insert_before:
pre_df = pre_df.drop(index_name)
else:
post_df = post_df.drop(index_name)
return pd.concat([pre_df, inserted_df, post_df],
axis=0)
<commit_msg>Read datadict file as-is, without type-guessing<commit_after>import pandas as pd
def load_datadict(filepath, trim_index=True, trim_all=False):
df = pd.read_csv(filepath, index_col=0, dtype=object)
if trim_index:
df.index = df.index.to_series().str.strip()
if trim_all:
df = df.applymap(lambda x: x.strip() if type(x) is str else x)
return df
def insert_rows_at(main_df, index_name, inserted_df, insert_before=False):
# Not checking if index exists because that will be apparent from error
# NOTE: This will not work with duplicate indices
pre_df = main_df.loc[:index_name]
post_df = main_df.loc[index_name:]
# Both pre_ and post_ contain the value at index_name, so one needs to
# drop it
if not insert_before:
pre_df = pre_df.drop(index_name)
else:
post_df = post_df.drop(index_name)
return pd.concat([pre_df, inserted_df, post_df],
axis=0)
|
d7697891f86603d9901f02209bb4921fc1e2d209
|
smif/http_api/app.py
|
smif/http_api/app.py
|
"""Provide APP constant for the purposes of manually running the flask app
For example, set up environment variables then run the app::
export FLASK_APP=smif.http_api.app
export FLASK_DEBUG=1
flask run
"""
import os
from smif.data_layer import DatafileInterface
from smif.http_api import create_app
def get_connection():
"""Return a data_layer connection
"""
return DatafileInterface(
os.path.join(os.path.dirname(__file__), '..', '..', 'tests', 'fixtures', 'single_run')
)
APP = create_app(
static_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
template_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
get_connection=get_connection
)
|
"""Provide APP constant for the purposes of manually running the flask app
For example, build the front end, then run the app with environment variables::
cd smif/app/
npm run build
cd ../http_api/
FLASK_APP=smif.http_api.app FLASK_DEBUG=1 flask run
"""
import os
from smif.data_layer import DatafileInterface
from smif.http_api import create_app
def get_connection():
"""Return a data_layer connection
"""
return DatafileInterface(
os.path.join(os.path.dirname(__file__), '..', '..', 'tests', 'fixtures', 'single_run')
)
APP = create_app(
static_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
template_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
get_connection=get_connection
)
|
Add front end build step to comment on running manually
|
Add front end build step to comment on running manually
|
Python
|
mit
|
willu47/smif,nismod/smif,willu47/smif,nismod/smif,nismod/smif,nismod/smif,tomalrussell/smif,willu47/smif,tomalrussell/smif,tomalrussell/smif,tomalrussell/smif,willu47/smif
|
"""Provide APP constant for the purposes of manually running the flask app
For example, set up environment variables then run the app::
export FLASK_APP=smif.http_api.app
export FLASK_DEBUG=1
flask run
"""
import os
from smif.data_layer import DatafileInterface
from smif.http_api import create_app
def get_connection():
"""Return a data_layer connection
"""
return DatafileInterface(
os.path.join(os.path.dirname(__file__), '..', '..', 'tests', 'fixtures', 'single_run')
)
APP = create_app(
static_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
template_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
get_connection=get_connection
)
Add front end build step to comment on running manually
|
"""Provide APP constant for the purposes of manually running the flask app
For example, build the front end, then run the app with environment variables::
cd smif/app/
npm run build
cd ../http_api/
FLASK_APP=smif.http_api.app FLASK_DEBUG=1 flask run
"""
import os
from smif.data_layer import DatafileInterface
from smif.http_api import create_app
def get_connection():
"""Return a data_layer connection
"""
return DatafileInterface(
os.path.join(os.path.dirname(__file__), '..', '..', 'tests', 'fixtures', 'single_run')
)
APP = create_app(
static_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
template_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
get_connection=get_connection
)
|
<commit_before>"""Provide APP constant for the purposes of manually running the flask app
For example, set up environment variables then run the app::
export FLASK_APP=smif.http_api.app
export FLASK_DEBUG=1
flask run
"""
import os
from smif.data_layer import DatafileInterface
from smif.http_api import create_app
def get_connection():
"""Return a data_layer connection
"""
return DatafileInterface(
os.path.join(os.path.dirname(__file__), '..', '..', 'tests', 'fixtures', 'single_run')
)
APP = create_app(
static_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
template_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
get_connection=get_connection
)
<commit_msg>Add front end build step to comment on running manually<commit_after>
|
"""Provide APP constant for the purposes of manually running the flask app
For example, build the front end, then run the app with environment variables::
cd smif/app/
npm run build
cd ../http_api/
FLASK_APP=smif.http_api.app FLASK_DEBUG=1 flask run
"""
import os
from smif.data_layer import DatafileInterface
from smif.http_api import create_app
def get_connection():
"""Return a data_layer connection
"""
return DatafileInterface(
os.path.join(os.path.dirname(__file__), '..', '..', 'tests', 'fixtures', 'single_run')
)
APP = create_app(
static_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
template_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
get_connection=get_connection
)
|
"""Provide APP constant for the purposes of manually running the flask app
For example, set up environment variables then run the app::
export FLASK_APP=smif.http_api.app
export FLASK_DEBUG=1
flask run
"""
import os
from smif.data_layer import DatafileInterface
from smif.http_api import create_app
def get_connection():
"""Return a data_layer connection
"""
return DatafileInterface(
os.path.join(os.path.dirname(__file__), '..', '..', 'tests', 'fixtures', 'single_run')
)
APP = create_app(
static_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
template_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
get_connection=get_connection
)
Add front end build step to comment on running manually
"""Provide APP constant for the purposes of manually running the flask app
For example, build the front end, then run the app with environment variables::
cd smif/app/
npm run build
cd ../http_api/
FLASK_APP=smif.http_api.app FLASK_DEBUG=1 flask run
"""
import os
from smif.data_layer import DatafileInterface
from smif.http_api import create_app
def get_connection():
"""Return a data_layer connection
"""
return DatafileInterface(
os.path.join(os.path.dirname(__file__), '..', '..', 'tests', 'fixtures', 'single_run')
)
APP = create_app(
static_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
template_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
get_connection=get_connection
)
|
<commit_before>"""Provide APP constant for the purposes of manually running the flask app
For example, set up environment variables then run the app::
export FLASK_APP=smif.http_api.app
export FLASK_DEBUG=1
flask run
"""
import os
from smif.data_layer import DatafileInterface
from smif.http_api import create_app
def get_connection():
"""Return a data_layer connection
"""
return DatafileInterface(
os.path.join(os.path.dirname(__file__), '..', '..', 'tests', 'fixtures', 'single_run')
)
APP = create_app(
static_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
template_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
get_connection=get_connection
)
<commit_msg>Add front end build step to comment on running manually<commit_after>"""Provide APP constant for the purposes of manually running the flask app
For example, build the front end, then run the app with environment variables::
cd smif/app/
npm run build
cd ../http_api/
FLASK_APP=smif.http_api.app FLASK_DEBUG=1 flask run
"""
import os
from smif.data_layer import DatafileInterface
from smif.http_api import create_app
def get_connection():
"""Return a data_layer connection
"""
return DatafileInterface(
os.path.join(os.path.dirname(__file__), '..', '..', 'tests', 'fixtures', 'single_run')
)
APP = create_app(
static_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
template_folder=os.path.join(os.path.dirname(__file__), '..', 'app', 'dist'),
get_connection=get_connection
)
|
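The record above exposes a module-level APP built by an application factory so that the flask CLI can discover it. A hedged sketch of the same shape, runnable without the smif project (the factory below is a minimal stand-in, not smif's create_app):

from flask import Flask

def create_app(static_folder=None, template_folder=None):
    # Minimal stand-in for an application factory.
    app = Flask(__name__, static_folder=static_folder,
                template_folder=template_folder)

    @app.route("/")
    def home():
        return "hi"

    return app

APP = create_app()

if __name__ == "__main__":
    # Roughly what FLASK_DEBUG=1 flask run does with the module-level APP.
    APP.run(debug=True)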
0f55195f4461c80e85d132026a70049b36b8cc0b
|
sub_numbers_lambda/handle.py
|
sub_numbers_lambda/handle.py
|
import json
import time
def lambda_handler(event,context):
number_1 = event['key1']
number_2 = event['key2']
return {"number" : abs(number_1 - number_2)}
|
import json
def lambda_handler(event, context):
number_1 = int(event['key1'])
number_2 = int(event['key2'])
return {"number" : abs(number_1 - number_2)}
|
Add int() function for casting from string to integer
|
Add int() function for casting from string to integer
|
Python
|
mit
|
OsamaJBR/teach-me-aws-stepfunctions
|
import json
import time
def lambda_handler(event,context):
number_1 = event['key1']
number_2 = event['key2']
return {"number" : abs(number_1 - number_2)}
Add int() function for casting from string to integer
|
import json
def lambda_handler(event, context):
number_1 = int(event['key1'])
number_2 = int(event['key2'])
return {"number" : abs(number_1 - number_2)}
|
<commit_before>import json
import time
def lambda_handler(event,context):
number_1 = event['key1']
number_2 = event['key2']
return {"number" : abs(number_1 - number_2)}<commit_msg>Add int() function for casting from string to integer<commit_after>
|
import json
def lambda_handler(event, context):
number_1 = int(event['key1'])
number_2 = int(event['key2'])
return {"number" : abs(number_1 - number_2)}
|
import json
import time
def lambda_handler(event,context):
number_1 = event['key1']
number_2 = event['key2']
return {"number" : abs(number_1 - number_2)}
Add int() function for casting from string to integer
import json
def lambda_handler(event, context):
number_1 = int(event['key1'])
number_2 = int(event['key2'])
return {"number" : abs(number_1 - number_2)}
|
<commit_before>import json
import time
def lambda_handler(event,context):
number_1 = event['key1']
number_2 = event['key2']
return {"number" : abs(number_1 - number_2)}<commit_msg>Add int() function for casting from string to integer<commit_after>import json
def lambda_handler(event, context):
number_1 = int(event['key1'])
number_2 = int(event['key2'])
return {"number" : abs(number_1 - number_2)}
|
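The fix above casts event values with int() because clients and test events frequently deliver numbers as strings. A small local smoke test of the handler (the sample event is made up; no AWS services are involved):

def lambda_handler(event, context):
    number_1 = int(event['key1'])
    number_2 = int(event['key2'])
    return {"number": abs(number_1 - number_2)}

if __name__ == "__main__":
    event = {"key1": "10", "key2": "3"}          # string inputs, as a client might send them
    print(lambda_handler(event, context=None))   # {'number': 7}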
99aa2f415659e73329982110cc9bead50a856226
|
zsl/__init__.py
|
zsl/__init__.py
|
"""
:mod:`zsl` -- zsl module
========================
Main service module.
:platform: Unix, Windows
:synopsis: The Atteq Service Layer. Service for exposing data to clients. Just provides DB access, feeds access and \
other various aspects of service applications.
.. moduleauthor:: Martin Babka <babka@atteq.com>
"""
from __future__ import unicode_literals
__version__ = '0.15.3'
from flask import Config
from injector import Module
from zsl.application.initialization_context import InitializationContext as ApplicationContext
from zsl.utils.injection_helper import inject
from zsl.application.service_application import ServiceApplication
Zsl = ServiceApplication
# placeholder for default value used in function declaration for arguments which will be injected
Injected = None
|
"""
:mod:`zsl` -- zsl module
========================
Main service module.
:platform: Unix, Windows
:synopsis: The Atteq Service Layer. Service for exposing data to clients. Just provides DB access, feeds access and \
other various aspects of service applications.
.. moduleauthor:: Martin Babka <babka@atteq.com>
"""
from __future__ import unicode_literals
__version__ = '0.15.5'
from flask import Config
from injector import Module
from zsl.application.initialization_context import InitializationContext as ApplicationContext
from zsl.utils.injection_helper import inject
from zsl.application.service_application import ServiceApplication
Zsl = ServiceApplication
# placeholder for default value used in function declaration for arguments which will be injected
Injected = None
|
Increment version after unit testing refactoring
|
Increment version after unit testing refactoring
|
Python
|
mit
|
AtteqCom/zsl,AtteqCom/zsl
|
"""
:mod:`zsl` -- zsl module
========================
Main service module.
:platform: Unix, Windows
:synopsis: The Atteq Service Layer. Service for exposing data to clients. Just provides DB access, feeds access and \
other various aspects of service applications.
.. moduleauthor:: Martin Babka <babka@atteq.com>
"""
from __future__ import unicode_literals
__version__ = '0.15.3'
from flask import Config
from injector import Module
from zsl.application.initialization_context import InitializationContext as ApplicationContext
from zsl.utils.injection_helper import inject
from zsl.application.service_application import ServiceApplication
Zsl = ServiceApplication
# placeholder for default value used in function declaration for arguments which will be injected
Injected = None
Increment version after unit testing refactoring
|
"""
:mod:`zsl` -- zsl module
========================
Main service module.
:platform: Unix, Windows
:synopsis: The Atteq Service Layer. Service for exposing data to clients. Just provides DB access, feeds access and \
other various aspects of service applications.
.. moduleauthor:: Martin Babka <babka@atteq.com>
"""
from __future__ import unicode_literals
__version__ = '0.15.5'
from flask import Config
from injector import Module
from zsl.application.initialization_context import InitializationContext as ApplicationContext
from zsl.utils.injection_helper import inject
from zsl.application.service_application import ServiceApplication
Zsl = ServiceApplication
# placeholder for default value used in function declaration for arguments which will be injected
Injected = None
|
<commit_before>"""
:mod:`zsl` -- zsl module
========================
Main service module.
:platform: Unix, Windows
:synopsis: The Atteq Service Layer. Service for exposing data to clients. Just provides DB access, feeds access and \
other various aspects of service applications.
.. moduleauthor:: Martin Babka <babka@atteq.com>
"""
from __future__ import unicode_literals
__version__ = '0.15.3'
from flask import Config
from injector import Module
from zsl.application.initialization_context import InitializationContext as ApplicationContext
from zsl.utils.injection_helper import inject
from zsl.application.service_application import ServiceApplication
Zsl = ServiceApplication
# placeholder for default value used in function declaration for arguments which will be injected
Injected = None
<commit_msg>Increment version after unit testing refactoring<commit_after>
|
"""
:mod:`zsl` -- zsl module
========================
Main service module.
:platform: Unix, Windows
:synopsis: The Atteq Service Layer. Service for exposing data to clients. Just provides DB access, feeds access and \
other various aspects of service applications.
.. moduleauthor:: Martin Babka <babka@atteq.com>
"""
from __future__ import unicode_literals
__version__ = '0.15.5'
from flask import Config
from injector import Module
from zsl.application.initialization_context import InitializationContext as ApplicationContext
from zsl.utils.injection_helper import inject
from zsl.application.service_application import ServiceApplication
Zsl = ServiceApplication
# placeholder for default value used in function declaration for arguments which will be injected
Injected = None
|
"""
:mod:`zsl` -- zsl module
========================
Main service module.
:platform: Unix, Windows
:synopsis: The Atteq Service Layer. Service for exposing data to clients. Just provides DB access, feeds access and \
other various aspects of service applications.
.. moduleauthor:: Martin Babka <babka@atteq.com>
"""
from __future__ import unicode_literals
__version__ = '0.15.3'
from flask import Config
from injector import Module
from zsl.application.initialization_context import InitializationContext as ApplicationContext
from zsl.utils.injection_helper import inject
from zsl.application.service_application import ServiceApplication
Zsl = ServiceApplication
# placeholder for default value used in function declaration for arguments which will be injected
Injected = None
Increment version after unit testing refactoring
"""
:mod:`zsl` -- zsl module
========================
Main service module.
:platform: Unix, Windows
:synopsis: The Atteq Service Layer. Service for exposing data to clients. Just provides DB access, feeds access and \
other various aspects of service applications.
.. moduleauthor:: Martin Babka <babka@atteq.com>
"""
from __future__ import unicode_literals
__version__ = '0.15.5'
from flask import Config
from injector import Module
from zsl.application.initialization_context import InitializationContext as ApplicationContext
from zsl.utils.injection_helper import inject
from zsl.application.service_application import ServiceApplication
Zsl = ServiceApplication
# placeholder for default value used in function declaration for arguments which will be injected
Injected = None
|
<commit_before>"""
:mod:`zsl` -- zsl module
========================
Main service module.
:platform: Unix, Windows
:synopsis: The Atteq Service Layer. Service for exposing data to clients. Just provides DB access, feeds access and \
other various aspects of service applications.
.. moduleauthor:: Martin Babka <babka@atteq.com>
"""
from __future__ import unicode_literals
__version__ = '0.15.3'
from flask import Config
from injector import Module
from zsl.application.initialization_context import InitializationContext as ApplicationContext
from zsl.utils.injection_helper import inject
from zsl.application.service_application import ServiceApplication
Zsl = ServiceApplication
# placeholder for default value used in function declaration for arguments which will be injected
Injected = None
<commit_msg>Increment version after unit testing refactoring<commit_after>"""
:mod:`zsl` -- zsl module
========================
Main service module.
:platform: Unix, Windows
:synopsis: The Atteq Service Layer. Service for exposing data to clients. Just provides DB access, feeds access and \
other various aspects of service applications.
.. moduleauthor:: Martin Babka <babka@atteq.com>
"""
from __future__ import unicode_literals
__version__ = '0.15.5'
from flask import Config
from injector import Module
from zsl.application.initialization_context import InitializationContext as ApplicationContext
from zsl.utils.injection_helper import inject
from zsl.application.service_application import ServiceApplication
Zsl = ServiceApplication
# placeholder for default value used in function declaration for arguments which will be injected
Injected = None
|
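The change above is only a bump of the module-level __version__ string. When such a string needs to be read without importing the package (for example in a setup script), a common pattern is a small regex over the __init__.py source; a hedged sketch, with the path given only as an example:

import re

def read_version(init_path):
    # Pull __version__ = '<value>' out of a module source file without importing it.
    with open(init_path) as fh:
        match = re.search(r"__version__\s*=\s*'([^']+)'", fh.read())
    return match.group(1) if match else None

# read_version("zsl/__init__.py") would return '0.15.5' for the file shown above.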
2448e6ab81f8a2a0b320a07b42a3f8707ec918cb
|
chartflo/apps.py
|
chartflo/apps.py
|
from __future__ import unicode_literals
import importlib
from goerr import err
from django.apps import AppConfig
GENERATORS = {}
cf = None
def load_generator(modname, subgenerator=None):
try:
path = modname + ".chartflo"
if subgenerator is not None:
path = path + "." + subgenerator
mod = importlib.import_module(path)
generator = getattr(mod, "run")
return generator
except ImportError as e:
if "No module named" not in str(e):
err.new(e)
return None
except Exception as e:
err.new(e, load_generator, "Error loading module")
class ChartfloConfig(AppConfig):
name = 'chartflo'
verbose_name = "Chartflo"
def ready(self):
"""
Load generators and initialize class instance
"""
global GENERATORS, cf
from django.conf import settings
apps = settings.INSTALLED_APPS
generators = {}
for app in apps:
try:
res = load_generator(app)
if res is not None:
generators[app] = res
except Exception as e:
err.new(e)
GENERATORS = generators
# Initialize class instance
from chartflo.engine import ChartFlo
cf = ChartFlo()
if err.exists:
err.trace()
|
from __future__ import unicode_literals
import importlib
from goerr import err
from django.apps import AppConfig
from chartflo.engine import ChartFlo
GENERATORS = {}
cf = ChartFlo()
def load_generator(modname, subgenerator=None):
try:
path = modname + ".chartflo"
if subgenerator is not None:
path = path + "." + subgenerator
mod = importlib.import_module(path)
generator = getattr(mod, "run")
return generator
except ImportError as e:
if "No module named" not in str(e):
err.new(e)
return None
except Exception as e:
err.new(e, load_generator, "Error loading module")
class ChartfloConfig(AppConfig):
name = 'chartflo'
verbose_name = "Chartflo"
def ready(self):
"""
Load generators and initialize class instance
"""
global GENERATORS, cf
from django.conf import settings
apps = settings.INSTALLED_APPS
generators = {}
for app in apps:
try:
res = load_generator(app)
if res is not None:
generators[app] = res
except Exception as e:
err.new(e, self.ready,
"Can not initialize Chartflo generators")
GENERATORS = generators
if err.exists:
err.trace()
|
Fix in app initialization for generators
|
Fix in app initialization for generators
|
Python
|
mit
|
synw/django-chartflo,synw/django-chartflo,synw/django-chartflo
|
from __future__ import unicode_literals
import importlib
from goerr import err
from django.apps import AppConfig
GENERATORS = {}
cf = None
def load_generator(modname, subgenerator=None):
try:
path = modname + ".chartflo"
if subgenerator is not None:
path = path + "." + subgenerator
mod = importlib.import_module(path)
generator = getattr(mod, "run")
return generator
except ImportError as e:
if "No module named" not in str(e):
err.new(e)
return None
except Exception as e:
err.new(e, load_generator, "Error loading module")
class ChartfloConfig(AppConfig):
name = 'chartflo'
verbose_name = "Chartflo"
def ready(self):
"""
Load generators and initialize class instance
"""
global GENERATORS, cf
from django.conf import settings
apps = settings.INSTALLED_APPS
generators = {}
for app in apps:
try:
res = load_generator(app)
if res is not None:
generators[app] = res
except Exception as e:
err.new(e)
GENERATORS = generators
# Initialize class instance
from chartflo.engine import ChartFlo
cf = ChartFlo()
if err.exists:
err.trace()
Fix in app initialization for generators
|
from __future__ import unicode_literals
import importlib
from goerr import err
from django.apps import AppConfig
from chartflo.engine import ChartFlo
GENERATORS = {}
cf = ChartFlo()
def load_generator(modname, subgenerator=None):
try:
path = modname + ".chartflo"
if subgenerator is not None:
path = path + "." + subgenerator
mod = importlib.import_module(path)
generator = getattr(mod, "run")
return generator
except ImportError as e:
if "No module named" not in str(e):
err.new(e)
return None
except Exception as e:
err.new(e, load_generator, "Error loading module")
class ChartfloConfig(AppConfig):
name = 'chartflo'
verbose_name = "Chartflo"
def ready(self):
"""
Load generators and initialize class instance
"""
global GENERATORS, cf
from django.conf import settings
apps = settings.INSTALLED_APPS
generators = {}
for app in apps:
try:
res = load_generator(app)
if res is not None:
generators[app] = res
except Exception as e:
err.new(e, self.ready,
"Can not initialize Chartflo generators")
GENERATORS = generators
if err.exists:
err.trace()
|
<commit_before>from __future__ import unicode_literals
import importlib
from goerr import err
from django.apps import AppConfig
GENERATORS = {}
cf = None
def load_generator(modname, subgenerator=None):
try:
path = modname + ".chartflo"
if subgenerator is not None:
path = path + "." + subgenerator
mod = importlib.import_module(path)
generator = getattr(mod, "run")
return generator
except ImportError as e:
if "No module named" not in str(e):
err.new(e)
return None
except Exception as e:
err.new(e, load_generator, "Error loading module")
class ChartfloConfig(AppConfig):
name = 'chartflo'
verbose_name = "Chartflo"
def ready(self):
"""
Load generators and initialize class instance
"""
global GENERATORS, cf
from django.conf import settings
apps = settings.INSTALLED_APPS
generators = {}
for app in apps:
try:
res = load_generator(app)
if res is not None:
generators[app] = res
except Exception as e:
err.new(e)
GENERATORS = generators
# Initialize class instance
from chartflo.engine import ChartFlo
cf = ChartFlo()
if err.exists:
err.trace()
<commit_msg>Fix in app initialization for generators<commit_after>
|
from __future__ import unicode_literals
import importlib
from goerr import err
from django.apps import AppConfig
from chartflo.engine import ChartFlo
GENERATORS = {}
cf = ChartFlo()
def load_generator(modname, subgenerator=None):
try:
path = modname + ".chartflo"
if subgenerator is not None:
path = path + "." + subgenerator
mod = importlib.import_module(path)
generator = getattr(mod, "run")
return generator
except ImportError as e:
if "No module named" not in str(e):
err.new(e)
return None
except Exception as e:
err.new(e, load_generator, "Error loading module")
class ChartfloConfig(AppConfig):
name = 'chartflo'
verbose_name = "Chartflo"
def ready(self):
"""
Load generators and initialize class instance
"""
global GENERATORS, cf
from django.conf import settings
apps = settings.INSTALLED_APPS
generators = {}
for app in apps:
try:
res = load_generator(app)
if res is not None:
generators[app] = res
except Exception as e:
err.new(e, self.ready,
"Can not initialize Chartflo generators")
GENERATORS = generators
if err.exists:
err.trace()
|
from __future__ import unicode_literals
import importlib
from goerr import err
from django.apps import AppConfig
GENERATORS = {}
cf = None
def load_generator(modname, subgenerator=None):
try:
path = modname + ".chartflo"
if subgenerator is not None:
path = path + "." + subgenerator
mod = importlib.import_module(path)
generator = getattr(mod, "run")
return generator
except ImportError as e:
if "No module named" not in str(e):
err.new(e)
return None
except Exception as e:
err.new(e, load_generator, "Error loading module")
class ChartfloConfig(AppConfig):
name = 'chartflo'
verbose_name = "Chartflo"
def ready(self):
"""
Load generators and initialize class instance
"""
global GENERATORS, cf
from django.conf import settings
apps = settings.INSTALLED_APPS
generators = {}
for app in apps:
try:
res = load_generator(app)
if res is not None:
generators[app] = res
except Exception as e:
err.new(e)
GENERATORS = generators
# Initialize class instance
from chartflo.engine import ChartFlo
cf = ChartFlo()
if err.exists:
err.trace()
Fix in app initialization for generators
from __future__ import unicode_literals
import importlib
from goerr import err
from django.apps import AppConfig
from chartflo.engine import ChartFlo
GENERATORS = {}
cf = ChartFlo()
def load_generator(modname, subgenerator=None):
try:
path = modname + ".chartflo"
if subgenerator is not None:
path = path + "." + subgenerator
mod = importlib.import_module(path)
generator = getattr(mod, "run")
return generator
except ImportError as e:
if "No module named" not in str(e):
err.new(e)
return None
except Exception as e:
err.new(e, load_generator, "Error loading module")
class ChartfloConfig(AppConfig):
name = 'chartflo'
verbose_name = "Chartflo"
def ready(self):
"""
Load generators and initialize class instance
"""
global GENERATORS, cf
from django.conf import settings
apps = settings.INSTALLED_APPS
generators = {}
for app in apps:
try:
res = load_generator(app)
if res is not None:
generators[app] = res
except Exception as e:
err.new(e, self.ready,
"Can not initialize Chartflo generators")
GENERATORS = generators
if err.exists:
err.trace()
|
<commit_before>from __future__ import unicode_literals
import importlib
from goerr import err
from django.apps import AppConfig
GENERATORS = {}
cf = None
def load_generator(modname, subgenerator=None):
try:
path = modname + ".chartflo"
if subgenerator is not None:
path = path + "." + subgenerator
mod = importlib.import_module(path)
generator = getattr(mod, "run")
return generator
except ImportError as e:
if "No module named" not in str(e):
err.new(e)
return None
except Exception as e:
err.new(e, load_generator, "Error loading module")
class ChartfloConfig(AppConfig):
name = 'chartflo'
verbose_name = "Chartflo"
def ready(self):
"""
Load generators and initialize class instance
"""
global GENERATORS, cf
from django.conf import settings
apps = settings.INSTALLED_APPS
generators = {}
for app in apps:
try:
res = load_generator(app)
if res is not None:
generators[app] = res
except Exception as e:
err.new(e)
GENERATORS = generators
# Initialize class instance
from chartflo.engine import ChartFlo
cf = ChartFlo()
if err.exists:
err.trace()
<commit_msg>Fix in app initialization for generators<commit_after>from __future__ import unicode_literals
import importlib
from goerr import err
from django.apps import AppConfig
from chartflo.engine import ChartFlo
GENERATORS = {}
cf = ChartFlo()
def load_generator(modname, subgenerator=None):
try:
path = modname + ".chartflo"
if subgenerator is not None:
path = path + "." + subgenerator
mod = importlib.import_module(path)
generator = getattr(mod, "run")
return generator
except ImportError as e:
if "No module named" not in str(e):
err.new(e)
return None
except Exception as e:
err.new(e, load_generator, "Error loading module")
class ChartfloConfig(AppConfig):
name = 'chartflo'
verbose_name = "Chartflo"
def ready(self):
"""
Load generators and initialize class instance
"""
global GENERATORS, cf
from django.conf import settings
apps = settings.INSTALLED_APPS
generators = {}
for app in apps:
try:
res = load_generator(app)
if res is not None:
generators[app] = res
except Exception as e:
err.new(e, self.ready,
"Can not initialize Chartflo generators")
GENERATORS = generators
if err.exists:
err.trace()
|
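The AppConfig.ready hook above discovers optional per-app generators by importing "<app>.chartflo" with importlib and grabbing its run callable, treating a missing module as a normal case rather than an error. A standalone sketch of that lookup pattern (module names below are hypothetical):

import importlib

def load_run_callable(modname, submodule="chartflo"):
    # Return modname.<submodule>.run if the module exists, otherwise None.
    try:
        mod = importlib.import_module(modname + "." + submodule)
    except ImportError:
        # The app simply does not ship such a module; that is not an error.
        return None
    return getattr(mod, "run", None)

print(load_run_callable("no_such_app"))  # None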
758315053a4aaa4ad027f8edd71ec6953d058300
|
config/urls.py
|
config/urls.py
|
"""All available endpoints of the chaospizza web project."""
# pylint: disable=C0111
from django.conf import settings
from django.conf.urls import include, url
# from django.conf.urls.static import static
from django.contrib import admin
from django.http import HttpResponse
# from django.views.generic import TemplateView
from django.views import defaults as default_views
def home(request):
"""Django view which returns simple hello world text."""
print(request)
return HttpResponse("hi")
urlpatterns = [
url(r'^$', home),
url(r'^admin/', admin.site.urls),
url(r'^orders/', include('orders.urls')),
url(r'^menus/', include('menus.urls')),
]
if settings.DEBUG:
# This allows the error pages to be debugged during development, just visit
# these url in browser to see how these error pages look like.
urlpatterns += [
url(r'^400/$', default_views.bad_request, kwargs={'exception': Exception('Bad Request!')}),
url(r'^403/$', default_views.permission_denied, kwargs={'exception': Exception('Permission Denied')}),
url(r'^404/$', default_views.page_not_found, kwargs={'exception': Exception('Page not Found')}),
url(r'^500/$', default_views.server_error),
]
if 'debug_toolbar' in settings.INSTALLED_APPS:
import debug_toolbar
urlpatterns = [
url(r'^__debug__/', include(debug_toolbar.urls)),
] + urlpatterns
|
# pylint: disable=C0111
"""All available endpoints of the chaospizza web project."""
from django.conf import settings
from django.conf.urls import include, url
# from django.conf.urls.static import static
from django.contrib import admin
from django.http import HttpResponse
# from django.views.generic import TemplateView
from django.views import defaults as default_views
def home(request):
"""Django view which returns simple hello world text."""
print(request)
return HttpResponse("hi")
urlpatterns = [
url(r'^$', home),
url(r'^admin/', admin.site.urls),
url(r'^orders/', include('orders.urls')),
url(r'^menus/', include('menus.urls')),
]
if settings.DEBUG:
# This allows the error pages to be debugged during development, just visit
# these url in browser to see how these error pages look like.
urlpatterns += [
url(r'^400/$', default_views.bad_request, kwargs={'exception': Exception('Bad Request!')}),
url(r'^403/$', default_views.permission_denied, kwargs={'exception': Exception('Permission Denied')}),
url(r'^404/$', default_views.page_not_found, kwargs={'exception': Exception('Page not Found')}),
url(r'^500/$', default_views.server_error),
]
if 'debug_toolbar' in settings.INSTALLED_APPS:
import debug_toolbar
urlpatterns = [
url(r'^__debug__/', include(debug_toolbar.urls)),
] + urlpatterns
|
Fix module docstring for url config
|
docs(project): Fix module docstring for url config
|
Python
|
mit
|
chaosdorf/chaospizza,chaosdorf/chaospizza,chaosdorf/chaospizza
|
"""All available endpoints of the chaospizza web project."""
# pylint: disable=C0111
from django.conf import settings
from django.conf.urls import include, url
# from django.conf.urls.static import static
from django.contrib import admin
from django.http import HttpResponse
# from django.views.generic import TemplateView
from django.views import defaults as default_views
def home(request):
"""Django view which returns simple hello world text."""
print(request)
return HttpResponse("hi")
urlpatterns = [
url(r'^$', home),
url(r'^admin/', admin.site.urls),
url(r'^orders/', include('orders.urls')),
url(r'^menus/', include('menus.urls')),
]
if settings.DEBUG:
# This allows the error pages to be debugged during development, just visit
# these url in browser to see how these error pages look like.
urlpatterns += [
url(r'^400/$', default_views.bad_request, kwargs={'exception': Exception('Bad Request!')}),
url(r'^403/$', default_views.permission_denied, kwargs={'exception': Exception('Permission Denied')}),
url(r'^404/$', default_views.page_not_found, kwargs={'exception': Exception('Page not Found')}),
url(r'^500/$', default_views.server_error),
]
if 'debug_toolbar' in settings.INSTALLED_APPS:
import debug_toolbar
urlpatterns = [
url(r'^__debug__/', include(debug_toolbar.urls)),
] + urlpatterns
docs(project): Fix module docstring for url config
|
# pylint: disable=C0111
"""All available endpoints of the chaospizza web project."""
from django.conf import settings
from django.conf.urls import include, url
# from django.conf.urls.static import static
from django.contrib import admin
from django.http import HttpResponse
# from django.views.generic import TemplateView
from django.views import defaults as default_views
def home(request):
"""Django view which returns simple hello world text."""
print(request)
return HttpResponse("hi")
urlpatterns = [
url(r'^$', home),
url(r'^admin/', admin.site.urls),
url(r'^orders/', include('orders.urls')),
url(r'^menus/', include('menus.urls')),
]
if settings.DEBUG:
# This allows the error pages to be debugged during development, just visit
# these url in browser to see how these error pages look like.
urlpatterns += [
url(r'^400/$', default_views.bad_request, kwargs={'exception': Exception('Bad Request!')}),
url(r'^403/$', default_views.permission_denied, kwargs={'exception': Exception('Permission Denied')}),
url(r'^404/$', default_views.page_not_found, kwargs={'exception': Exception('Page not Found')}),
url(r'^500/$', default_views.server_error),
]
if 'debug_toolbar' in settings.INSTALLED_APPS:
import debug_toolbar
urlpatterns = [
url(r'^__debug__/', include(debug_toolbar.urls)),
] + urlpatterns
|
<commit_before>"""All available endpoints of the chaospizza web project."""
# pylint: disable=C0111
from django.conf import settings
from django.conf.urls import include, url
# from django.conf.urls.static import static
from django.contrib import admin
from django.http import HttpResponse
# from django.views.generic import TemplateView
from django.views import defaults as default_views
def home(request):
"""Django view which returns simple hello world text."""
print(request)
return HttpResponse("hi")
urlpatterns = [
url(r'^$', home),
url(r'^admin/', admin.site.urls),
url(r'^orders/', include('orders.urls')),
url(r'^menus/', include('menus.urls')),
]
if settings.DEBUG:
# This allows the error pages to be debugged during development, just visit
# these url in browser to see how these error pages look like.
urlpatterns += [
url(r'^400/$', default_views.bad_request, kwargs={'exception': Exception('Bad Request!')}),
url(r'^403/$', default_views.permission_denied, kwargs={'exception': Exception('Permission Denied')}),
url(r'^404/$', default_views.page_not_found, kwargs={'exception': Exception('Page not Found')}),
url(r'^500/$', default_views.server_error),
]
if 'debug_toolbar' in settings.INSTALLED_APPS:
import debug_toolbar
urlpatterns = [
url(r'^__debug__/', include(debug_toolbar.urls)),
] + urlpatterns
<commit_msg>docs(project): Fix module docstring for url config<commit_after>
|
# pylint: disable=C0111
"""All available endpoints of the chaospizza web project."""
from django.conf import settings
from django.conf.urls import include, url
# from django.conf.urls.static import static
from django.contrib import admin
from django.http import HttpResponse
# from django.views.generic import TemplateView
from django.views import defaults as default_views
def home(request):
"""Django view which returns simple hello world text."""
print(request)
return HttpResponse("hi")
urlpatterns = [
url(r'^$', home),
url(r'^admin/', admin.site.urls),
url(r'^orders/', include('orders.urls')),
url(r'^menus/', include('menus.urls')),
]
if settings.DEBUG:
# This allows the error pages to be debugged during development, just visit
# these url in browser to see how these error pages look like.
urlpatterns += [
url(r'^400/$', default_views.bad_request, kwargs={'exception': Exception('Bad Request!')}),
url(r'^403/$', default_views.permission_denied, kwargs={'exception': Exception('Permission Denied')}),
url(r'^404/$', default_views.page_not_found, kwargs={'exception': Exception('Page not Found')}),
url(r'^500/$', default_views.server_error),
]
if 'debug_toolbar' in settings.INSTALLED_APPS:
import debug_toolbar
urlpatterns = [
url(r'^__debug__/', include(debug_toolbar.urls)),
] + urlpatterns
|
"""All available endpoints of the chaospizza web project."""
# pylint: disable=C0111
from django.conf import settings
from django.conf.urls import include, url
# from django.conf.urls.static import static
from django.contrib import admin
from django.http import HttpResponse
# from django.views.generic import TemplateView
from django.views import defaults as default_views
def home(request):
"""Django view which returns simple hello world text."""
print(request)
return HttpResponse("hi")
urlpatterns = [
url(r'^$', home),
url(r'^admin/', admin.site.urls),
url(r'^orders/', include('orders.urls')),
url(r'^menus/', include('menus.urls')),
]
if settings.DEBUG:
# This allows the error pages to be debugged during development, just visit
# these url in browser to see how these error pages look like.
urlpatterns += [
url(r'^400/$', default_views.bad_request, kwargs={'exception': Exception('Bad Request!')}),
url(r'^403/$', default_views.permission_denied, kwargs={'exception': Exception('Permission Denied')}),
url(r'^404/$', default_views.page_not_found, kwargs={'exception': Exception('Page not Found')}),
url(r'^500/$', default_views.server_error),
]
if 'debug_toolbar' in settings.INSTALLED_APPS:
import debug_toolbar
urlpatterns = [
url(r'^__debug__/', include(debug_toolbar.urls)),
] + urlpatterns
docs(project): Fix module docstring for url config
# pylint: disable=C0111
"""All available endpoints of the chaospizza web project."""
from django.conf import settings
from django.conf.urls import include, url
# from django.conf.urls.static import static
from django.contrib import admin
from django.http import HttpResponse
# from django.views.generic import TemplateView
from django.views import defaults as default_views
def home(request):
"""Django view which returns simple hello world text."""
print(request)
return HttpResponse("hi")
urlpatterns = [
url(r'^$', home),
url(r'^admin/', admin.site.urls),
url(r'^orders/', include('orders.urls')),
url(r'^menus/', include('menus.urls')),
]
if settings.DEBUG:
# This allows the error pages to be debugged during development, just visit
# these url in browser to see how these error pages look like.
urlpatterns += [
url(r'^400/$', default_views.bad_request, kwargs={'exception': Exception('Bad Request!')}),
url(r'^403/$', default_views.permission_denied, kwargs={'exception': Exception('Permission Denied')}),
url(r'^404/$', default_views.page_not_found, kwargs={'exception': Exception('Page not Found')}),
url(r'^500/$', default_views.server_error),
]
if 'debug_toolbar' in settings.INSTALLED_APPS:
import debug_toolbar
urlpatterns = [
url(r'^__debug__/', include(debug_toolbar.urls)),
] + urlpatterns
|
<commit_before>"""All available endpoints of the chaospizza web project."""
# pylint: disable=C0111
from django.conf import settings
from django.conf.urls import include, url
# from django.conf.urls.static import static
from django.contrib import admin
from django.http import HttpResponse
# from django.views.generic import TemplateView
from django.views import defaults as default_views
def home(request):
"""Django view which returns simple hello world text."""
print(request)
return HttpResponse("hi")
urlpatterns = [
url(r'^$', home),
url(r'^admin/', admin.site.urls),
url(r'^orders/', include('orders.urls')),
url(r'^menus/', include('menus.urls')),
]
if settings.DEBUG:
# This allows the error pages to be debugged during development, just visit
# these url in browser to see how these error pages look like.
urlpatterns += [
url(r'^400/$', default_views.bad_request, kwargs={'exception': Exception('Bad Request!')}),
url(r'^403/$', default_views.permission_denied, kwargs={'exception': Exception('Permission Denied')}),
url(r'^404/$', default_views.page_not_found, kwargs={'exception': Exception('Page not Found')}),
url(r'^500/$', default_views.server_error),
]
if 'debug_toolbar' in settings.INSTALLED_APPS:
import debug_toolbar
urlpatterns = [
url(r'^__debug__/', include(debug_toolbar.urls)),
] + urlpatterns
<commit_msg>docs(project): Fix module docstring for url config<commit_after># pylint: disable=C0111
"""All available endpoints of the chaospizza web project."""
from django.conf import settings
from django.conf.urls import include, url
# from django.conf.urls.static import static
from django.contrib import admin
from django.http import HttpResponse
# from django.views.generic import TemplateView
from django.views import defaults as default_views
def home(request):
"""Django view which returns simple hello world text."""
print(request)
return HttpResponse("hi")
urlpatterns = [
url(r'^$', home),
url(r'^admin/', admin.site.urls),
url(r'^orders/', include('orders.urls')),
url(r'^menus/', include('menus.urls')),
]
if settings.DEBUG:
# This allows the error pages to be debugged during development, just visit
# these url in browser to see how these error pages look like.
urlpatterns += [
url(r'^400/$', default_views.bad_request, kwargs={'exception': Exception('Bad Request!')}),
url(r'^403/$', default_views.permission_denied, kwargs={'exception': Exception('Permission Denied')}),
url(r'^404/$', default_views.page_not_found, kwargs={'exception': Exception('Page not Found')}),
url(r'^500/$', default_views.server_error),
]
if 'debug_toolbar' in settings.INSTALLED_APPS:
import debug_toolbar
urlpatterns = [
url(r'^__debug__/', include(debug_toolbar.urls)),
] + urlpatterns
|
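The fix above only swaps the order of the pylint directive and the module docstring; because a comment is not a statement, the string literal that follows it is still picked up as the module docstring. A quick check of that fact with the standard library (the source string below is made up):

import ast

source = '# pylint: disable=C0111\n"""Docstring survives the leading comment."""\n'
print(ast.get_docstring(ast.parse(source)))  # Docstring survives the leading comment.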
462397f86c72d8746daa35756d7a26694c2cb557
|
huxley/settings/conference.py
|
huxley/settings/conference.py
|
# Copyright (c) 2011-2015 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
SESSION = 66
|
# Copyright (c) 2011-2015 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
SESSION = 67
|
Change to the current session for registration opening
|
Change to the current session for registration opening
|
Python
|
bsd-3-clause
|
bmun/huxley,bmun/huxley,bmun/huxley,bmun/huxley
|
# Copyright (c) 2011-2015 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
SESSION = 66
Change to the current session for registration opening
|
# Copyright (c) 2011-2015 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
SESSION = 67
|
<commit_before># Copyright (c) 2011-2015 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
SESSION = 66
<commit_msg>Change to the current session for registration opening<commit_after>
|
# Copyright (c) 2011-2015 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
SESSION = 67
|
# Copyright (c) 2011-2015 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
SESSION = 66
Change to the current session for registration opening
# Copyright (c) 2011-2015 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
SESSION = 67
|
<commit_before># Copyright (c) 2011-2015 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
SESSION = 66
<commit_msg>Change to the current session for registration opening<commit_after># Copyright (c) 2011-2015 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
SESSION = 67
|
43bc1f2670b722d5fb1b0e34a0b098fd2f41bd77
|
icekit/plugins/image/admin.py
|
icekit/plugins/image/admin.py
|
from django.contrib import admin
from icekit.utils.admin.mixins import ThumbnailAdminMixin
from . import models
class ImageAdmin(ThumbnailAdminMixin, admin.ModelAdmin):
list_display = ['description', 'title', 'thumbnail']
list_display_links = ['description', 'thumbnail']
filter_horizontal = ['categories', ]
list_filter = ['categories', 'is_active', ]
# ThumbnailAdminMixin attributes
thumbnail_field = 'image'
thumbnail_options = {
'size': (150, 150),
}
def title(self, image):
return image.title
def description(self, image):
return str(image)
admin.site.register(models.Image, ImageAdmin)
|
from django.contrib import admin
from icekit.utils.admin.mixins import ThumbnailAdminMixin
from . import models
class ImageAdmin(ThumbnailAdminMixin, admin.ModelAdmin):
list_display = ['thumbnail', 'alt_text', 'title', ]
list_display_links = ['alt_text', 'thumbnail']
filter_horizontal = ['categories', ]
list_filter = ['categories', 'is_active', ]
search_fields = ['title', 'alt_text', 'caption', 'admin_notes', ]
# ThumbnailAdminMixin attributes
thumbnail_field = 'image'
thumbnail_options = {
'size': (150, 150),
}
admin.site.register(models.Image, ImageAdmin)
|
Add search options for images, reorder field listing, use field names in list display rather than properties.
|
Add search options for images, reorder field listing, use field names in list display rather than properties.
|
Python
|
mit
|
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit
|
from django.contrib import admin
from icekit.utils.admin.mixins import ThumbnailAdminMixin
from . import models
class ImageAdmin(ThumbnailAdminMixin, admin.ModelAdmin):
list_display = ['description', 'title', 'thumbnail']
list_display_links = ['description', 'thumbnail']
filter_horizontal = ['categories', ]
list_filter = ['categories', 'is_active', ]
# ThumbnailAdminMixin attributes
thumbnail_field = 'image'
thumbnail_options = {
'size': (150, 150),
}
def title(self, image):
return image.title
def description(self, image):
return str(image)
admin.site.register(models.Image, ImageAdmin)
Add search options for images, reorder field listing, use field names in list display rather than properties.
|
from django.contrib import admin
from icekit.utils.admin.mixins import ThumbnailAdminMixin
from . import models
class ImageAdmin(ThumbnailAdminMixin, admin.ModelAdmin):
list_display = ['thumbnail', 'alt_text', 'title', ]
list_display_links = ['alt_text', 'thumbnail']
filter_horizontal = ['categories', ]
list_filter = ['categories', 'is_active', ]
search_fields = ['title', 'alt_text', 'caption', 'admin_notes', ]
# ThumbnailAdminMixin attributes
thumbnail_field = 'image'
thumbnail_options = {
'size': (150, 150),
}
admin.site.register(models.Image, ImageAdmin)
|
<commit_before>from django.contrib import admin
from icekit.utils.admin.mixins import ThumbnailAdminMixin
from . import models
class ImageAdmin(ThumbnailAdminMixin, admin.ModelAdmin):
list_display = ['description', 'title', 'thumbnail']
list_display_links = ['description', 'thumbnail']
filter_horizontal = ['categories', ]
list_filter = ['categories', 'is_active', ]
# ThumbnailAdminMixin attributes
thumbnail_field = 'image'
thumbnail_options = {
'size': (150, 150),
}
def title(self, image):
return image.title
def description(self, image):
return str(image)
admin.site.register(models.Image, ImageAdmin)
<commit_msg>Add search options for images, reorder field listing, use field names in list display rather than properties.<commit_after>
|
from django.contrib import admin
from icekit.utils.admin.mixins import ThumbnailAdminMixin
from . import models
class ImageAdmin(ThumbnailAdminMixin, admin.ModelAdmin):
list_display = ['thumbnail', 'alt_text', 'title', ]
list_display_links = ['alt_text', 'thumbnail']
filter_horizontal = ['categories', ]
list_filter = ['categories', 'is_active', ]
search_fields = ['title', 'alt_text', 'caption', 'admin_notes', ]
# ThumbnailAdminMixin attributes
thumbnail_field = 'image'
thumbnail_options = {
'size': (150, 150),
}
admin.site.register(models.Image, ImageAdmin)
|
from django.contrib import admin
from icekit.utils.admin.mixins import ThumbnailAdminMixin
from . import models
class ImageAdmin(ThumbnailAdminMixin, admin.ModelAdmin):
list_display = ['description', 'title', 'thumbnail']
list_display_links = ['description', 'thumbnail']
filter_horizontal = ['categories', ]
list_filter = ['categories', 'is_active', ]
# ThumbnailAdminMixin attributes
thumbnail_field = 'image'
thumbnail_options = {
'size': (150, 150),
}
def title(self, image):
return image.title
def description(self, image):
return str(image)
admin.site.register(models.Image, ImageAdmin)
Add search options for images, reorder field listing, use field names in list display rather than properties.
from django.contrib import admin
from icekit.utils.admin.mixins import ThumbnailAdminMixin
from . import models
class ImageAdmin(ThumbnailAdminMixin, admin.ModelAdmin):
list_display = ['thumbnail', 'alt_text', 'title', ]
list_display_links = ['alt_text', 'thumbnail']
filter_horizontal = ['categories', ]
list_filter = ['categories', 'is_active', ]
search_fields = ['title', 'alt_text', 'caption', 'admin_notes', ]
# ThumbnailAdminMixin attributes
thumbnail_field = 'image'
thumbnail_options = {
'size': (150, 150),
}
admin.site.register(models.Image, ImageAdmin)
|
<commit_before>from django.contrib import admin
from icekit.utils.admin.mixins import ThumbnailAdminMixin
from . import models
class ImageAdmin(ThumbnailAdminMixin, admin.ModelAdmin):
list_display = ['description', 'title', 'thumbnail']
list_display_links = ['description', 'thumbnail']
filter_horizontal = ['categories', ]
list_filter = ['categories', 'is_active', ]
# ThumbnailAdminMixin attributes
thumbnail_field = 'image'
thumbnail_options = {
'size': (150, 150),
}
def title(self, image):
return image.title
def description(self, image):
return str(image)
admin.site.register(models.Image, ImageAdmin)
<commit_msg>Add search options for images, reorder field listing, use field names in list display rather than properties.<commit_after>from django.contrib import admin
from icekit.utils.admin.mixins import ThumbnailAdminMixin
from . import models
class ImageAdmin(ThumbnailAdminMixin, admin.ModelAdmin):
list_display = ['thumbnail', 'alt_text', 'title', ]
list_display_links = ['alt_text', 'thumbnail']
filter_horizontal = ['categories', ]
list_filter = ['categories', 'is_active', ]
search_fields = ['title', 'alt_text', 'caption', 'admin_notes', ]
# ThumbnailAdminMixin attributes
thumbnail_field = 'image'
thumbnail_options = {
'size': (150, 150),
}
admin.site.register(models.Image, ImageAdmin)
|
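The admin change above lists real model fields instead of computed properties and adds search_fields so the change list becomes searchable. A minimal ModelAdmin sketch in the same spirit (the Article model, its fields, and the app label are hypothetical; a configured Django project is assumed):

from django.contrib import admin
from django.db import models

class Article(models.Model):
    title = models.CharField(max_length=200)
    alt_text = models.CharField(max_length=200, blank=True)
    is_active = models.BooleanField(default=True)

    class Meta:
        app_label = "demo"

@admin.register(Article)
class ArticleAdmin(admin.ModelAdmin):
    # Plain field names keep sorting and searching working out of the box.
    list_display = ["alt_text", "title"]
    search_fields = ["title", "alt_text"]
    list_filter = ["is_active"]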
5f01b9da3cfa899037ac9f7c3262a08c074b5bf9
|
bedrock/stories/urls.py
|
bedrock/stories/urls.py
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
from bedrock.mozorg.util import page
urlpatterns = (
page("", "stories/landing.html"),
page("art-of-engagement/", "stories/articles/art-of-engagement.html"),
page("build-together/", "stories/articles/build-together.html"),
page("community-champion/", "stories/articles/community-champion.html"),
)
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
from bedrock.mozorg.util import page
from bedrock.redirects.util import redirect
urlpatterns = (
page("", "stories/landing.html"),
page("art-of-engagement/", "stories/articles/art-of-engagement.html"),
page("build-together/", "stories/articles/build-together.html"),
page("community-champion/", "stories/articles/community-champion.html"),
# REMOVE WHEN REAL PAGE GOES LIVE
redirect(r"^joy-of-color/?$", "https://blog.mozilla.org/en/products/firefox/firefox-news/independent-voices/", permanent=False),
)
|
Add temporary redirect for stories URL
|
Add temporary redirect for stories URL
|
Python
|
mpl-2.0
|
mozilla/bedrock,mozilla/bedrock,alexgibson/bedrock,craigcook/bedrock,craigcook/bedrock,alexgibson/bedrock,craigcook/bedrock,alexgibson/bedrock,craigcook/bedrock,alexgibson/bedrock,mozilla/bedrock,mozilla/bedrock
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
from bedrock.mozorg.util import page
urlpatterns = (
page("", "stories/landing.html"),
page("art-of-engagement/", "stories/articles/art-of-engagement.html"),
page("build-together/", "stories/articles/build-together.html"),
page("community-champion/", "stories/articles/community-champion.html"),
)
Add temporary redirect for stories URL
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
from bedrock.mozorg.util import page
from bedrock.redirects.util import redirect
urlpatterns = (
page("", "stories/landing.html"),
page("art-of-engagement/", "stories/articles/art-of-engagement.html"),
page("build-together/", "stories/articles/build-together.html"),
page("community-champion/", "stories/articles/community-champion.html"),
# REMOVE WHEN REAL PAGE GOES LIVE
redirect(r"^joy-of-color/?$", "https://blog.mozilla.org/en/products/firefox/firefox-news/independent-voices/", permanent=False),
)
|
<commit_before># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
from bedrock.mozorg.util import page
urlpatterns = (
page("", "stories/landing.html"),
page("art-of-engagement/", "stories/articles/art-of-engagement.html"),
page("build-together/", "stories/articles/build-together.html"),
page("community-champion/", "stories/articles/community-champion.html"),
)
<commit_msg>Add temporary redirect for stories URL<commit_after>
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
from bedrock.mozorg.util import page
from bedrock.redirects.util import redirect
urlpatterns = (
page("", "stories/landing.html"),
page("art-of-engagement/", "stories/articles/art-of-engagement.html"),
page("build-together/", "stories/articles/build-together.html"),
page("community-champion/", "stories/articles/community-champion.html"),
# REMOVE WHEN REAL PAGE GOES LIVE
redirect(r"^joy-of-color/?$", "https://blog.mozilla.org/en/products/firefox/firefox-news/independent-voices/", permanent=False),
)
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
from bedrock.mozorg.util import page
urlpatterns = (
page("", "stories/landing.html"),
page("art-of-engagement/", "stories/articles/art-of-engagement.html"),
page("build-together/", "stories/articles/build-together.html"),
page("community-champion/", "stories/articles/community-champion.html"),
)
Add temporary redirect for stories URL
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
from bedrock.mozorg.util import page
from bedrock.redirects.util import redirect
urlpatterns = (
page("", "stories/landing.html"),
page("art-of-engagement/", "stories/articles/art-of-engagement.html"),
page("build-together/", "stories/articles/build-together.html"),
page("community-champion/", "stories/articles/community-champion.html"),
# REMOVE WHEN REAL PAGE GOES LIVE
redirect(r"^joy-of-color/?$", "https://blog.mozilla.org/en/products/firefox/firefox-news/independent-voices/", permanent=False),
)
|
<commit_before># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
from bedrock.mozorg.util import page
urlpatterns = (
page("", "stories/landing.html"),
page("art-of-engagement/", "stories/articles/art-of-engagement.html"),
page("build-together/", "stories/articles/build-together.html"),
page("community-champion/", "stories/articles/community-champion.html"),
)
<commit_msg>Add temporary redirect for stories URL<commit_after># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
from bedrock.mozorg.util import page
from bedrock.redirects.util import redirect
urlpatterns = (
page("", "stories/landing.html"),
page("art-of-engagement/", "stories/articles/art-of-engagement.html"),
page("build-together/", "stories/articles/build-together.html"),
page("community-champion/", "stories/articles/community-champion.html"),
# REMOVE WHEN REAL PAGE GOES LIVE
redirect(r"^joy-of-color/?$", "https://blog.mozilla.org/en/products/firefox/firefox-news/independent-voices/", permanent=False),
)
|
6cda221478d3f67bc10ed13eb57854b493f6dbe2
|
integration-test/366-beaches.py
|
integration-test/366-beaches.py
|
# Baker beach, SF
# https://www.openstreetmap.org/way/195638009
assert_has_feature(
18, 41881, 101308, 'landuse',
{ 'kind': 'beach' })
|
# Baker beach, SF
# https://www.openstreetmap.org/relation/6260732
assert_has_feature(
18, 41881, 101308, 'landuse',
{ 'kind': 'beach' })
|
Update test - Baker beach was turned into a relation.
|
Update test - Baker beach was turned into a relation.
|
Python
|
mit
|
mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource
|
# Baker beach, SF
# https://www.openstreetmap.org/way/195638009
assert_has_feature(
18, 41881, 101308, 'landuse',
{ 'kind': 'beach' })
Update test - Baker beach was turned into a relation.
|
# Baker beach, SF
# https://www.openstreetmap.org/relation/6260732
assert_has_feature(
18, 41881, 101308, 'landuse',
{ 'kind': 'beach' })
|
<commit_before># Baker beach, SF
# https://www.openstreetmap.org/way/195638009
assert_has_feature(
18, 41881, 101308, 'landuse',
{ 'kind': 'beach' })
<commit_msg>Update test - Baker beach was turned into a relation.<commit_after>
|
# Baker beach, SF
# https://www.openstreetmap.org/relation/6260732
assert_has_feature(
18, 41881, 101308, 'landuse',
{ 'kind': 'beach' })
|
# Baker beach, SF
# https://www.openstreetmap.org/way/195638009
assert_has_feature(
18, 41881, 101308, 'landuse',
{ 'kind': 'beach' })
Update test - Baker beach was turned into a relation.# Baker beach, SF
# https://www.openstreetmap.org/relation/6260732
assert_has_feature(
18, 41881, 101308, 'landuse',
{ 'kind': 'beach' })
|
<commit_before># Baker beach, SF
# https://www.openstreetmap.org/way/195638009
assert_has_feature(
18, 41881, 101308, 'landuse',
{ 'kind': 'beach' })
<commit_msg>Update test - Baker beach was turned into a relation.<commit_after># Baker beach, SF
# https://www.openstreetmap.org/relation/6260732
assert_has_feature(
18, 41881, 101308, 'landuse',
{ 'kind': 'beach' })
|
f1a6025fb7ba8e69ec52e868ec0f6fc7783aa688
|
qipipe/helpers/xnat_config.py
|
qipipe/helpers/xnat_config.py
|
import os
__all__ = ['default_configuration']
def default_configuration():
"""
Returns the XNAT configuration file location determined as the first file
found in the following precedence order:
1. ``xnat.cfg`` in the home ``.xnat`` subdirectory
2. ``xnat.cfg`` in the home directory
3. ``xnat.cfg`` in the ``/etc`` directory
:return: the configuration location, if any
"""
for f in [CWD_CFG, DOT_CFG, HOME_CFG, ETC_CFG]:
if os.path.exists(f):
return f
CWD_CFG = os.path.join(os.getcwd(), 'xnat.cfg')
"""The XNAT current directory configuration location."""
DOT_CFG = os.path.join(os.path.expanduser('~'), '.xnat', 'xnat.cfg')
"""The XNAT home ``.xnat`` subdirectory configuration location."""
HOME_CFG = os.path.join(os.path.expanduser('~'), 'xnat.cfg')
"""The XNAT home configuration location."""
ETC_CFG = os.path.join('/etc', 'xnat.cfg')
"""The Linux global ``/etc`` XNAT configuration location."""
|
import os
__all__ = ['default_configuration']
def default_configuration():
"""
Returns the XNAT configuration file location determined as the first file
found in the following precedence order:
1. The ``XNAT_CFG`` environment variable, if it is set.
    2. ``xnat.cfg`` in the current working directory
    3. ``xnat.cfg`` in the home ``.xnat`` subdirectory
    4. ``xnat.cfg`` in the home directory
    5. ``xnat.cfg`` in the ``/etc`` directory
:return: the configuration location, if any
"""
cfgs = [CWD_CFG, DOT_CFG, HOME_CFG, ETC_CFG]
env_cfg = os.getenv('XNAT_CFG')
if env_cfg:
cfgs.insert(0, env_cfg)
for f in cfgs:
if os.path.exists(f):
return f
CWD_CFG = os.path.join(os.getcwd(), 'xnat.cfg')
"""The XNAT current directory configuration location."""
DOT_CFG = os.path.join(os.path.expanduser('~'), '.xnat', 'xnat.cfg')
"""The XNAT home ``.xnat`` subdirectory configuration location."""
HOME_CFG = os.path.join(os.path.expanduser('~'), 'xnat.cfg')
"""The XNAT home configuration location."""
ETC_CFG = os.path.join('/etc', 'xnat.cfg')
"""The Linux global ``/etc`` XNAT configuration location."""
|
Add XNAT_CFG env var to config list.
|
Add XNAT_CFG env var to config list.
|
Python
|
bsd-2-clause
|
ohsu-qin/qipipe
|
import os
__all__ = ['default_configuration']
def default_configuration():
"""
Returns the XNAT configuration file location determined as the first file
found in the following precedence order:
1. ``xnat.cfg`` in the home ``.xnat`` subdirectory
2. ``xnat.cfg`` in the home directory
3. ``xnat.cfg`` in the ``/etc`` directory
:return: the configuration location, if any
"""
for f in [CWD_CFG, DOT_CFG, HOME_CFG, ETC_CFG]:
if os.path.exists(f):
return f
CWD_CFG = os.path.join(os.getcwd(), 'xnat.cfg')
"""The XNAT current directory configuration location."""
DOT_CFG = os.path.join(os.path.expanduser('~'), '.xnat', 'xnat.cfg')
"""The XNAT home ``.xnat`` subdirectory configuration location."""
HOME_CFG = os.path.join(os.path.expanduser('~'), 'xnat.cfg')
"""The XNAT home configuration location."""
ETC_CFG = os.path.join('/etc', 'xnat.cfg')
"""The Linux global ``/etc`` XNAT configuration location."""
Add XNAT_CFG env var to config list.
|
import os
__all__ = ['default_configuration']
def default_configuration():
"""
Returns the XNAT configuration file location determined as the first file
found in the following precedence order:
1. The ``XNAT_CFG`` environment variable, if it is set.
    2. ``xnat.cfg`` in the current working directory
    3. ``xnat.cfg`` in the home ``.xnat`` subdirectory
    4. ``xnat.cfg`` in the home directory
    5. ``xnat.cfg`` in the ``/etc`` directory
:return: the configuration location, if any
"""
cfgs = [CWD_CFG, DOT_CFG, HOME_CFG, ETC_CFG]
env_cfg = os.getenv('XNAT_CFG')
if env_cfg:
cfgs.insert(0, env_cfg)
for f in cfgs:
if os.path.exists(f):
return f
CWD_CFG = os.path.join(os.getcwd(), 'xnat.cfg')
"""The XNAT current directory configuration location."""
DOT_CFG = os.path.join(os.path.expanduser('~'), '.xnat', 'xnat.cfg')
"""The XNAT home ``.xnat`` subdirectory configuration location."""
HOME_CFG = os.path.join(os.path.expanduser('~'), 'xnat.cfg')
"""The XNAT home configuration location."""
ETC_CFG = os.path.join('/etc', 'xnat.cfg')
"""The Linux global ``/etc`` XNAT configuration location."""
|
<commit_before>import os
__all__ = ['default_configuration']
def default_configuration():
"""
Returns the XNAT configuration file location determined as the first file
found in the following precedence order:
1. ``xnat.cfg`` in the home ``.xnat`` subdirectory
2. ``xnat.cfg`` in the home directory
3. ``xnat.cfg`` in the ``/etc`` directory
:return: the configuration location, if any
"""
for f in [CWD_CFG, DOT_CFG, HOME_CFG, ETC_CFG]:
if os.path.exists(f):
return f
CWD_CFG = os.path.join(os.getcwd(), 'xnat.cfg')
"""The XNAT current directory configuration location."""
DOT_CFG = os.path.join(os.path.expanduser('~'), '.xnat', 'xnat.cfg')
"""The XNAT home ``.xnat`` subdirectory configuration location."""
HOME_CFG = os.path.join(os.path.expanduser('~'), 'xnat.cfg')
"""The XNAT home configuration location."""
ETC_CFG = os.path.join('/etc', 'xnat.cfg')
"""The Linux global ``/etc`` XNAT configuration location."""
<commit_msg>Add XNAT_CFG env var to config list.<commit_after>
|
import os
__all__ = ['default_configuration']
def default_configuration():
"""
Returns the XNAT configuration file location determined as the first file
found in the following precedence order:
1. The ``XNAT_CFG`` environment variable, if it is set.
    2. ``xnat.cfg`` in the current working directory
    3. ``xnat.cfg`` in the home ``.xnat`` subdirectory
    4. ``xnat.cfg`` in the home directory
    5. ``xnat.cfg`` in the ``/etc`` directory
:return: the configuration location, if any
"""
cfgs = [CWD_CFG, DOT_CFG, HOME_CFG, ETC_CFG]
env_cfg = os.getenv('XNAT_CFG')
if env_cfg:
cfgs.insert(0, env_cfg)
for f in cfgs:
if os.path.exists(f):
return f
CWD_CFG = os.path.join(os.getcwd(), 'xnat.cfg')
"""The XNAT current directory configuration location."""
DOT_CFG = os.path.join(os.path.expanduser('~'), '.xnat', 'xnat.cfg')
"""The XNAT home ``.xnat`` subdirectory configuration location."""
HOME_CFG = os.path.join(os.path.expanduser('~'), 'xnat.cfg')
"""The XNAT home configuration location."""
ETC_CFG = os.path.join('/etc', 'xnat.cfg')
"""The Linux global ``/etc`` XNAT configuration location."""
|
import os
__all__ = ['default_configuration']
def default_configuration():
"""
Returns the XNAT configuration file location determined as the first file
found in the following precedence order:
1. ``xnat.cfg`` in the home ``.xnat`` subdirectory
2. ``xnat.cfg`` in the home directory
3. ``xnat.cfg`` in the ``/etc`` directory
:return: the configuration location, if any
"""
for f in [CWD_CFG, DOT_CFG, HOME_CFG, ETC_CFG]:
if os.path.exists(f):
return f
CWD_CFG = os.path.join(os.getcwd(), 'xnat.cfg')
"""The XNAT current directory configuration location."""
DOT_CFG = os.path.join(os.path.expanduser('~'), '.xnat', 'xnat.cfg')
"""The XNAT home ``.xnat`` subdirectory configuration location."""
HOME_CFG = os.path.join(os.path.expanduser('~'), 'xnat.cfg')
"""The XNAT home configuration location."""
ETC_CFG = os.path.join('/etc', 'xnat.cfg')
"""The Linux global ``/etc`` XNAT configuration location."""
Add XNAT_CFG env var to config list.import os
__all__ = ['default_configuration']
def default_configuration():
"""
Returns the XNAT configuration file location determined as the first file
found in the following precedence order:
1. The ``XNAT_CFG`` environment variable, if it is set.
    2. ``xnat.cfg`` in the current working directory
    3. ``xnat.cfg`` in the home ``.xnat`` subdirectory
    4. ``xnat.cfg`` in the home directory
    5. ``xnat.cfg`` in the ``/etc`` directory
:return: the configuration location, if any
"""
cfgs = [CWD_CFG, DOT_CFG, HOME_CFG, ETC_CFG]
env_cfg = os.getenv('XNAT_CFG')
if env_cfg:
cfgs.insert(0, env_cfg)
for f in cfgs:
if os.path.exists(f):
return f
CWD_CFG = os.path.join(os.getcwd(), 'xnat.cfg')
"""The XNAT current directory configuration location."""
DOT_CFG = os.path.join(os.path.expanduser('~'), '.xnat', 'xnat.cfg')
"""The XNAT home ``.xnat`` subdirectory configuration location."""
HOME_CFG = os.path.join(os.path.expanduser('~'), 'xnat.cfg')
"""The XNAT home configuration location."""
ETC_CFG = os.path.join('/etc', 'xnat.cfg')
"""The Linux global ``/etc`` XNAT configuration location."""
|
<commit_before>import os
__all__ = ['default_configuration']
def default_configuration():
"""
Returns the XNAT configuration file location determined as the first file
found in the following precedence order:
1. ``xnat.cfg`` in the home ``.xnat`` subdirectory
2. ``xnat.cfg`` in the home directory
3. ``xnat.cfg`` in the ``/etc`` directory
:return: the configuration location, if any
"""
for f in [CWD_CFG, DOT_CFG, HOME_CFG, ETC_CFG]:
if os.path.exists(f):
return f
CWD_CFG = os.path.join(os.getcwd(), 'xnat.cfg')
"""The XNAT current directory configuration location."""
DOT_CFG = os.path.join(os.path.expanduser('~'), '.xnat', 'xnat.cfg')
"""The XNAT home ``.xnat`` subdirectory configuration location."""
HOME_CFG = os.path.join(os.path.expanduser('~'), 'xnat.cfg')
"""The XNAT home configuration location."""
ETC_CFG = os.path.join('/etc', 'xnat.cfg')
"""The Linux global ``/etc`` XNAT configuration location."""
<commit_msg>Add XNAT_CFG env var to config list.<commit_after>import os
__all__ = ['default_configuration']
def default_configuration():
"""
Returns the XNAT configuration file location determined as the first file
found in the following precedence order:
1. The ``XNAT_CFG`` environment variable, if it is set.
    2. ``xnat.cfg`` in the current working directory
    3. ``xnat.cfg`` in the home ``.xnat`` subdirectory
    4. ``xnat.cfg`` in the home directory
    5. ``xnat.cfg`` in the ``/etc`` directory
:return: the configuration location, if any
"""
cfgs = [CWD_CFG, DOT_CFG, HOME_CFG, ETC_CFG]
env_cfg = os.getenv('XNAT_CFG')
if env_cfg:
cfgs.insert(0, env_cfg)
for f in cfgs:
if os.path.exists(f):
return f
CWD_CFG = os.path.join(os.getcwd(), 'xnat.cfg')
"""The XNAT current directory configuration location."""
DOT_CFG = os.path.join(os.path.expanduser('~'), '.xnat', 'xnat.cfg')
"""The XNAT home ``.xnat`` subdirectory configuration location."""
HOME_CFG = os.path.join(os.path.expanduser('~'), 'xnat.cfg')
"""The XNAT home configuration location."""
ETC_CFG = os.path.join('/etc', 'xnat.cfg')
"""The Linux global ``/etc`` XNAT configuration location."""
|
9921b6bd73c5256a3b65c2a5106717ce0fc8f0cf
|
djangorestframework/utils/breadcrumbs.py
|
djangorestframework/utils/breadcrumbs.py
|
from django.core.urlresolvers import resolve
def get_breadcrumbs(url):
"""Given a url returns a list of breadcrumbs, which are each a tuple of (name, url)."""
from djangorestframework.views import APIView
def breadcrumbs_recursive(url, breadcrumbs_list):
"""Add tuples of (name, url) to the breadcrumbs list, progressively chomping off parts of the url."""
try:
(view, unused_args, unused_kwargs) = resolve(url)
except Exception:
pass
else:
# Check if this is a REST framework view, and if so add it to the breadcrumbs
if isinstance(getattr(view, 'cls_instance', None), APIView):
breadcrumbs_list.insert(0, (view.cls_instance.get_name(), url))
if url == '':
# All done
return breadcrumbs_list
elif url.endswith('/'):
# Drop trailing slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url.rstrip('/'), breadcrumbs_list)
# Drop trailing non-slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url[:url.rfind('/') + 1], breadcrumbs_list)
return breadcrumbs_recursive(url, [])
|
from django.core.urlresolvers import resolve, get_script_prefix
def get_breadcrumbs(url):
"""Given a url returns a list of breadcrumbs, which are each a tuple of (name, url)."""
from djangorestframework.views import APIView
def breadcrumbs_recursive(url, breadcrumbs_list, prefix):
"""Add tuples of (name, url) to the breadcrumbs list, progressively chomping off parts of the url."""
try:
(view, unused_args, unused_kwargs) = resolve(url)
except Exception:
pass
else:
# Check if this is a REST framework view, and if so add it to the breadcrumbs
if isinstance(getattr(view, 'cls_instance', None), APIView):
breadcrumbs_list.insert(0, (view.cls_instance.get_name(), prefix + url))
if url == '':
# All done
return breadcrumbs_list
elif url.endswith('/'):
# Drop trailing slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url.rstrip('/'), breadcrumbs_list, prefix)
# Drop trailing non-slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url[:url.rfind('/') + 1], breadcrumbs_list, prefix)
prefix = get_script_prefix()
url = url[len(prefix):]
return breadcrumbs_recursive(url, [], prefix)
|
Use get_script_prefix to play nicely if not installed at the root.
|
Use get_script_prefix to play nicely if not installed at the root.
|
Python
|
bsd-2-clause
|
rafaelcaricio/django-rest-framework,maryokhin/django-rest-framework,jtiai/django-rest-framework,cheif/django-rest-framework,vstoykov/django-rest-framework,wwj718/django-rest-framework,ebsaral/django-rest-framework,jpadilla/django-rest-framework,damycra/django-rest-framework,kezabelle/django-rest-framework,cyberj/django-rest-framework,hnarayanan/django-rest-framework,kgeorgy/django-rest-framework,antonyc/django-rest-framework,lubomir/django-rest-framework,ambivalentno/django-rest-framework,potpath/django-rest-framework,ashishfinoit/django-rest-framework,waytai/django-rest-framework,nhorelik/django-rest-framework,iheitlager/django-rest-framework,rubendura/django-rest-framework,aericson/django-rest-framework,rubendura/django-rest-framework,uploadcare/django-rest-framework,atombrella/django-rest-framework,krinart/django-rest-framework,HireAnEsquire/django-rest-framework,gregmuellegger/django-rest-framework,thedrow/django-rest-framework-1,mgaitan/django-rest-framework,hnakamur/django-rest-framework,callorico/django-rest-framework,hnakamur/django-rest-framework,tigeraniya/django-rest-framework,douwevandermeij/django-rest-framework,dmwyatt/django-rest-framework,agconti/django-rest-framework,canassa/django-rest-framework,johnraz/django-rest-framework,linovia/django-rest-framework,wwj718/django-rest-framework,brandoncazander/django-rest-framework,canassa/django-rest-framework,ashishfinoit/django-rest-framework,agconti/django-rest-framework,krinart/django-rest-framework,xiaotangyuan/django-rest-framework,cyberj/django-rest-framework,uruz/django-rest-framework,akalipetis/django-rest-framework,tcroiset/django-rest-framework,buptlsl/django-rest-framework,d0ugal/django-rest-framework,kgeorgy/django-rest-framework,vstoykov/django-rest-framework,thedrow/django-rest-framework-1,jerryhebert/django-rest-framework,delinhabit/django-rest-framework,davesque/django-rest-framework,potpath/django-rest-framework,kennydude/django-rest-framework,qsorix/django-rest-framework,uploadcare/django-rest-framework,ebsaral/django-rest-framework,jness/django-rest-framework,MJafarMashhadi/django-rest-framework,tcroiset/django-rest-framework,adambain-vokal/django-rest-framework,johnraz/django-rest-framework,mgaitan/django-rest-framework,wedaly/django-rest-framework,sheppard/django-rest-framework,uploadcare/django-rest-framework,rhblind/django-rest-framework,sehmaschine/django-rest-framework,sbellem/django-rest-framework,arpheno/django-rest-framework,rafaelang/django-rest-framework,bluedazzle/django-rest-framework,jtiai/django-rest-framework,antonyc/django-rest-framework,ajaali/django-rest-framework,rhblind/django-rest-framework,ebsaral/django-rest-framework,akalipetis/django-rest-framework,buptlsl/django-rest-framework,potpath/django-rest-framework,xiaotangyuan/django-rest-framework,jerryhebert/django-rest-framework,douwevandermeij/django-rest-framework,qsorix/django-rest-framework,callorico/django-rest-framework,sbellem/django-rest-framework,sehmaschine/django-rest-framework,elim/django-rest-framework,akalipetis/django-rest-framework,kylefox/django-rest-framework,adambain-vokal/django-rest-framework,maryokhin/django-rest-framework,fishky/django-rest-framework,werthen/django-rest-framework,simudream/django-rest-framework,delinhabit/django-rest-framework,aericson/django-rest-framework,abdulhaq-e/django-rest-framework,simudream/django-rest-framework,paolopaolopaolo/django-rest-framework,jpulec/django-rest-framework,James1345/django-rest-framework,fishky/django-rest-framework,ajaali/django-rest-framework,ashishfinoit/django-rest-fra
mework,alacritythief/django-rest-framework,ticosax/django-rest-framework,cheif/django-rest-framework,wedaly/django-rest-framework,callorico/django-rest-framework,YBJAY00000/django-rest-framework,mgaitan/django-rest-framework,YBJAY00000/django-rest-framework,wzbozon/django-rest-framework,bluedazzle/django-rest-framework,elim/django-rest-framework,kylefox/django-rest-framework,alacritythief/django-rest-framework,kennydude/django-rest-framework,hunter007/django-rest-framework,abdulhaq-e/django-rest-framework,leeahoward/django-rest-framework,damycra/django-rest-framework,andriy-s/django-rest-framework,waytai/django-rest-framework,aericson/django-rest-framework,leeahoward/django-rest-framework,wzbozon/django-rest-framework,wangpanjun/django-rest-framework,tcroiset/django-rest-framework,bluedazzle/django-rest-framework,jness/django-rest-framework,tigeraniya/django-rest-framework,andriy-s/django-rest-framework,hnakamur/django-rest-framework,rhblind/django-rest-framework,nryoung/django-rest-framework,edx/django-rest-framework,AlexandreProenca/django-rest-framework,simudream/django-rest-framework,nhorelik/django-rest-framework,krinart/django-rest-framework,wangpanjun/django-rest-framework,davesque/django-rest-framework,ticosax/django-rest-framework,jpulec/django-rest-framework,hunter007/django-rest-framework,jpulec/django-rest-framework,maryokhin/django-rest-framework,iheitlager/django-rest-framework,rafaelcaricio/django-rest-framework,yiyocx/django-rest-framework,jpadilla/django-rest-framework,jerryhebert/django-rest-framework,ezheidtmann/django-rest-framework,waytai/django-rest-framework,tomchristie/django-rest-framework,gregmuellegger/django-rest-framework,wzbozon/django-rest-framework,James1345/django-rest-framework,werthen/django-rest-framework,atombrella/django-rest-framework,pombredanne/django-rest-framework,adambain-vokal/django-rest-framework,paolopaolopaolo/django-rest-framework,gregmuellegger/django-rest-framework,qsorix/django-rest-framework,raphaelmerx/django-rest-framework,dmwyatt/django-rest-framework,brandoncazander/django-rest-framework,xiaotangyuan/django-rest-framework,AlexandreProenca/django-rest-framework,werthen/django-rest-framework,kgeorgy/django-rest-framework,hnarayanan/django-rest-framework,jpadilla/django-rest-framework,ajaali/django-rest-framework,kylefox/django-rest-framework,ossanna16/django-rest-framework,justanr/django-rest-framework,pombredanne/django-rest-framework,YBJAY00000/django-rest-framework,atombrella/django-rest-framework,kezabelle/django-rest-framework,James1345/django-rest-framework,MJafarMashhadi/django-rest-framework,iheitlager/django-rest-framework,wangpanjun/django-rest-framework,ticosax/django-rest-framework,edx/django-rest-framework,d0ugal/django-rest-framework,rubendura/django-rest-framework,HireAnEsquire/django-rest-framework,cheif/django-rest-framework,nryoung/django-rest-framework,AlexandreProenca/django-rest-framework,brandoncazander/django-rest-framework,arpheno/django-rest-framework,MJafarMashhadi/django-rest-framework,raphaelmerx/django-rest-framework,kennydude/django-rest-framework,nryoung/django-rest-framework,lubomir/django-rest-framework,ossanna16/django-rest-framework,thedrow/django-rest-framework-1,justanr/django-rest-framework,buptlsl/django-rest-framework,lubomir/django-rest-framework,vstoykov/django-rest-framework,zeldalink0515/django-rest-framework,raphaelmerx/django-rest-framework,damycra/django-rest-framework,ambivalentno/django-rest-framework,nhorelik/django-rest-framework,VishvajitP/django-rest-framework,kezabelle/django-rest-f
ramework,sheppard/django-rest-framework,leeahoward/django-rest-framework,dmwyatt/django-rest-framework,wedaly/django-rest-framework,justanr/django-rest-framework,uruz/django-rest-framework,delinhabit/django-rest-framework,tomchristie/django-rest-framework,VishvajitP/django-rest-framework,canassa/django-rest-framework,rafaelang/django-rest-framework,andriy-s/django-rest-framework,hunter007/django-rest-framework,paolopaolopaolo/django-rest-framework,hnarayanan/django-rest-framework,HireAnEsquire/django-rest-framework,abdulhaq-e/django-rest-framework,jness/django-rest-framework,douwevandermeij/django-rest-framework,pombredanne/django-rest-framework,ossanna16/django-rest-framework,linovia/django-rest-framework,cyberj/django-rest-framework,wwj718/django-rest-framework,d0ugal/django-rest-framework,sheppard/django-rest-framework,sehmaschine/django-rest-framework,tigeraniya/django-rest-framework,linovia/django-rest-framework,zeldalink0515/django-rest-framework,alacritythief/django-rest-framework,uruz/django-rest-framework,VishvajitP/django-rest-framework,ambivalentno/django-rest-framework,fishky/django-rest-framework,tomchristie/django-rest-framework,sbellem/django-rest-framework,zeldalink0515/django-rest-framework,arpheno/django-rest-framework,agconti/django-rest-framework,davesque/django-rest-framework,elim/django-rest-framework,jtiai/django-rest-framework,yiyocx/django-rest-framework,yiyocx/django-rest-framework,edx/django-rest-framework,johnraz/django-rest-framework,antonyc/django-rest-framework,ezheidtmann/django-rest-framework,ezheidtmann/django-rest-framework,rafaelcaricio/django-rest-framework,rafaelang/django-rest-framework
|
from django.core.urlresolvers import resolve
def get_breadcrumbs(url):
"""Given a url returns a list of breadcrumbs, which are each a tuple of (name, url)."""
from djangorestframework.views import APIView
def breadcrumbs_recursive(url, breadcrumbs_list):
"""Add tuples of (name, url) to the breadcrumbs list, progressively chomping off parts of the url."""
try:
(view, unused_args, unused_kwargs) = resolve(url)
except Exception:
pass
else:
# Check if this is a REST framework view, and if so add it to the breadcrumbs
if isinstance(getattr(view, 'cls_instance', None), APIView):
breadcrumbs_list.insert(0, (view.cls_instance.get_name(), url))
if url == '':
# All done
return breadcrumbs_list
elif url.endswith('/'):
# Drop trailing slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url.rstrip('/'), breadcrumbs_list)
# Drop trailing non-slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url[:url.rfind('/') + 1], breadcrumbs_list)
return breadcrumbs_recursive(url, [])
Use get_script_prefix to play nicely if not installed at the root.
|
from django.core.urlresolvers import resolve, get_script_prefix
def get_breadcrumbs(url):
"""Given a url returns a list of breadcrumbs, which are each a tuple of (name, url)."""
from djangorestframework.views import APIView
def breadcrumbs_recursive(url, breadcrumbs_list, prefix):
"""Add tuples of (name, url) to the breadcrumbs list, progressively chomping off parts of the url."""
try:
(view, unused_args, unused_kwargs) = resolve(url)
except Exception:
pass
else:
# Check if this is a REST framework view, and if so add it to the breadcrumbs
if isinstance(getattr(view, 'cls_instance', None), APIView):
breadcrumbs_list.insert(0, (view.cls_instance.get_name(), prefix + url))
if url == '':
# All done
return breadcrumbs_list
elif url.endswith('/'):
# Drop trailing slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url.rstrip('/'), breadcrumbs_list, prefix)
# Drop trailing non-slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url[:url.rfind('/') + 1], breadcrumbs_list, prefix)
prefix = get_script_prefix()
url = url[len(prefix):]
return breadcrumbs_recursive(url, [], prefix)
|
<commit_before>from django.core.urlresolvers import resolve
def get_breadcrumbs(url):
"""Given a url returns a list of breadcrumbs, which are each a tuple of (name, url)."""
from djangorestframework.views import APIView
def breadcrumbs_recursive(url, breadcrumbs_list):
"""Add tuples of (name, url) to the breadcrumbs list, progressively chomping off parts of the url."""
try:
(view, unused_args, unused_kwargs) = resolve(url)
except Exception:
pass
else:
# Check if this is a REST framework view, and if so add it to the breadcrumbs
if isinstance(getattr(view, 'cls_instance', None), APIView):
breadcrumbs_list.insert(0, (view.cls_instance.get_name(), url))
if url == '':
# All done
return breadcrumbs_list
elif url.endswith('/'):
# Drop trailing slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url.rstrip('/'), breadcrumbs_list)
# Drop trailing non-slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url[:url.rfind('/') + 1], breadcrumbs_list)
return breadcrumbs_recursive(url, [])
<commit_msg>Use get_script_prefix to play nicely if not installed at the root.<commit_after>
|
from django.core.urlresolvers import resolve, get_script_prefix
def get_breadcrumbs(url):
"""Given a url returns a list of breadcrumbs, which are each a tuple of (name, url)."""
from djangorestframework.views import APIView
def breadcrumbs_recursive(url, breadcrumbs_list, prefix):
"""Add tuples of (name, url) to the breadcrumbs list, progressively chomping off parts of the url."""
try:
(view, unused_args, unused_kwargs) = resolve(url)
except Exception:
pass
else:
# Check if this is a REST framework view, and if so add it to the breadcrumbs
if isinstance(getattr(view, 'cls_instance', None), APIView):
breadcrumbs_list.insert(0, (view.cls_instance.get_name(), prefix + url))
if url == '':
# All done
return breadcrumbs_list
elif url.endswith('/'):
# Drop trailing slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url.rstrip('/'), breadcrumbs_list, prefix)
# Drop trailing non-slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url[:url.rfind('/') + 1], breadcrumbs_list, prefix)
prefix = get_script_prefix()
url = url[len(prefix):]
return breadcrumbs_recursive(url, [], prefix)
|
from django.core.urlresolvers import resolve
def get_breadcrumbs(url):
"""Given a url returns a list of breadcrumbs, which are each a tuple of (name, url)."""
from djangorestframework.views import APIView
def breadcrumbs_recursive(url, breadcrumbs_list):
"""Add tuples of (name, url) to the breadcrumbs list, progressively chomping off parts of the url."""
try:
(view, unused_args, unused_kwargs) = resolve(url)
except Exception:
pass
else:
# Check if this is a REST framework view, and if so add it to the breadcrumbs
if isinstance(getattr(view, 'cls_instance', None), APIView):
breadcrumbs_list.insert(0, (view.cls_instance.get_name(), url))
if url == '':
# All done
return breadcrumbs_list
elif url.endswith('/'):
# Drop trailing slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url.rstrip('/'), breadcrumbs_list)
# Drop trailing non-slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url[:url.rfind('/') + 1], breadcrumbs_list)
return breadcrumbs_recursive(url, [])
Use get_script_prefix to play nicely if not installed at the root.from django.core.urlresolvers import resolve, get_script_prefix
def get_breadcrumbs(url):
"""Given a url returns a list of breadcrumbs, which are each a tuple of (name, url)."""
from djangorestframework.views import APIView
def breadcrumbs_recursive(url, breadcrumbs_list, prefix):
"""Add tuples of (name, url) to the breadcrumbs list, progressively chomping off parts of the url."""
try:
(view, unused_args, unused_kwargs) = resolve(url)
except Exception:
pass
else:
# Check if this is a REST framework view, and if so add it to the breadcrumbs
if isinstance(getattr(view, 'cls_instance', None), APIView):
breadcrumbs_list.insert(0, (view.cls_instance.get_name(), prefix + url))
if url == '':
# All done
return breadcrumbs_list
elif url.endswith('/'):
# Drop trailing slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url.rstrip('/'), breadcrumbs_list, prefix)
# Drop trailing non-slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url[:url.rfind('/') + 1], breadcrumbs_list, prefix)
prefix = get_script_prefix()
url = url[len(prefix):]
return breadcrumbs_recursive(url, [], prefix)
|
<commit_before>from django.core.urlresolvers import resolve
def get_breadcrumbs(url):
"""Given a url returns a list of breadcrumbs, which are each a tuple of (name, url)."""
from djangorestframework.views import APIView
def breadcrumbs_recursive(url, breadcrumbs_list):
"""Add tuples of (name, url) to the breadcrumbs list, progressively chomping off parts of the url."""
try:
(view, unused_args, unused_kwargs) = resolve(url)
except Exception:
pass
else:
# Check if this is a REST framework view, and if so add it to the breadcrumbs
if isinstance(getattr(view, 'cls_instance', None), APIView):
breadcrumbs_list.insert(0, (view.cls_instance.get_name(), url))
if url == '':
# All done
return breadcrumbs_list
elif url.endswith('/'):
# Drop trailing slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url.rstrip('/'), breadcrumbs_list)
# Drop trailing non-slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url[:url.rfind('/') + 1], breadcrumbs_list)
return breadcrumbs_recursive(url, [])
<commit_msg>Use get_script_prefix to play nicely if not installed at the root.<commit_after>from django.core.urlresolvers import resolve, get_script_prefix
def get_breadcrumbs(url):
"""Given a url returns a list of breadcrumbs, which are each a tuple of (name, url)."""
from djangorestframework.views import APIView
def breadcrumbs_recursive(url, breadcrumbs_list, prefix):
"""Add tuples of (name, url) to the breadcrumbs list, progressively chomping off parts of the url."""
try:
(view, unused_args, unused_kwargs) = resolve(url)
except Exception:
pass
else:
# Check if this is a REST framework view, and if so add it to the breadcrumbs
if isinstance(getattr(view, 'cls_instance', None), APIView):
breadcrumbs_list.insert(0, (view.cls_instance.get_name(), prefix + url))
if url == '':
# All done
return breadcrumbs_list
elif url.endswith('/'):
# Drop trailing slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url.rstrip('/'), breadcrumbs_list, prefix)
# Drop trailing non-slash off the end and continue to try to resolve more breadcrumbs
return breadcrumbs_recursive(url[:url.rfind('/') + 1], breadcrumbs_list, prefix)
prefix = get_script_prefix()
url = url[len(prefix):]
return breadcrumbs_recursive(url, [], prefix)
|
d34311cf7bc4dd33e020913538b28a1f5727ed92
|
kafka_influxdb/tests/encoder_test/test_echo_encoder.py
|
kafka_influxdb/tests/encoder_test/test_echo_encoder.py
|
import unittest
from kafka_influxdb.encoder import echo_encoder
class TestEchoEncoder(unittest.TestCase):
def setUp(self):
self.encoder = echo_encoder.Encoder()
self.messages = [
"yeaal",
["this", "is", "a", "list"],
{'hash': {'maps': 'rule'}},
42,
42.23
]
def test_encode(self):
for msg in self.messages:
yield check_encode, msg
def check_encode(self, msg):
""" Output must be same as input for echo sender """
self.assertEqual(self.encoder.encode(msg), msg)
|
import unittest
from kafka_influxdb.encoder import echo_encoder
class TestEchoEncoder(unittest.TestCase):
def setUp(self):
self.encoder = echo_encoder.Encoder()
self.messages = [
"yeaal",
["this", "is", "a", "list"],
{'hash': {'maps': 'rule'}},
42,
42.23
]
def test_encode(self):
for msg in self.messages:
yield self.check_encode(msg)
def check_encode(self, msg):
""" Output must be same as input for echo sender """
self.assertEqual(self.encoder.encode(msg), msg)
|
Fix unit test for echo encoder
|
Fix unit test for echo encoder
|
Python
|
apache-2.0
|
mre/kafka-influxdb,mre/kafka-influxdb
|
import unittest
from kafka_influxdb.encoder import echo_encoder
class TestEchoEncoder(unittest.TestCase):
def setUp(self):
self.encoder = echo_encoder.Encoder()
self.messages = [
"yeaal",
["this", "is", "a", "list"],
{'hash': {'maps': 'rule'}},
42,
42.23
]
def test_encode(self):
for msg in self.messages:
yield check_encode, msg
def check_encode(self, msg):
""" Output must be same as input for echo sender """
self.assertEqual(self.encoder.encode(msg), msg)
Fix unit test for echo encoder
|
import unittest
from kafka_influxdb.encoder import echo_encoder
class TestEchoEncoder(unittest.TestCase):
def setUp(self):
self.encoder = echo_encoder.Encoder()
self.messages = [
"yeaal",
["this", "is", "a", "list"],
{'hash': {'maps': 'rule'}},
42,
42.23
]
def test_encode(self):
for msg in self.messages:
yield self.check_encode(msg)
def check_encode(self, msg):
""" Output must be same as input for echo sender """
self.assertEqual(self.encoder.encode(msg), msg)
|
<commit_before>import unittest
from kafka_influxdb.encoder import echo_encoder
class TestEchoEncoder(unittest.TestCase):
def setUp(self):
self.encoder = echo_encoder.Encoder()
self.messages = [
"yeaal",
["this", "is", "a", "list"],
{'hash': {'maps': 'rule'}},
42,
42.23
]
def test_encode(self):
for msg in self.messages:
yield check_encode, msg
def check_encode(self, msg):
""" Output must be same as input for echo sender """
self.assertEqual(self.encoder.encode(msg), msg)
<commit_msg>Fix unit test for echo encoder<commit_after>
|
import unittest
from kafka_influxdb.encoder import echo_encoder
class TestEchoEncoder(unittest.TestCase):
def setUp(self):
self.encoder = echo_encoder.Encoder()
self.messages = [
"yeaal",
["this", "is", "a", "list"],
{'hash': {'maps': 'rule'}},
42,
42.23
]
def test_encode(self):
for msg in self.messages:
yield self.check_encode(msg)
def check_encode(self, msg):
""" Output must be same as input for echo sender """
self.assertEqual(self.encoder.encode(msg), msg)
|
import unittest
from kafka_influxdb.encoder import echo_encoder
class TestEchoEncoder(unittest.TestCase):
def setUp(self):
self.encoder = echo_encoder.Encoder()
self.messages = [
"yeaal",
["this", "is", "a", "list"],
{'hash': {'maps': 'rule'}},
42,
42.23
]
def test_encode(self):
for msg in self.messages:
yield check_encode, msg
def check_encode(self, msg):
""" Output must be same as input for echo sender """
self.assertEqual(self.encoder.encode(msg), msg)
Fix unit test for echo encoderimport unittest
from kafka_influxdb.encoder import echo_encoder
class TestEchoEncoder(unittest.TestCase):
def setUp(self):
self.encoder = echo_encoder.Encoder()
self.messages = [
"yeaal",
["this", "is", "a", "list"],
{'hash': {'maps': 'rule'}},
42,
42.23
]
def test_encode(self):
for msg in self.messages:
yield self.check_encode(msg)
def check_encode(self, msg):
""" Output must be same as input for echo sender """
self.assertEqual(self.encoder.encode(msg), msg)
|
<commit_before>import unittest
from kafka_influxdb.encoder import echo_encoder
class TestEchoEncoder(unittest.TestCase):
def setUp(self):
self.encoder = echo_encoder.Encoder()
self.messages = [
"yeaal",
["this", "is", "a", "list"],
{'hash': {'maps': 'rule'}},
42,
42.23
]
def test_encode(self):
for msg in self.messages:
yield check_encode, msg
def check_encode(self, msg):
""" Output must be same as input for echo sender """
self.assertEqual(self.encoder.encode(msg), msg)
<commit_msg>Fix unit test for echo encoder<commit_after>import unittest
from kafka_influxdb.encoder import echo_encoder
class TestEchoEncoder(unittest.TestCase):
def setUp(self):
self.encoder = echo_encoder.Encoder()
self.messages = [
"yeaal",
["this", "is", "a", "list"],
{'hash': {'maps': 'rule'}},
42,
42.23
]
def test_encode(self):
for msg in self.messages:
yield self.check_encode(msg)
def check_encode(self, msg):
""" Output must be same as input for echo sender """
self.assertEqual(self.encoder.encode(msg), msg)
|
3557c8f2ab3f7b3e1ca5468b322e81022355e40c
|
interage/api/models/metadata.py
|
interage/api/models/metadata.py
|
from . import APIMetadataModel
from .properties import PropertyDescriptor
class InteracaoMetadata(APIMetadataModel):
@property
@PropertyDescriptor.serializable('evidencia')
def evidencias(self):
return self.__evidencias
@evidencias.setter
@PropertyDescriptor.list
def evidencias(self, val):
self.__evidencias = val
@property
@PropertyDescriptor.serializable('acao')
def acoes(self):
return self.__acoes
@acoes.setter
@PropertyDescriptor.list
def acoes(self, val):
self.__acoes = val
@property
@PropertyDescriptor.serializable('acao')
def gravidades(self):
return self.__gravidades
@gravidades.setter
@PropertyDescriptor.list
def gravidades(self, val):
self.__gravidades = val
|
from . import APIMetadataModel
from .properties import PropertyDescriptor
class InteracaoMetadata(APIMetadataModel):
@property
@PropertyDescriptor.serializable('evidencia')
def evidencias(self):
return self.__evidencias
@evidencias.setter
@PropertyDescriptor.list
def evidencias(self, val):
self.__evidencias = val
@property
@PropertyDescriptor.serializable('acao')
def acoes(self):
return self.__acoes
@acoes.setter
@PropertyDescriptor.list
def acoes(self, val):
self.__acoes = val
@property
@PropertyDescriptor.serializable('gravidade')
def gravidades(self):
return self.__gravidades
@gravidades.setter
@PropertyDescriptor.list
def gravidades(self, val):
self.__gravidades = val
|
Fix serializable attribute of gravidades
|
Fix serializable attribute of gravidades
|
Python
|
mit
|
IntMed/interage_python_sdk
|
from . import APIMetadataModel
from .properties import PropertyDescriptor
class InteracaoMetadata(APIMetadataModel):
@property
@PropertyDescriptor.serializable('evidencia')
def evidencias(self):
return self.__evidencias
@evidencias.setter
@PropertyDescriptor.list
def evidencias(self, val):
self.__evidencias = val
@property
@PropertyDescriptor.serializable('acao')
def acoes(self):
return self.__acoes
@acoes.setter
@PropertyDescriptor.list
def acoes(self, val):
self.__acoes = val
@property
@PropertyDescriptor.serializable('acao')
def gravidades(self):
return self.__gravidades
@gravidades.setter
@PropertyDescriptor.list
def gravidades(self, val):
self.__gravidades = val
Fix serializable attribute of gravidades
|
from . import APIMetadataModel
from .properties import PropertyDescriptor
class InteracaoMetadata(APIMetadataModel):
@property
@PropertyDescriptor.serializable('evidencia')
def evidencias(self):
return self.__evidencias
@evidencias.setter
@PropertyDescriptor.list
def evidencias(self, val):
self.__evidencias = val
@property
@PropertyDescriptor.serializable('acao')
def acoes(self):
return self.__acoes
@acoes.setter
@PropertyDescriptor.list
def acoes(self, val):
self.__acoes = val
@property
@PropertyDescriptor.serializable('gravidade')
def gravidades(self):
return self.__gravidades
@gravidades.setter
@PropertyDescriptor.list
def gravidades(self, val):
self.__gravidades = val
|
<commit_before>from . import APIMetadataModel
from .properties import PropertyDescriptor
class InteracaoMetadata(APIMetadataModel):
@property
@PropertyDescriptor.serializable('evidencia')
def evidencias(self):
return self.__evidencias
@evidencias.setter
@PropertyDescriptor.list
def evidencias(self, val):
self.__evidencias = val
@property
@PropertyDescriptor.serializable('acao')
def acoes(self):
return self.__acoes
@acoes.setter
@PropertyDescriptor.list
def acoes(self, val):
self.__acoes = val
@property
@PropertyDescriptor.serializable('acao')
def gravidades(self):
return self.__gravidades
@gravidades.setter
@PropertyDescriptor.list
def gravidades(self, val):
self.__gravidades = val
<commit_msg>Fix serializable attribute of gravidades<commit_after>
|
from . import APIMetadataModel
from .properties import PropertyDescriptor
class InteracaoMetadata(APIMetadataModel):
@property
@PropertyDescriptor.serializable('evidencia')
def evidencias(self):
return self.__evidencias
@evidencias.setter
@PropertyDescriptor.list
def evidencias(self, val):
self.__evidencias = val
@property
@PropertyDescriptor.serializable('acao')
def acoes(self):
return self.__acoes
@acoes.setter
@PropertyDescriptor.list
def acoes(self, val):
self.__acoes = val
@property
@PropertyDescriptor.serializable('gravidade')
def gravidades(self):
return self.__gravidades
@gravidades.setter
@PropertyDescriptor.list
def gravidades(self, val):
self.__gravidades = val
|
from . import APIMetadataModel
from .properties import PropertyDescriptor
class InteracaoMetadata(APIMetadataModel):
@property
@PropertyDescriptor.serializable('evidencia')
def evidencias(self):
return self.__evidencias
@evidencias.setter
@PropertyDescriptor.list
def evidencias(self, val):
self.__evidencias = val
@property
@PropertyDescriptor.serializable('acao')
def acoes(self):
return self.__acoes
@acoes.setter
@PropertyDescriptor.list
def acoes(self, val):
self.__acoes = val
@property
@PropertyDescriptor.serializable('acao')
def gravidades(self):
return self.__gravidades
@gravidades.setter
@PropertyDescriptor.list
def gravidades(self, val):
self.__gravidades = val
Fix serializable attribute of gravidadesfrom . import APIMetadataModel
from .properties import PropertyDescriptor
class InteracaoMetadata(APIMetadataModel):
@property
@PropertyDescriptor.serializable('evidencia')
def evidencias(self):
return self.__evidencias
@evidencias.setter
@PropertyDescriptor.list
def evidencias(self, val):
self.__evidencias = val
@property
@PropertyDescriptor.serializable('acao')
def acoes(self):
return self.__acoes
@acoes.setter
@PropertyDescriptor.list
def acoes(self, val):
self.__acoes = val
@property
@PropertyDescriptor.serializable('gravidade')
def gravidades(self):
return self.__gravidades
@gravidades.setter
@PropertyDescriptor.list
def gravidades(self, val):
self.__gravidades = val
|
<commit_before>from . import APIMetadataModel
from .properties import PropertyDescriptor
class InteracaoMetadata(APIMetadataModel):
@property
@PropertyDescriptor.serializable('evidencia')
def evidencias(self):
return self.__evidencias
@evidencias.setter
@PropertyDescriptor.list
def evidencias(self, val):
self.__evidencias = val
@property
@PropertyDescriptor.serializable('acao')
def acoes(self):
return self.__acoes
@acoes.setter
@PropertyDescriptor.list
def acoes(self, val):
self.__acoes = val
@property
@PropertyDescriptor.serializable('acao')
def gravidades(self):
return self.__gravidades
@gravidades.setter
@PropertyDescriptor.list
def gravidades(self, val):
self.__gravidades = val
<commit_msg>Fix serializable attribute of gravidades<commit_after>from . import APIMetadataModel
from .properties import PropertyDescriptor
class InteracaoMetadata(APIMetadataModel):
@property
@PropertyDescriptor.serializable('evidencia')
def evidencias(self):
return self.__evidencias
@evidencias.setter
@PropertyDescriptor.list
def evidencias(self, val):
self.__evidencias = val
@property
@PropertyDescriptor.serializable('acao')
def acoes(self):
return self.__acoes
@acoes.setter
@PropertyDescriptor.list
def acoes(self, val):
self.__acoes = val
@property
@PropertyDescriptor.serializable('gravidade')
def gravidades(self):
return self.__gravidades
@gravidades.setter
@PropertyDescriptor.list
def gravidades(self, val):
self.__gravidades = val
|
c2ae6fb563b1ecc20b11ec6d693bad8a7f9e8945
|
scrapple/utils/exceptions.py
|
scrapple/utils/exceptions.py
|
"""
scrapple.utils.exceptions
~~~~~~~~~~~~~~~~~~~~~~~~~
Functions related to handling exceptions in the input arguments
"""
import re
def handle_exceptions(args):
"""
Validates the arguments passed through the CLI commands.
:param args: The arguments passed in the CLI, parsed by the docopt module
:return: None
"""
projectname_re = re.compile(r'[^a-zA-Z0-9_]')
if args['genconfig']:
if args['--type'] not in ['scraper', 'crawler']:
raise Exception("--type has to be 'scraper' or 'crawler'")
if args['--selector'] not in ['xpath', 'css']:
raise Exception("--selector has to be 'xpath' or 'css'")
if args['generate'] or args['run']:
if args['--output_type'] not in ['json', 'csv']:
raise Exception("--output_type has to be 'json' or 'csv'")
if args['genconfig'] or args['generate'] or args['run']:
if projectname_re.search(args['<projectname>']) is not None:
raise Exception("<projectname> should consist of letters, digits or _")
return
|
"""
scrapple.utils.exceptions
~~~~~~~~~~~~~~~~~~~~~~~~~
Functions related to handling exceptions in the input arguments
"""
import re
def handle_exceptions(args):
"""
Validates the arguments passed through the CLI commands.
:param args: The arguments passed in the CLI, parsed by the docopt module
:return: None
"""
projectname_re = re.compile(r'[^a-zA-Z0-9_]')
if args['genconfig']:
if args['--type'] not in ['scraper', 'crawler']:
raise Exception("--type has to be 'scraper' or 'crawler'")
if args['--selector'] not in ['xpath', 'css']:
raise Exception("--selector has to be 'xpath' or 'css'")
if args['generate'] or args['run']:
if args['--output_type'] not in ['json', 'csv']:
raise Exception("--output_type has to be 'json' or 'csv'")
if args['genconfig'] or args['generate'] or args['run']:
if projectname_re.search(args['<projectname>']) is not None:
raise Exception("<projectname> should consist of letters, digits or _")
if int(args['--levels']) < 1:
raise Exception("--levels should be greater than, or equal to 1")
return
|
Update exception handling for levels argument
|
Update exception handling for levels argument
|
Python
|
mit
|
scrappleapp/scrapple,AlexMathew/scrapple,AlexMathew/scrapple,scrappleapp/scrapple,AlexMathew/scrapple
|
"""
scrapple.utils.exceptions
~~~~~~~~~~~~~~~~~~~~~~~~~
Functions related to handling exceptions in the input arguments
"""
import re
def handle_exceptions(args):
"""
Validates the arguments passed through the CLI commands.
:param args: The arguments passed in the CLI, parsed by the docopt module
:return: None
"""
projectname_re = re.compile(r'[^a-zA-Z0-9_]')
if args['genconfig']:
if args['--type'] not in ['scraper', 'crawler']:
raise Exception("--type has to be 'scraper' or 'crawler'")
if args['--selector'] not in ['xpath', 'css']:
raise Exception("--selector has to be 'xpath' or 'css'")
if args['generate'] or args['run']:
if args['--output_type'] not in ['json', 'csv']:
raise Exception("--output_type has to be 'json' or 'csv'")
if args['genconfig'] or args['generate'] or args['run']:
if projectname_re.search(args['<projectname>']) is not None:
raise Exception("<projectname> should consist of letters, digits or _")
return
Update exception handling for levels argument
|
"""
scrapple.utils.exceptions
~~~~~~~~~~~~~~~~~~~~~~~~~
Functions related to handling exceptions in the input arguments
"""
import re
def handle_exceptions(args):
"""
Validates the arguments passed through the CLI commands.
:param args: The arguments passed in the CLI, parsed by the docopt module
:return: None
"""
projectname_re = re.compile(r'[^a-zA-Z0-9_]')
if args['genconfig']:
if args['--type'] not in ['scraper', 'crawler']:
raise Exception("--type has to be 'scraper' or 'crawler'")
if args['--selector'] not in ['xpath', 'css']:
raise Exception("--selector has to be 'xpath' or 'css'")
if args['generate'] or args['run']:
if args['--output_type'] not in ['json', 'csv']:
raise Exception("--output_type has to be 'json' or 'csv'")
if args['genconfig'] or args['generate'] or args['run']:
if projectname_re.search(args['<projectname>']) is not None:
raise Exception("<projectname> should consist of letters, digits or _")
if int(args['--levels']) < 1:
raise Exception("--levels should be greater than, or equal to 1")
return
|
<commit_before>"""
scrapple.utils.exceptions
~~~~~~~~~~~~~~~~~~~~~~~~~
Functions related to handling exceptions in the input arguments
"""
import re
def handle_exceptions(args):
"""
Validates the arguments passed through the CLI commands.
:param args: The arguments passed in the CLI, parsed by the docopt module
:return: None
"""
projectname_re = re.compile(r'[^a-zA-Z0-9_]')
if args['genconfig']:
if args['--type'] not in ['scraper', 'crawler']:
raise Exception("--type has to be 'scraper' or 'crawler'")
if args['--selector'] not in ['xpath', 'css']:
raise Exception("--selector has to be 'xpath' or 'css'")
if args['generate'] or args['run']:
if args['--output_type'] not in ['json', 'csv']:
raise Exception("--output_type has to be 'json' or 'csv'")
if args['genconfig'] or args['generate'] or args['run']:
if projectname_re.search(args['<projectname>']) is not None:
raise Exception("<projectname> should consist of letters, digits or _")
return
<commit_msg>Update exception handling for levels argument<commit_after>
|
"""
scrapple.utils.exceptions
~~~~~~~~~~~~~~~~~~~~~~~~~
Functions related to handling exceptions in the input arguments
"""
import re
def handle_exceptions(args):
"""
Validates the arguments passed through the CLI commands.
:param args: The arguments passed in the CLI, parsed by the docopt module
:return: None
"""
projectname_re = re.compile(r'[^a-zA-Z0-9_]')
if args['genconfig']:
if args['--type'] not in ['scraper', 'crawler']:
raise Exception("--type has to be 'scraper' or 'crawler'")
if args['--selector'] not in ['xpath', 'css']:
raise Exception("--selector has to be 'xpath' or 'css'")
if args['generate'] or args['run']:
if args['--output_type'] not in ['json', 'csv']:
raise Exception("--output_type has to be 'json' or 'csv'")
if args['genconfig'] or args['generate'] or args['run']:
if projectname_re.search(args['<projectname>']) is not None:
raise Exception("<projectname> should consist of letters, digits or _")
if int(args['--levels']) < 1:
raise Exception("--levels should be greater than, or equal to 1")
return
|
"""
scrapple.utils.exceptions
~~~~~~~~~~~~~~~~~~~~~~~~~
Functions related to handling exceptions in the input arguments
"""
import re
def handle_exceptions(args):
"""
Validates the arguments passed through the CLI commands.
:param args: The arguments passed in the CLI, parsed by the docopt module
:return: None
"""
projectname_re = re.compile(r'[^a-zA-Z0-9_]')
if args['genconfig']:
if args['--type'] not in ['scraper', 'crawler']:
raise Exception("--type has to be 'scraper' or 'crawler'")
if args['--selector'] not in ['xpath', 'css']:
raise Exception("--selector has to be 'xpath' or 'css'")
if args['generate'] or args['run']:
if args['--output_type'] not in ['json', 'csv']:
raise Exception("--output_type has to be 'json' or 'csv'")
if args['genconfig'] or args['generate'] or args['run']:
if projectname_re.search(args['<projectname>']) is not None:
raise Exception("<projectname> should consist of letters, digits or _")
return
Update exception handling for levels argument"""
scrapple.utils.exceptions
~~~~~~~~~~~~~~~~~~~~~~~~~
Functions related to handling exceptions in the input arguments
"""
import re
def handle_exceptions(args):
"""
Validates the arguments passed through the CLI commands.
:param args: The arguments passed in the CLI, parsed by the docopt module
:return: None
"""
projectname_re = re.compile(r'[^a-zA-Z0-9_]')
if args['genconfig']:
if args['--type'] not in ['scraper', 'crawler']:
raise Exception("--type has to be 'scraper' or 'crawler'")
if args['--selector'] not in ['xpath', 'css']:
raise Exception("--selector has to be 'xpath' or 'css'")
if args['generate'] or args['run']:
if args['--output_type'] not in ['json', 'csv']:
raise Exception("--output_type has to be 'json' or 'csv'")
if args['genconfig'] or args['generate'] or args['run']:
if projectname_re.search(args['<projectname>']) is not None:
raise Exception("<projectname> should consist of letters, digits or _")
if int(args['--levels']) < 1:
raise Exception("--levels should be greater than, or equal to 1")
return
|
<commit_before>"""
scrapple.utils.exceptions
~~~~~~~~~~~~~~~~~~~~~~~~~
Functions related to handling exceptions in the input arguments
"""
import re
def handle_exceptions(args):
"""
Validates the arguments passed through the CLI commands.
:param args: The arguments passed in the CLI, parsed by the docopt module
:return: None
"""
projectname_re = re.compile(r'[^a-zA-Z0-9_]')
if args['genconfig']:
if args['--type'] not in ['scraper', 'crawler']:
raise Exception("--type has to be 'scraper' or 'crawler'")
if args['--selector'] not in ['xpath', 'css']:
raise Exception("--selector has to be 'xpath' or 'css'")
if args['generate'] or args['run']:
if args['--output_type'] not in ['json', 'csv']:
raise Exception("--output_type has to be 'json' or 'csv'")
if args['genconfig'] or args['generate'] or args['run']:
if projectname_re.search(args['<projectname>']) is not None:
raise Exception("<projectname> should consist of letters, digits or _")
return
<commit_msg>Update exception handling for levels argument<commit_after>"""
scrapple.utils.exceptions
~~~~~~~~~~~~~~~~~~~~~~~~~
Functions related to handling exceptions in the input arguments
"""
import re
def handle_exceptions(args):
"""
Validates the arguments passed through the CLI commands.
:param args: The arguments passed in the CLI, parsed by the docopt module
:return: None
"""
projectname_re = re.compile(r'[^a-zA-Z0-9_]')
if args['genconfig']:
if args['--type'] not in ['scraper', 'crawler']:
raise Exception("--type has to be 'scraper' or 'crawler'")
if args['--selector'] not in ['xpath', 'css']:
raise Exception("--selector has to be 'xpath' or 'css'")
if args['generate'] or args['run']:
if args['--output_type'] not in ['json', 'csv']:
raise Exception("--output_type has to be 'json' or 'csv'")
if args['genconfig'] or args['generate'] or args['run']:
if projectname_re.search(args['<projectname>']) is not None:
raise Exception("<projectname> should consist of letters, digits or _")
if int(args['--levels']) < 1:
raise Exception("--levels should be greater than, or equal to 1")
return
|
b1a9f626b81082123e4df448ed12f992d005d0cc
|
flaskext/debugtoolbar/panels/template.py
|
flaskext/debugtoolbar/panels/template.py
|
from flask import template_rendered
from flaskext.debugtoolbar.panels import DebugPanel
_ = lambda x: x
class TemplateDebugPanel(DebugPanel):
"""
Panel that displays the time a response took in milliseconds.
"""
name = 'Timer'
has_content = True
def __init__(self, *args, **kwargs):
super(self.__class__, self).__init__(*args, **kwargs)
self.templates = []
template_rendered.connect(self._store_template_info)
def _store_template_info(self, sender, **kwargs):
self.templates.append(kwargs)
def process_request(self, request):
pass
def process_response(self, request, response):
pass
def nav_title(self):
return _('Templates')
def nav_subtitle(self):
return "%d rendered" % len(self.templates)
def title(self):
return _('Templates')
def url(self):
return ''
def content(self):
return self.render('panels/template.html', {
'templates': self.templates
})
|
from flask import template_rendered
from flaskext.debugtoolbar.panels import DebugPanel
_ = lambda x: x
class TemplateDebugPanel(DebugPanel):
"""
Panel that displays the time a response took in milliseconds.
"""
name = 'Template'
has_content = True
def __init__(self, *args, **kwargs):
super(self.__class__, self).__init__(*args, **kwargs)
self.templates = []
template_rendered.connect(self._store_template_info)
def _store_template_info(self, sender, **kwargs):
self.templates.append(kwargs)
def process_request(self, request):
pass
def process_response(self, request, response):
pass
def nav_title(self):
return _('Templates')
def nav_subtitle(self):
return "%d rendered" % len(self.templates)
def title(self):
return _('Templates')
def url(self):
return ''
def content(self):
return self.render('panels/template.html', {
'templates': self.templates
})
|
Fix bug in timer panel display
|
Fix bug in timer panel display
|
Python
|
bsd-3-clause
|
lepture/flask-debugtoolbar,lepture/flask-debugtoolbar,dianchang/flask-debugtoolbar,dianchang/flask-debugtoolbar,dianchang/flask-debugtoolbar
|
from flask import template_rendered
from flaskext.debugtoolbar.panels import DebugPanel
_ = lambda x: x
class TemplateDebugPanel(DebugPanel):
"""
Panel that displays the time a response took in milliseconds.
"""
name = 'Timer'
has_content = True
def __init__(self, *args, **kwargs):
super(self.__class__, self).__init__(*args, **kwargs)
self.templates = []
template_rendered.connect(self._store_template_info)
def _store_template_info(self, sender, **kwargs):
self.templates.append(kwargs)
def process_request(self, request):
pass
def process_response(self, request, response):
pass
def nav_title(self):
return _('Templates')
def nav_subtitle(self):
return "%d rendered" % len(self.templates)
def title(self):
return _('Templates')
def url(self):
return ''
def content(self):
return self.render('panels/template.html', {
'templates': self.templates
})
Fix bug in timer panel display
|
from flask import template_rendered
from flaskext.debugtoolbar.panels import DebugPanel
_ = lambda x: x
class TemplateDebugPanel(DebugPanel):
"""
Panel that displays the time a response took in milliseconds.
"""
name = 'Template'
has_content = True
def __init__(self, *args, **kwargs):
super(self.__class__, self).__init__(*args, **kwargs)
self.templates = []
template_rendered.connect(self._store_template_info)
def _store_template_info(self, sender, **kwargs):
self.templates.append(kwargs)
def process_request(self, request):
pass
def process_response(self, request, response):
pass
def nav_title(self):
return _('Templates')
def nav_subtitle(self):
return "%d rendered" % len(self.templates)
def title(self):
return _('Templates')
def url(self):
return ''
def content(self):
return self.render('panels/template.html', {
'templates': self.templates
})
|
<commit_before>from flask import template_rendered
from flaskext.debugtoolbar.panels import DebugPanel
_ = lambda x: x
class TemplateDebugPanel(DebugPanel):
"""
Panel that displays the time a response took in milliseconds.
"""
name = 'Timer'
has_content = True
def __init__(self, *args, **kwargs):
super(self.__class__, self).__init__(*args, **kwargs)
self.templates = []
template_rendered.connect(self._store_template_info)
def _store_template_info(self, sender, **kwargs):
self.templates.append(kwargs)
def process_request(self, request):
pass
def process_response(self, request, response):
pass
def nav_title(self):
return _('Templates')
def nav_subtitle(self):
return "%d rendered" % len(self.templates)
def title(self):
return _('Templates')
def url(self):
return ''
def content(self):
return self.render('panels/template.html', {
'templates': self.templates
})
<commit_msg>Fix bug in timer panel display<commit_after>
|
from flask import template_rendered
from flaskext.debugtoolbar.panels import DebugPanel
_ = lambda x: x
class TemplateDebugPanel(DebugPanel):
"""
Panel that displays the time a response took in milliseconds.
"""
name = 'Template'
has_content = True
def __init__(self, *args, **kwargs):
super(self.__class__, self).__init__(*args, **kwargs)
self.templates = []
template_rendered.connect(self._store_template_info)
def _store_template_info(self, sender, **kwargs):
self.templates.append(kwargs)
def process_request(self, request):
pass
def process_response(self, request, response):
pass
def nav_title(self):
return _('Templates')
def nav_subtitle(self):
return "%d rendered" % len(self.templates)
def title(self):
return _('Templates')
def url(self):
return ''
def content(self):
return self.render('panels/template.html', {
'templates': self.templates
})
|
from flask import template_rendered
from flaskext.debugtoolbar.panels import DebugPanel
_ = lambda x: x
class TemplateDebugPanel(DebugPanel):
"""
Panel that displays the time a response took in milliseconds.
"""
name = 'Timer'
has_content = True
def __init__(self, *args, **kwargs):
super(self.__class__, self).__init__(*args, **kwargs)
self.templates = []
template_rendered.connect(self._store_template_info)
def _store_template_info(self, sender, **kwargs):
self.templates.append(kwargs)
def process_request(self, request):
pass
def process_response(self, request, response):
pass
def nav_title(self):
return _('Templates')
def nav_subtitle(self):
return "%d rendered" % len(self.templates)
def title(self):
return _('Templates')
def url(self):
return ''
def content(self):
return self.render('panels/template.html', {
'templates': self.templates
})
Fix bug in timer panel displayfrom flask import template_rendered
from flaskext.debugtoolbar.panels import DebugPanel
_ = lambda x: x
class TemplateDebugPanel(DebugPanel):
"""
Panel that displays the time a response took in milliseconds.
"""
name = 'Template'
has_content = True
def __init__(self, *args, **kwargs):
super(self.__class__, self).__init__(*args, **kwargs)
self.templates = []
template_rendered.connect(self._store_template_info)
def _store_template_info(self, sender, **kwargs):
self.templates.append(kwargs)
def process_request(self, request):
pass
def process_response(self, request, response):
pass
def nav_title(self):
return _('Templates')
def nav_subtitle(self):
return "%d rendered" % len(self.templates)
def title(self):
return _('Templates')
def url(self):
return ''
def content(self):
return self.render('panels/template.html', {
'templates': self.templates
})
|
<commit_before>from flask import template_rendered
from flaskext.debugtoolbar.panels import DebugPanel
_ = lambda x: x
class TemplateDebugPanel(DebugPanel):
"""
Panel that displays the time a response took in milliseconds.
"""
name = 'Timer'
has_content = True
def __init__(self, *args, **kwargs):
super(self.__class__, self).__init__(*args, **kwargs)
self.templates = []
template_rendered.connect(self._store_template_info)
def _store_template_info(self, sender, **kwargs):
self.templates.append(kwargs)
def process_request(self, request):
pass
def process_response(self, request, response):
pass
def nav_title(self):
return _('Templates')
def nav_subtitle(self):
return "%d rendered" % len(self.templates)
def title(self):
return _('Templates')
def url(self):
return ''
def content(self):
return self.render('panels/template.html', {
'templates': self.templates
})
<commit_msg>Fix bug in timer panel display<commit_after>from flask import template_rendered
from flaskext.debugtoolbar.panels import DebugPanel
_ = lambda x: x
class TemplateDebugPanel(DebugPanel):
"""
Panel that displays the time a response took in milliseconds.
"""
name = 'Template'
has_content = True
def __init__(self, *args, **kwargs):
super(self.__class__, self).__init__(*args, **kwargs)
self.templates = []
template_rendered.connect(self._store_template_info)
def _store_template_info(self, sender, **kwargs):
self.templates.append(kwargs)
def process_request(self, request):
pass
def process_response(self, request, response):
pass
def nav_title(self):
return _('Templates')
def nav_subtitle(self):
return "%d rendered" % len(self.templates)
def title(self):
return _('Templates')
def url(self):
return ''
def content(self):
return self.render('panels/template.html', {
'templates': self.templates
})
|
0041b029cc9b55084c89a9875de5e85728b9083c
|
src/birding/spout.py
|
src/birding/spout.py
|
from __future__ import absolute_import, print_function
import datetime
import itertools
from streamparse.spout import Spout
class SimpleSimulationSpout(Spout):
urls = [
'http://www.parsely.com/',
'http://streamparse.readthedocs.org/',
'https://pypi.python.org/pypi/streamparse',
]
def initialize(self, stormconf, context):
self.url_seq = itertools.cycle(self.urls)
def next_tuple(self):
url = next(self.url_seq)
timestamp = datetime.datetime.now().isoformat()
self.emit([url, timestamp])
|
from __future__ import absolute_import, print_function
import datetime
import itertools
from streamparse.spout import Spout
class SimpleSimulationSpout(Spout):
terms = [
'real-time analytics',
'apache storm',
'pypi',
]
def initialize(self, stormconf, context):
self.term_seq = itertools.cycle(self.terms)
def next_tuple(self):
term = next(self.term_seq)
timestamp = datetime.datetime.now().isoformat()
self.emit([term, timestamp])
|
Change simulation to search terms.
|
Change simulation to search terms.
|
Python
|
apache-2.0
|
Parsely/birding,Parsely/birding
|
from __future__ import absolute_import, print_function
import datetime
import itertools
from streamparse.spout import Spout
class SimpleSimulationSpout(Spout):
urls = [
'http://www.parsely.com/',
'http://streamparse.readthedocs.org/',
'https://pypi.python.org/pypi/streamparse',
]
def initialize(self, stormconf, context):
self.url_seq = itertools.cycle(self.urls)
def next_tuple(self):
url = next(self.url_seq)
timestamp = datetime.datetime.now().isoformat()
self.emit([url, timestamp])
Change simulation to search terms.
|
from __future__ import absolute_import, print_function
import datetime
import itertools
from streamparse.spout import Spout
class SimpleSimulationSpout(Spout):
terms = [
'real-time analytics',
'apache storm',
'pypi',
]
def initialize(self, stormconf, context):
self.term_seq = itertools.cycle(self.terms)
def next_tuple(self):
term = next(self.term_seq)
timestamp = datetime.datetime.now().isoformat()
self.emit([term, timestamp])
|
<commit_before>from __future__ import absolute_import, print_function
import datetime
import itertools
from streamparse.spout import Spout
class SimpleSimulationSpout(Spout):
urls = [
'http://www.parsely.com/',
'http://streamparse.readthedocs.org/',
'https://pypi.python.org/pypi/streamparse',
]
def initialize(self, stormconf, context):
self.url_seq = itertools.cycle(self.urls)
def next_tuple(self):
url = next(self.url_seq)
timestamp = datetime.datetime.now().isoformat()
self.emit([url, timestamp])
<commit_msg>Change simulation to search terms.<commit_after>
|
from __future__ import absolute_import, print_function
import datetime
import itertools
from streamparse.spout import Spout
class SimpleSimulationSpout(Spout):
terms = [
'real-time analytics',
'apache storm',
'pypi',
]
def initialize(self, stormconf, context):
self.term_seq = itertools.cycle(self.terms)
def next_tuple(self):
term = next(self.term_seq)
timestamp = datetime.datetime.now().isoformat()
self.emit([term, timestamp])
|
from __future__ import absolute_import, print_function
import datetime
import itertools
from streamparse.spout import Spout
class SimpleSimulationSpout(Spout):
urls = [
'http://www.parsely.com/',
'http://streamparse.readthedocs.org/',
'https://pypi.python.org/pypi/streamparse',
]
def initialize(self, stormconf, context):
self.url_seq = itertools.cycle(self.urls)
def next_tuple(self):
url = next(self.url_seq)
timestamp = datetime.datetime.now().isoformat()
self.emit([url, timestamp])
Change simulation to search terms.from __future__ import absolute_import, print_function
import datetime
import itertools
from streamparse.spout import Spout
class SimpleSimulationSpout(Spout):
terms = [
'real-time analytics',
'apache storm',
'pypi',
]
def initialize(self, stormconf, context):
self.term_seq = itertools.cycle(self.terms)
def next_tuple(self):
term = next(self.term_seq)
timestamp = datetime.datetime.now().isoformat()
self.emit([term, timestamp])
|
<commit_before>from __future__ import absolute_import, print_function
import datetime
import itertools
from streamparse.spout import Spout
class SimpleSimulationSpout(Spout):
urls = [
'http://www.parsely.com/',
'http://streamparse.readthedocs.org/',
'https://pypi.python.org/pypi/streamparse',
]
def initialize(self, stormconf, context):
self.url_seq = itertools.cycle(self.urls)
def next_tuple(self):
url = next(self.url_seq)
timestamp = datetime.datetime.now().isoformat()
self.emit([url, timestamp])
<commit_msg>Change simulation to search terms.<commit_after>from __future__ import absolute_import, print_function
import datetime
import itertools
from streamparse.spout import Spout
class SimpleSimulationSpout(Spout):
terms = [
'real-time analytics',
'apache storm',
'pypi',
]
def initialize(self, stormconf, context):
self.term_seq = itertools.cycle(self.terms)
def next_tuple(self):
term = next(self.term_seq)
timestamp = datetime.datetime.now().isoformat()
self.emit([term, timestamp])
|
80650a2f32ce8e3de4c26f2bc3fce4bab34cb36f
|
test/__init__.py
|
test/__init__.py
|
# -*- coding: utf-8 -*-
import os
from betamax import Betamax
from betamax_serializers import pretty_json
URL = os.environ.get('CONTENT_SERVICE_URL', 'http://dockerdev:9000')
APIKEY = os.environ.get('CONTENT_SERVICE_APIKEY', '12341234')
Betamax.register_serializer(pretty_json.PrettyJSONSerializer)
with Betamax.configure() as config:
config.cassette_library_dir = 'test/fixtures/cassettes'
config.define_cassette_placeholder('<APIKEY>', APIKEY)
config.default_cassette_options['serialize_with'] = 'prettyjson'
|
# -*- coding: utf-8 -*-
import os
from betamax import Betamax
from betamax_serializers import pretty_json
URL = os.environ.get('CONTENT_SERVICE_URL', 'http://dockerdev:9000')
APIKEY = os.environ.get('CONTENT_SERVICE_APIKEY', '12341234')
if URL.endswith('/'):
URL = URL[:-1]
Betamax.register_serializer(pretty_json.PrettyJSONSerializer)
with Betamax.configure() as config:
config.cassette_library_dir = 'test/fixtures/cassettes'
config.define_cassette_placeholder('<APIKEY>', APIKEY)
config.default_cassette_options['serialize_with'] = 'prettyjson'
|
Normalize URL for test cases.
|
Normalize URL for test cases.
|
Python
|
apache-2.0
|
deconst/submitter,deconst/submitter
|
# -*- coding: utf-8 -*-
import os
from betamax import Betamax
from betamax_serializers import pretty_json
URL = os.environ.get('CONTENT_SERVICE_URL', 'http://dockerdev:9000')
APIKEY = os.environ.get('CONTENT_SERVICE_APIKEY', '12341234')
Betamax.register_serializer(pretty_json.PrettyJSONSerializer)
with Betamax.configure() as config:
config.cassette_library_dir = 'test/fixtures/cassettes'
config.define_cassette_placeholder('<APIKEY>', APIKEY)
config.default_cassette_options['serialize_with'] = 'prettyjson'
Normalize URL for test cases.
|
# -*- coding: utf-8 -*-
import os
from betamax import Betamax
from betamax_serializers import pretty_json
URL = os.environ.get('CONTENT_SERVICE_URL', 'http://dockerdev:9000')
APIKEY = os.environ.get('CONTENT_SERVICE_APIKEY', '12341234')
if URL.endswith('/'):
URL = URL[:-1]
Betamax.register_serializer(pretty_json.PrettyJSONSerializer)
with Betamax.configure() as config:
config.cassette_library_dir = 'test/fixtures/cassettes'
config.define_cassette_placeholder('<APIKEY>', APIKEY)
config.default_cassette_options['serialize_with'] = 'prettyjson'
|
<commit_before># -*- coding: utf-8 -*-
import os
from betamax import Betamax
from betamax_serializers import pretty_json
URL = os.environ.get('CONTENT_SERVICE_URL', 'http://dockerdev:9000')
APIKEY = os.environ.get('CONTENT_SERVICE_APIKEY', '12341234')
Betamax.register_serializer(pretty_json.PrettyJSONSerializer)
with Betamax.configure() as config:
config.cassette_library_dir = 'test/fixtures/cassettes'
config.define_cassette_placeholder('<APIKEY>', APIKEY)
config.default_cassette_options['serialize_with'] = 'prettyjson'
<commit_msg>Normalize URL for test cases.<commit_after>
|
# -*- coding: utf-8 -*-
import os
from betamax import Betamax
from betamax_serializers import pretty_json
URL = os.environ.get('CONTENT_SERVICE_URL', 'http://dockerdev:9000')
APIKEY = os.environ.get('CONTENT_SERVICE_APIKEY', '12341234')
if URL.endswith('/'):
URL = URL[:-1]
Betamax.register_serializer(pretty_json.PrettyJSONSerializer)
with Betamax.configure() as config:
config.cassette_library_dir = 'test/fixtures/cassettes'
config.define_cassette_placeholder('<APIKEY>', APIKEY)
config.default_cassette_options['serialize_with'] = 'prettyjson'
|
# -*- coding: utf-8 -*-
import os
from betamax import Betamax
from betamax_serializers import pretty_json
URL = os.environ.get('CONTENT_SERVICE_URL', 'http://dockerdev:9000')
APIKEY = os.environ.get('CONTENT_SERVICE_APIKEY', '12341234')
Betamax.register_serializer(pretty_json.PrettyJSONSerializer)
with Betamax.configure() as config:
config.cassette_library_dir = 'test/fixtures/cassettes'
config.define_cassette_placeholder('<APIKEY>', APIKEY)
config.default_cassette_options['serialize_with'] = 'prettyjson'
Normalize URL for test cases.# -*- coding: utf-8 -*-
import os
from betamax import Betamax
from betamax_serializers import pretty_json
URL = os.environ.get('CONTENT_SERVICE_URL', 'http://dockerdev:9000')
APIKEY = os.environ.get('CONTENT_SERVICE_APIKEY', '12341234')
if URL.endswith('/'):
URL = URL[:-1]
Betamax.register_serializer(pretty_json.PrettyJSONSerializer)
with Betamax.configure() as config:
config.cassette_library_dir = 'test/fixtures/cassettes'
config.define_cassette_placeholder('<APIKEY>', APIKEY)
config.default_cassette_options['serialize_with'] = 'prettyjson'
|
<commit_before># -*- coding: utf-8 -*-
import os
from betamax import Betamax
from betamax_serializers import pretty_json
URL = os.environ.get('CONTENT_SERVICE_URL', 'http://dockerdev:9000')
APIKEY = os.environ.get('CONTENT_SERVICE_APIKEY', '12341234')
Betamax.register_serializer(pretty_json.PrettyJSONSerializer)
with Betamax.configure() as config:
config.cassette_library_dir = 'test/fixtures/cassettes'
config.define_cassette_placeholder('<APIKEY>', APIKEY)
config.default_cassette_options['serialize_with'] = 'prettyjson'
<commit_msg>Normalize URL for test cases.<commit_after># -*- coding: utf-8 -*-
import os
from betamax import Betamax
from betamax_serializers import pretty_json
URL = os.environ.get('CONTENT_SERVICE_URL', 'http://dockerdev:9000')
APIKEY = os.environ.get('CONTENT_SERVICE_APIKEY', '12341234')
if URL.endswith('/'):
URL = URL[:-1]
Betamax.register_serializer(pretty_json.PrettyJSONSerializer)
with Betamax.configure() as config:
config.cassette_library_dir = 'test/fixtures/cassettes'
config.define_cassette_placeholder('<APIKEY>', APIKEY)
config.default_cassette_options['serialize_with'] = 'prettyjson'
|
36a915d5c116ac9c1067ba1cdd079d0c27054b7e
|
skimage/exposure/__init__.py
|
skimage/exposure/__init__.py
|
from .exposure import histogram, equalize, equalize_hist, \
rescale_intensity, cumulative_distribution
from ._adapthist import equalize_adapthist
|
from .exposure import histogram, equalize, equalize_hist, \
rescale_intensity, cumulative_distribution
from ._adapthist import equalize_adapthist
__all__ = ['histogram',
'equalize',
'equalize_hist',
'equalize_adapthist',
'rescale_intensity',
'cumulative_distribution']
|
Add __all__ to exposure package
|
Add __all__ to exposure package
|
Python
|
bsd-3-clause
|
blink1073/scikit-image,SamHames/scikit-image,almarklein/scikit-image,jwiggins/scikit-image,vighneshbirodkar/scikit-image,warmspringwinds/scikit-image,keflavich/scikit-image,vighneshbirodkar/scikit-image,pratapvardhan/scikit-image,michaelaye/scikit-image,emon10005/scikit-image,ofgulban/scikit-image,chintak/scikit-image,robintw/scikit-image,robintw/scikit-image,rjeli/scikit-image,juliusbierk/scikit-image,youprofit/scikit-image,rjeli/scikit-image,Britefury/scikit-image,newville/scikit-image,warmspringwinds/scikit-image,chintak/scikit-image,almarklein/scikit-image,SamHames/scikit-image,Midafi/scikit-image,bennlich/scikit-image,paalge/scikit-image,blink1073/scikit-image,chriscrosscutler/scikit-image,chintak/scikit-image,dpshelio/scikit-image,GaZ3ll3/scikit-image,chintak/scikit-image,ClinicalGraphics/scikit-image,rjeli/scikit-image,juliusbierk/scikit-image,ClinicalGraphics/scikit-image,Midafi/scikit-image,michaelaye/scikit-image,Britefury/scikit-image,michaelpacer/scikit-image,chriscrosscutler/scikit-image,ajaybhat/scikit-image,almarklein/scikit-image,SamHames/scikit-image,oew1v07/scikit-image,bennlich/scikit-image,almarklein/scikit-image,emon10005/scikit-image,jwiggins/scikit-image,youprofit/scikit-image,paalge/scikit-image,paalge/scikit-image,WarrenWeckesser/scikits-image,pratapvardhan/scikit-image,michaelpacer/scikit-image,Hiyorimi/scikit-image,SamHames/scikit-image,vighneshbirodkar/scikit-image,oew1v07/scikit-image,dpshelio/scikit-image,newville/scikit-image,WarrenWeckesser/scikits-image,GaZ3ll3/scikit-image,ajaybhat/scikit-image,bsipocz/scikit-image,bsipocz/scikit-image,keflavich/scikit-image,Hiyorimi/scikit-image,ofgulban/scikit-image,ofgulban/scikit-image
|
from .exposure import histogram, equalize, equalize_hist, \
rescale_intensity, cumulative_distribution
from ._adapthist import equalize_adapthist
Add __all__ to exposure package
|
from .exposure import histogram, equalize, equalize_hist, \
rescale_intensity, cumulative_distribution
from ._adapthist import equalize_adapthist
__all__ = ['histogram',
'equalize',
'equalize_hist',
'equalize_adapthist',
'rescale_intensity',
'cumulative_distribution']
|
<commit_before>from .exposure import histogram, equalize, equalize_hist, \
rescale_intensity, cumulative_distribution
from ._adapthist import equalize_adapthist
<commit_msg>Add __all__ to exposure package<commit_after>
|
from .exposure import histogram, equalize, equalize_hist, \
rescale_intensity, cumulative_distribution
from ._adapthist import equalize_adapthist
__all__ = ['histogram',
'equalize',
'equalize_hist',
'equalize_adapthist',
'rescale_intensity',
'cumulative_distribution']
|
from .exposure import histogram, equalize, equalize_hist, \
rescale_intensity, cumulative_distribution
from ._adapthist import equalize_adapthist
Add __all__ to exposure packagefrom .exposure import histogram, equalize, equalize_hist, \
rescale_intensity, cumulative_distribution
from ._adapthist import equalize_adapthist
__all__ = ['histogram',
'equalize',
'equalize_hist',
'equalize_adapthist',
'rescale_intensity',
'cumulative_distribution']
|
<commit_before>from .exposure import histogram, equalize, equalize_hist, \
rescale_intensity, cumulative_distribution
from ._adapthist import equalize_adapthist
<commit_msg>Add __all__ to exposure package<commit_after>from .exposure import histogram, equalize, equalize_hist, \
rescale_intensity, cumulative_distribution
from ._adapthist import equalize_adapthist
__all__ = ['histogram',
'equalize',
'equalize_hist',
'equalize_adapthist',
'rescale_intensity',
'cumulative_distribution']
|
81e3425bc6b2b9b35071afd7c14322f0dd52b418
|
oneanddone/tests/functional/tests/test_task_assignment.py
|
oneanddone/tests/functional/tests/test_task_assignment.py
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pytest
from pages.home import HomePage
class TestAvailableTasks():
@pytest.mark.nondestructive
def test_assign_tasks(self, base_url, selenium, nonrepeatable_assigned_task, new_user):
home_page = HomePage(selenium, base_url).open()
home_page.login(new_user)
available_tasks_page = home_page.click_available_tasks()
home_page.search_for_task(nonrepeatable_assigned_task.name)
assert len(available_tasks_page.available_tasks) == 0
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pytest
from pages.home import HomePage
class TestAvailableTasks():
@pytest.mark.nondestructive
def test_assign_tasks(self, base_url, selenium, nonrepeatable_assigned_task, task, new_user):
home_page = HomePage(selenium, base_url).open()
home_page.login(new_user)
available_tasks_page = home_page.click_available_tasks()
# Check if assignable task is found
home_page.search_for_task(task.name)
assert len(available_tasks_page.available_tasks)
home_page.search_for_task(nonrepeatable_assigned_task.name)
assert len(available_tasks_page.available_tasks) == 0
|
Add positive test for search
|
Add positive test for search
|
Python
|
mpl-2.0
|
VarnaSuresh/oneanddone,VarnaSuresh/oneanddone,VarnaSuresh/oneanddone,VarnaSuresh/oneanddone
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pytest
from pages.home import HomePage
class TestAvailableTasks():
@pytest.mark.nondestructive
def test_assign_tasks(self, base_url, selenium, nonrepeatable_assigned_task, new_user):
home_page = HomePage(selenium, base_url).open()
home_page.login(new_user)
available_tasks_page = home_page.click_available_tasks()
home_page.search_for_task(nonrepeatable_assigned_task.name)
assert len(available_tasks_page.available_tasks) == 0
Add positive test for search
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pytest
from pages.home import HomePage
class TestAvailableTasks():
@pytest.mark.nondestructive
def test_assign_tasks(self, base_url, selenium, nonrepeatable_assigned_task, task, new_user):
home_page = HomePage(selenium, base_url).open()
home_page.login(new_user)
available_tasks_page = home_page.click_available_tasks()
# Check if assignable task is found
home_page.search_for_task(task.name)
assert len(available_tasks_page.available_tasks)
home_page.search_for_task(nonrepeatable_assigned_task.name)
assert len(available_tasks_page.available_tasks) == 0
|
<commit_before># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pytest
from pages.home import HomePage
class TestAvailableTasks():
@pytest.mark.nondestructive
def test_assign_tasks(self, base_url, selenium, nonrepeatable_assigned_task, new_user):
home_page = HomePage(selenium, base_url).open()
home_page.login(new_user)
available_tasks_page = home_page.click_available_tasks()
home_page.search_for_task(nonrepeatable_assigned_task.name)
assert len(available_tasks_page.available_tasks) == 0
<commit_msg>Add positive test for search<commit_after>
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pytest
from pages.home import HomePage
class TestAvailableTasks():
@pytest.mark.nondestructive
def test_assign_tasks(self, base_url, selenium, nonrepeatable_assigned_task, task, new_user):
home_page = HomePage(selenium, base_url).open()
home_page.login(new_user)
available_tasks_page = home_page.click_available_tasks()
# Check if assignable task is found
home_page.search_for_task(task.name)
assert len(available_tasks_page.available_tasks)
home_page.search_for_task(nonrepeatable_assigned_task.name)
assert len(available_tasks_page.available_tasks) == 0
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pytest
from pages.home import HomePage
class TestAvailableTasks():
@pytest.mark.nondestructive
def test_assign_tasks(self, base_url, selenium, nonrepeatable_assigned_task, new_user):
home_page = HomePage(selenium, base_url).open()
home_page.login(new_user)
available_tasks_page = home_page.click_available_tasks()
home_page.search_for_task(nonrepeatable_assigned_task.name)
assert len(available_tasks_page.available_tasks) == 0
Add positive test for search# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pytest
from pages.home import HomePage
class TestAvailableTasks():
@pytest.mark.nondestructive
def test_assign_tasks(self, base_url, selenium, nonrepeatable_assigned_task, task, new_user):
home_page = HomePage(selenium, base_url).open()
home_page.login(new_user)
available_tasks_page = home_page.click_available_tasks()
# Check if assignable task is found
home_page.search_for_task(task.name)
assert len(available_tasks_page.available_tasks)
home_page.search_for_task(nonrepeatable_assigned_task.name)
assert len(available_tasks_page.available_tasks) == 0
|
<commit_before># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pytest
from pages.home import HomePage
class TestAvailableTasks():
@pytest.mark.nondestructive
def test_assign_tasks(self, base_url, selenium, nonrepeatable_assigned_task, new_user):
home_page = HomePage(selenium, base_url).open()
home_page.login(new_user)
available_tasks_page = home_page.click_available_tasks()
home_page.search_for_task(nonrepeatable_assigned_task.name)
assert len(available_tasks_page.available_tasks) == 0
<commit_msg>Add positive test for search<commit_after># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pytest
from pages.home import HomePage
class TestAvailableTasks():
@pytest.mark.nondestructive
def test_assign_tasks(self, base_url, selenium, nonrepeatable_assigned_task, task, new_user):
home_page = HomePage(selenium, base_url).open()
home_page.login(new_user)
available_tasks_page = home_page.click_available_tasks()
# Check if assignable task is found
home_page.search_for_task(task.name)
assert len(available_tasks_page.available_tasks)
home_page.search_for_task(nonrepeatable_assigned_task.name)
assert len(available_tasks_page.available_tasks) == 0
|
62047d430b1eef61999599b4848237a31f28deae
|
testprodapp/testapp/admin.py
|
testprodapp/testapp/admin.py
|
from django.contrib import admin
from django.http import HttpResponseRedirect, Http404
from django.conf.urls import patterns
from django.conf.urls import url
from django.shortcuts import redirect
from .models import TestResult
from .prod_tests.entity_counting_test import test_entity_count_vs_length
class TestResultAdmin(admin.ModelAdmin):
list_display = (
'name',
'django_version',
'djangae_version',
'last_modified',
'status',
'score'
)
class TestAdminSite(admin.AdminSite):
index_template = "testapp/admin_index.html"
def __init__(self, *args, **kwargs):
self.tests = {
"Counting Performance": test_entity_count_vs_length
}
super(TestAdminSite, self).__init__(*args, **kwargs)
def each_context(self, request):
return {
"admin_site": self
}
def get_urls(self):
urls = super(TestAdminSite, self).get_urls()
my_urls = [
url(r'^trigger/$', self.trigger_test),
]
return my_urls + urls
def trigger_test(self, request):
try:
test = self.tests[request.POST["name"]]
except KeyError:
raise Http404("Invalid test")
test()
return HttpResponseRedirect("/admin/")
admin_site = TestAdminSite()
admin_site.register(TestResult, TestResultAdmin)
|
from django.contrib import admin
from django.http import HttpResponseRedirect, Http404
from django.conf.urls import url
from django.utils.decorators import method_decorator
from django.contrib.admin.views.decorators import staff_member_required
from .models import TestResult
from .prod_tests.entity_counting_test import test_entity_count_vs_length
class TestResultAdmin(admin.ModelAdmin):
list_display = (
'name',
'django_version',
'djangae_version',
'last_modified',
'status',
'score'
)
class TestAdminSite(admin.AdminSite):
index_template = "testapp/admin_index.html"
def __init__(self, *args, **kwargs):
self.tests = {
"Counting Performance": test_entity_count_vs_length
}
super(TestAdminSite, self).__init__(*args, **kwargs)
def each_context(self, request):
return {
"admin_site": self
}
def get_urls(self):
urls = super(TestAdminSite, self).get_urls()
my_urls = [
url(r'^trigger/$', self.trigger_test),
]
return my_urls + urls
@method_decorator(staff_member_required)
def trigger_test(self, request):
try:
test = self.tests[request.POST["name"]]
except KeyError:
raise Http404("Invalid test")
test()
return HttpResponseRedirect("/admin/")
admin_site = TestAdminSite()
admin_site.register(TestResult, TestResultAdmin)
|
Make sure the trigger view is locked down
|
Make sure the trigger view is locked down
|
Python
|
bsd-3-clause
|
grzes/djangae,potatolondon/djangae,grzes/djangae,potatolondon/djangae,grzes/djangae
|
from django.contrib import admin
from django.http import HttpResponseRedirect, Http404
from django.conf.urls import patterns
from django.conf.urls import url
from django.shortcuts import redirect
from .models import TestResult
from .prod_tests.entity_counting_test import test_entity_count_vs_length
class TestResultAdmin(admin.ModelAdmin):
list_display = (
'name',
'django_version',
'djangae_version',
'last_modified',
'status',
'score'
)
class TestAdminSite(admin.AdminSite):
index_template = "testapp/admin_index.html"
def __init__(self, *args, **kwargs):
self.tests = {
"Counting Performance": test_entity_count_vs_length
}
super(TestAdminSite, self).__init__(*args, **kwargs)
def each_context(self, request):
return {
"admin_site": self
}
def get_urls(self):
urls = super(TestAdminSite, self).get_urls()
my_urls = [
url(r'^trigger/$', self.trigger_test),
]
return my_urls + urls
def trigger_test(self, request):
try:
test = self.tests[request.POST["name"]]
except KeyError:
raise Http404("Invalid test")
test()
return HttpResponseRedirect("/admin/")
admin_site = TestAdminSite()
admin_site.register(TestResult, TestResultAdmin)
Make sure the trigger view is locked down
|
from django.contrib import admin
from django.http import HttpResponseRedirect, Http404
from django.conf.urls import url
from django.utils.decorators import method_decorator
from django.contrib.admin.views.decorators import staff_member_required
from .models import TestResult
from .prod_tests.entity_counting_test import test_entity_count_vs_length
class TestResultAdmin(admin.ModelAdmin):
list_display = (
'name',
'django_version',
'djangae_version',
'last_modified',
'status',
'score'
)
class TestAdminSite(admin.AdminSite):
index_template = "testapp/admin_index.html"
def __init__(self, *args, **kwargs):
self.tests = {
"Counting Performance": test_entity_count_vs_length
}
super(TestAdminSite, self).__init__(*args, **kwargs)
def each_context(self, request):
return {
"admin_site": self
}
def get_urls(self):
urls = super(TestAdminSite, self).get_urls()
my_urls = [
url(r'^trigger/$', self.trigger_test),
]
return my_urls + urls
@method_decorator(staff_member_required)
def trigger_test(self, request):
try:
test = self.tests[request.POST["name"]]
except KeyError:
raise Http404("Invalid test")
test()
return HttpResponseRedirect("/admin/")
admin_site = TestAdminSite()
admin_site.register(TestResult, TestResultAdmin)
|
<commit_before>from django.contrib import admin
from django.http import HttpResponseRedirect, Http404
from django.conf.urls import patterns
from django.conf.urls import url
from django.shortcuts import redirect
from .models import TestResult
from .prod_tests.entity_counting_test import test_entity_count_vs_length
class TestResultAdmin(admin.ModelAdmin):
list_display = (
'name',
'django_version',
'djangae_version',
'last_modified',
'status',
'score'
)
class TestAdminSite(admin.AdminSite):
index_template = "testapp/admin_index.html"
def __init__(self, *args, **kwargs):
self.tests = {
"Counting Performance": test_entity_count_vs_length
}
super(TestAdminSite, self).__init__(*args, **kwargs)
def each_context(self, request):
return {
"admin_site": self
}
def get_urls(self):
urls = super(TestAdminSite, self).get_urls()
my_urls = [
url(r'^trigger/$', self.trigger_test),
]
return my_urls + urls
def trigger_test(self, request):
try:
test = self.tests[request.POST["name"]]
except KeyError:
raise Http404("Invalid test")
test()
return HttpResponseRedirect("/admin/")
admin_site = TestAdminSite()
admin_site.register(TestResult, TestResultAdmin)
<commit_msg>Make sure the trigger view is locked down<commit_after>
|
from django.contrib import admin
from django.http import HttpResponseRedirect, Http404
from django.conf.urls import url
from django.utils.decorators import method_decorator
from django.contrib.admin.views.decorators import staff_member_required
from .models import TestResult
from .prod_tests.entity_counting_test import test_entity_count_vs_length
class TestResultAdmin(admin.ModelAdmin):
list_display = (
'name',
'django_version',
'djangae_version',
'last_modified',
'status',
'score'
)
class TestAdminSite(admin.AdminSite):
index_template = "testapp/admin_index.html"
def __init__(self, *args, **kwargs):
self.tests = {
"Counting Performance": test_entity_count_vs_length
}
super(TestAdminSite, self).__init__(*args, **kwargs)
def each_context(self, request):
return {
"admin_site": self
}
def get_urls(self):
urls = super(TestAdminSite, self).get_urls()
my_urls = [
url(r'^trigger/$', self.trigger_test),
]
return my_urls + urls
@method_decorator(staff_member_required)
def trigger_test(self, request):
try:
test = self.tests[request.POST["name"]]
except KeyError:
raise Http404("Invalid test")
test()
return HttpResponseRedirect("/admin/")
admin_site = TestAdminSite()
admin_site.register(TestResult, TestResultAdmin)
|
from django.contrib import admin
from django.http import HttpResponseRedirect, Http404
from django.conf.urls import patterns
from django.conf.urls import url
from django.shortcuts import redirect
from .models import TestResult
from .prod_tests.entity_counting_test import test_entity_count_vs_length
class TestResultAdmin(admin.ModelAdmin):
list_display = (
'name',
'django_version',
'djangae_version',
'last_modified',
'status',
'score'
)
class TestAdminSite(admin.AdminSite):
index_template = "testapp/admin_index.html"
def __init__(self, *args, **kwargs):
self.tests = {
"Counting Performance": test_entity_count_vs_length
}
super(TestAdminSite, self).__init__(*args, **kwargs)
def each_context(self, request):
return {
"admin_site": self
}
def get_urls(self):
urls = super(TestAdminSite, self).get_urls()
my_urls = [
url(r'^trigger/$', self.trigger_test),
]
return my_urls + urls
def trigger_test(self, request):
try:
test = self.tests[request.POST["name"]]
except KeyError:
raise Http404("Invalid test")
test()
return HttpResponseRedirect("/admin/")
admin_site = TestAdminSite()
admin_site.register(TestResult, TestResultAdmin)
Make sure the trigger view is locked downfrom django.contrib import admin
from django.http import HttpResponseRedirect, Http404
from django.conf.urls import url
from django.utils.decorators import method_decorator
from django.contrib.admin.views.decorators import staff_member_required
from .models import TestResult
from .prod_tests.entity_counting_test import test_entity_count_vs_length
class TestResultAdmin(admin.ModelAdmin):
list_display = (
'name',
'django_version',
'djangae_version',
'last_modified',
'status',
'score'
)
class TestAdminSite(admin.AdminSite):
index_template = "testapp/admin_index.html"
def __init__(self, *args, **kwargs):
self.tests = {
"Counting Performance": test_entity_count_vs_length
}
super(TestAdminSite, self).__init__(*args, **kwargs)
def each_context(self, request):
return {
"admin_site": self
}
def get_urls(self):
urls = super(TestAdminSite, self).get_urls()
my_urls = [
url(r'^trigger/$', self.trigger_test),
]
return my_urls + urls
@method_decorator(staff_member_required)
def trigger_test(self, request):
try:
test = self.tests[request.POST["name"]]
except KeyError:
raise Http404("Invalid test")
test()
return HttpResponseRedirect("/admin/")
admin_site = TestAdminSite()
admin_site.register(TestResult, TestResultAdmin)
|
<commit_before>from django.contrib import admin
from django.http import HttpResponseRedirect, Http404
from django.conf.urls import patterns
from django.conf.urls import url
from django.shortcuts import redirect
from .models import TestResult
from .prod_tests.entity_counting_test import test_entity_count_vs_length
class TestResultAdmin(admin.ModelAdmin):
list_display = (
'name',
'django_version',
'djangae_version',
'last_modified',
'status',
'score'
)
class TestAdminSite(admin.AdminSite):
index_template = "testapp/admin_index.html"
def __init__(self, *args, **kwargs):
self.tests = {
"Counting Performance": test_entity_count_vs_length
}
super(TestAdminSite, self).__init__(*args, **kwargs)
def each_context(self, request):
return {
"admin_site": self
}
def get_urls(self):
urls = super(TestAdminSite, self).get_urls()
my_urls = [
url(r'^trigger/$', self.trigger_test),
]
return my_urls + urls
def trigger_test(self, request):
try:
test = self.tests[request.POST["name"]]
except KeyError:
raise Http404("Invalid test")
test()
return HttpResponseRedirect("/admin/")
admin_site = TestAdminSite()
admin_site.register(TestResult, TestResultAdmin)
<commit_msg>Make sure the trigger view is locked down<commit_after>from django.contrib import admin
from django.http import HttpResponseRedirect, Http404
from django.conf.urls import url
from django.utils.decorators import method_decorator
from django.contrib.admin.views.decorators import staff_member_required
from .models import TestResult
from .prod_tests.entity_counting_test import test_entity_count_vs_length
class TestResultAdmin(admin.ModelAdmin):
list_display = (
'name',
'django_version',
'djangae_version',
'last_modified',
'status',
'score'
)
class TestAdminSite(admin.AdminSite):
index_template = "testapp/admin_index.html"
def __init__(self, *args, **kwargs):
self.tests = {
"Counting Performance": test_entity_count_vs_length
}
super(TestAdminSite, self).__init__(*args, **kwargs)
def each_context(self, request):
return {
"admin_site": self
}
def get_urls(self):
urls = super(TestAdminSite, self).get_urls()
my_urls = [
url(r'^trigger/$', self.trigger_test),
]
return my_urls + urls
@method_decorator(staff_member_required)
def trigger_test(self, request):
try:
test = self.tests[request.POST["name"]]
except KeyError:
raise Http404("Invalid test")
test()
return HttpResponseRedirect("/admin/")
admin_site = TestAdminSite()
admin_site.register(TestResult, TestResultAdmin)
|
4ca4cfde2daceced65aa0dd4a7bf0226f23efc33
|
byceps/config_defaults.py
|
byceps/config_defaults.py
|
"""
byceps.config_defaults
~~~~~~~~~~~~~~~~~~~~~~
Default configuration values
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import timedelta
# database connection
SQLALCHEMY_ECHO = False
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
TIMEZONE = 'Europe/Berlin'
# static content files path
PATH_DATA = Path('./data')
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# shop
SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
|
"""
byceps.config_defaults
~~~~~~~~~~~~~~~~~~~~~~
Default configuration values
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import timedelta
from pathlib import Path
# database connection
SQLALCHEMY_ECHO = False
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
TIMEZONE = 'Europe/Berlin'
# static content files path
PATH_DATA = Path('./data')
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# shop
SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
|
Add missing import of `Path`
|
Add missing import of `Path`
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps,m-ober/byceps
|
"""
byceps.config_defaults
~~~~~~~~~~~~~~~~~~~~~~
Default configuration values
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import timedelta
# database connection
SQLALCHEMY_ECHO = False
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
TIMEZONE = 'Europe/Berlin'
# static content files path
PATH_DATA = Path('./data')
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# shop
SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
Add missing import of `Path`
|
"""
byceps.config_defaults
~~~~~~~~~~~~~~~~~~~~~~
Default configuration values
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import timedelta
from pathlib import Path
# database connection
SQLALCHEMY_ECHO = False
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
TIMEZONE = 'Europe/Berlin'
# static content files path
PATH_DATA = Path('./data')
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# shop
SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
|
<commit_before>"""
byceps.config_defaults
~~~~~~~~~~~~~~~~~~~~~~
Default configuration values
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import timedelta
# database connection
SQLALCHEMY_ECHO = False
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
TIMEZONE = 'Europe/Berlin'
# static content files path
PATH_DATA = Path('./data')
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# shop
SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
<commit_msg>Add missing import of `Path`<commit_after>
|
"""
byceps.config_defaults
~~~~~~~~~~~~~~~~~~~~~~
Default configuration values
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import timedelta
from pathlib import Path
# database connection
SQLALCHEMY_ECHO = False
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
TIMEZONE = 'Europe/Berlin'
# static content files path
PATH_DATA = Path('./data')
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# shop
SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
|
"""
byceps.config_defaults
~~~~~~~~~~~~~~~~~~~~~~
Default configuration values
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import timedelta
# database connection
SQLALCHEMY_ECHO = False
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
TIMEZONE = 'Europe/Berlin'
# static content files path
PATH_DATA = Path('./data')
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# shop
SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
Add missing import of `Path`"""
byceps.config_defaults
~~~~~~~~~~~~~~~~~~~~~~
Default configuration values
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import timedelta
from pathlib import Path
# database connection
SQLALCHEMY_ECHO = False
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
TIMEZONE = 'Europe/Berlin'
# static content files path
PATH_DATA = Path('./data')
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# shop
SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
|
<commit_before>"""
byceps.config_defaults
~~~~~~~~~~~~~~~~~~~~~~
Default configuration values
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import timedelta
# database connection
SQLALCHEMY_ECHO = False
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
TIMEZONE = 'Europe/Berlin'
# static content files path
PATH_DATA = Path('./data')
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# shop
SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
<commit_msg>Add missing import of `Path`<commit_after>"""
byceps.config_defaults
~~~~~~~~~~~~~~~~~~~~~~
Default configuration values
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import timedelta
from pathlib import Path
# database connection
SQLALCHEMY_ECHO = False
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
TIMEZONE = 'Europe/Berlin'
# static content files path
PATH_DATA = Path('./data')
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# shop
SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
|
bde67aa0ad7aec9c281da65a59e8e8ab82b75918
|
app/cogs/admin.py
|
app/cogs/admin.py
|
from discord.ext import commands
from discord.ext.commands import Bot
from discord.ext.commands import Context
import checks
class Admin:
def __init__(self, bot):
self.bot = bot
@commands.command(pass_context=True)
@checks.is_owner()
async def my_id(self, ctx: Context):
"""Temporary command to show off checks.is_owner"""
await self.bot.say(ctx.message.author.id)
def setup(bot: Bot):
bot.add_cog(Admin(bot))
|
import logging
from discord import ClientException
from discord.ext import commands
from discord.ext.commands import Bot, Context
from discord.ext.commands import UserInputError
import checks
logger = logging.getLogger(__name__)
class Admin:
def __init__(self, bot: Bot):
self.bot = bot
@commands.group(pass_context=True, hidden=True)
async def extension(self, ctx: Context):
logger.info('%s called admin command %s' % (ctx.message.author, ctx.invoked_subcommand))
@extension.command()
@checks.is_owner()
async def load(self, cog: str):
try:
self.bot.load_extension('cogs.' + cog)
logger.info('Loaded extension: %s' % cog)
await self.bot.say('**[Admin]** Loaded extension: %s' % cog)
except UserInputError:
await self.bot.say("**[Admin]** incorrect arguments")
except ClientException:
await self.bot.say("**[Admin]** Extension could not be loaded")
@extension.command()
@checks.is_owner()
async def unload(self, cog: str):
try:
self.bot.unload_extension('cogs.' + cog)
logger.info('Unloaded extension: %s' % cog)
await self.bot.say('**[Admin]** Unloaded extension: %s' % cog)
except UserInputError:
await self.bot.say("**[Admin]** incorrect arguments")
@extension.command()
@checks.is_owner()
async def reload(self, cog: str):
try:
self.bot.unload_extension('cogs.' + cog)
self.bot.load_extension('cogs.' + cog)
logger.info('Reloaded extension: %s' % cog)
await self.bot.say('**[Admin]** Reloaded extension: %s' % cog)
except UserInputError:
await self.bot.say("**[Admin]** incorrect arguments")
except ClientException:
await self.bot.say("**[Admin]** Extension could not be loaded")
def setup(bot: Bot):
bot.add_cog(Admin(bot))
|
Add extension load/unload commands (owner only)
|
Add extension load/unload commands (owner only)
|
Python
|
mit
|
andrewlin16/duckbot,andrewlin16/duckbot
|
from discord.ext import commands
from discord.ext.commands import Bot
from discord.ext.commands import Context
import checks
class Admin:
def __init__(self, bot):
self.bot = bot
@commands.command(pass_context=True)
@checks.is_owner()
async def my_id(self, ctx: Context):
"""Temporary command to show off checks.is_owner"""
await self.bot.say(ctx.message.author.id)
def setup(bot: Bot):
bot.add_cog(Admin(bot))
Add extension load/unload commands (owner only)
|
import logging
from discord import ClientException
from discord.ext import commands
from discord.ext.commands import Bot, Context
from discord.ext.commands import UserInputError
import checks
logger = logging.getLogger(__name__)
class Admin:
def __init__(self, bot: Bot):
self.bot = bot
@commands.group(pass_context=True, hidden=True)
async def extension(self, ctx: Context):
logger.info('%s called admin command %s' % (ctx.message.author, ctx.invoked_subcommand))
@extension.command()
@checks.is_owner()
async def load(self, cog: str):
try:
self.bot.load_extension('cogs.' + cog)
logger.info('Loaded extension: %s' % cog)
await self.bot.say('**[Admin]** Loaded extension: %s' % cog)
except UserInputError:
await self.bot.say("**[Admin]** incorrect arguments")
except ClientException:
await self.bot.say("**[Admin]** Extension could not be loaded")
@extension.command()
@checks.is_owner()
async def unload(self, cog: str):
try:
self.bot.unload_extension('cogs.' + cog)
logger.info('Unloaded extension: %s' % cog)
await self.bot.say('**[Admin]** Unloaded extension: %s' % cog)
except UserInputError:
await self.bot.say("**[Admin]** incorrect arguments")
@extension.command()
@checks.is_owner()
async def reload(self, cog: str):
try:
self.bot.unload_extension('cogs.' + cog)
self.bot.load_extension('cogs.' + cog)
logger.info('Reloaded extension: %s' % cog)
await self.bot.say('**[Admin]** Reloaded extension: %s' % cog)
except UserInputError:
await self.bot.say("**[Admin]** incorrect arguments")
except ClientException:
await self.bot.say("**[Admin]** Extension could not be loaded")
def setup(bot: Bot):
bot.add_cog(Admin(bot))
|
<commit_before>from discord.ext import commands
from discord.ext.commands import Bot
from discord.ext.commands import Context
import checks
class Admin:
def __init__(self, bot):
self.bot = bot
@commands.command(pass_context=True)
@checks.is_owner()
async def my_id(self, ctx: Context):
"""Temporary command to show off checks.is_owner"""
await self.bot.say(ctx.message.author.id)
def setup(bot: Bot):
bot.add_cog(Admin(bot))
<commit_msg>Add extension load/unload commands (owner only)<commit_after>
|
import logging
from discord import ClientException
from discord.ext import commands
from discord.ext.commands import Bot, Context
from discord.ext.commands import UserInputError
import checks
logger = logging.getLogger(__name__)
class Admin:
def __init__(self, bot: Bot):
self.bot = bot
@commands.group(pass_context=True, hidden=True)
async def extension(self, ctx: Context):
logger.info('%s called admin command %s' % (ctx.message.author, ctx.invoked_subcommand))
@extension.command()
@checks.is_owner()
async def load(self, cog: str):
try:
self.bot.load_extension('cogs.' + cog)
logger.info('Loaded extension: %s' % cog)
await self.bot.say('**[Admin]** Loaded extension: %s' % cog)
except UserInputError:
await self.bot.say("**[Admin]** incorrect arguments")
except ClientException:
await self.bot.say("**[Admin]** Extension could not be loaded")
@extension.command()
@checks.is_owner()
async def unload(self, cog: str):
try:
self.bot.unload_extension('cogs.' + cog)
logger.info('Unloaded extension: %s' % cog)
await self.bot.say('**[Admin]** Unloaded extension: %s' % cog)
except UserInputError:
await self.bot.say("**[Admin]** incorrect arguments")
@extension.command()
@checks.is_owner()
async def reload(self, cog: str):
try:
self.bot.unload_extension('cogs.' + cog)
self.bot.load_extension('cogs.' + cog)
logger.info('Reloaded extension: %s' % cog)
await self.bot.say('**[Admin]** Reloaded extension: %s' % cog)
except UserInputError:
await self.bot.say("**[Admin]** incorrect arguments")
except ClientException:
await self.bot.say("**[Admin]** Extension could not be loaded")
def setup(bot: Bot):
bot.add_cog(Admin(bot))
|
from discord.ext import commands
from discord.ext.commands import Bot
from discord.ext.commands import Context
import checks
class Admin:
def __init__(self, bot):
self.bot = bot
@commands.command(pass_context=True)
@checks.is_owner()
async def my_id(self, ctx: Context):
"""Temporary command to show off checks.is_owner"""
await self.bot.say(ctx.message.author.id)
def setup(bot: Bot):
bot.add_cog(Admin(bot))
Add extension load/unload commands (owner only)import logging
from discord import ClientException
from discord.ext import commands
from discord.ext.commands import Bot, Context
from discord.ext.commands import UserInputError
import checks
logger = logging.getLogger(__name__)
class Admin:
def __init__(self, bot: Bot):
self.bot = bot
@commands.group(pass_context=True, hidden=True)
async def extension(self, ctx: Context):
logger.info('%s called admin command %s' % (ctx.message.author, ctx.invoked_subcommand))
@extension.command()
@checks.is_owner()
async def load(self, cog: str):
try:
self.bot.load_extension('cogs.' + cog)
logger.info('Loaded extension: %s' % cog)
await self.bot.say('**[Admin]** Loaded extension: %s' % cog)
except UserInputError:
await self.bot.say("**[Admin]** incorrect arguments")
except ClientException:
await self.bot.say("**[Admin]** Extension could not be loaded")
@extension.command()
@checks.is_owner()
async def unload(self, cog: str):
try:
self.bot.unload_extension('cogs.' + cog)
logger.info('Unloaded extension: %s' % cog)
await self.bot.say('**[Admin]** Unloaded extension: %s' % cog)
except UserInputError:
await self.bot.say("**[Admin]** incorrect arguments")
@extension.command()
@checks.is_owner()
async def reload(self, cog: str):
try:
self.bot.unload_extension('cogs.' + cog)
self.bot.load_extension('cogs.' + cog)
logger.info('Reloaded extension: %s' % cog)
await self.bot.say('**[Admin]** Reloaded extension: %s' % cog)
except UserInputError:
await self.bot.say("**[Admin]** incorrect arguments")
except ClientException:
await self.bot.say("**[Admin]** Extension could not be loaded")
def setup(bot: Bot):
bot.add_cog(Admin(bot))
|
<commit_before>from discord.ext import commands
from discord.ext.commands import Bot
from discord.ext.commands import Context
import checks
class Admin:
def __init__(self, bot):
self.bot = bot
@commands.command(pass_context=True)
@checks.is_owner()
async def my_id(self, ctx: Context):
"""Temporary command to show off checks.is_owner"""
await self.bot.say(ctx.message.author.id)
def setup(bot: Bot):
bot.add_cog(Admin(bot))
<commit_msg>Add extension load/unload commands (owner only)<commit_after>import logging
from discord import ClientException
from discord.ext import commands
from discord.ext.commands import Bot, Context
from discord.ext.commands import UserInputError
import checks
logger = logging.getLogger(__name__)
class Admin:
def __init__(self, bot: Bot):
self.bot = bot
@commands.group(pass_context=True, hidden=True)
async def extension(self, ctx: Context):
logger.info('%s called admin command %s' % (ctx.message.author, ctx.invoked_subcommand))
@extension.command()
@checks.is_owner()
async def load(self, cog: str):
try:
self.bot.load_extension('cogs.' + cog)
logger.info('Loaded extension: %s' % cog)
await self.bot.say('**[Admin]** Loaded extension: %s' % cog)
except UserInputError:
await self.bot.say("**[Admin]** incorrect arguments")
except ClientException:
await self.bot.say("**[Admin]** Extension could not be loaded")
@extension.command()
@checks.is_owner()
async def unload(self, cog: str):
try:
self.bot.unload_extension('cogs.' + cog)
logger.info('Unloaded extension: %s' % cog)
await self.bot.say('**[Admin]** Unloaded extension: %s' % cog)
except UserInputError:
await self.bot.say("**[Admin]** incorrect arguments")
@extension.command()
@checks.is_owner()
async def reload(self, cog: str):
try:
self.bot.unload_extension('cogs.' + cog)
self.bot.load_extension('cogs.' + cog)
logger.info('Reloaded extension: %s' % cog)
await self.bot.say('**[Admin]** Reloaded extension: %s' % cog)
except UserInputError:
await self.bot.say("**[Admin]** incorrect arguments")
except ClientException:
await self.bot.say("**[Admin]** Extension could not be loaded")
def setup(bot: Bot):
bot.add_cog(Admin(bot))
|
f9af714b63f4c3c8370a5f2cffbbd7dfb6dc3181
|
nova/tests/scheduler/__init__.py
|
nova/tests/scheduler/__init__.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 Openstack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# NOTE(vish): this forces the fixtures from tests/__init.py:setup() to work
from nova.tests import *
|
Make sure test setup is run for subdirectories
|
Make sure test setup is run for subdirectories
|
Python
|
apache-2.0
|
n0ano/gantt,n0ano/gantt
|
Make sure test setup is run for subdirectories
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 Openstack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# NOTE(vish): this forces the fixtures from tests/__init.py:setup() to work
from nova.tests import *
|
<commit_before><commit_msg>Make sure test setup is run for subdirectories<commit_after>
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 Openstack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# NOTE(vish): this forces the fixtures from tests/__init.py:setup() to work
from nova.tests import *
|
Make sure test setup is run for subdirectories# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 Openstack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# NOTE(vish): this forces the fixtures from tests/__init.py:setup() to work
from nova.tests import *
|
<commit_before><commit_msg>Make sure test setup is run for subdirectories<commit_after># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 Openstack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# NOTE(vish): this forces the fixtures from tests/__init.py:setup() to work
from nova.tests import *
|
|
483ba69bca57899054270cb24c41b0d2c01e7ff0
|
opentreemap/stormwater/models.py
|
opentreemap/stormwater/models.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.contrib.gis.db import models
from treemap.models import MapFeature
class PolygonalMapFeature(MapFeature):
area_field_name = 'polygon'
skip_detail_form = True
polygon = models.MultiPolygonField(srid=3857)
class Bioswale(PolygonalMapFeature):
pass
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.contrib.gis.db import models
from treemap.models import MapFeature
class PolygonalMapFeature(MapFeature):
area_field_name = 'polygon'
skip_detail_form = True
polygon = models.MultiPolygonField(srid=3857)
class Bioswale(PolygonalMapFeature):
collection_udf_defaults = {
'Stewardship': [
{'name': 'Action',
'choices': ['Watered',
'Pruned',
'Mulched, Had Compost Added, or Soil Amended',
'Cleared of Trash or Debris'],
'type': 'choice'},
{'type': 'date',
'name': 'Date'}],
}
|
Add placeholder defaults for bioswale stewardship
|
Add placeholder defaults for bioswale stewardship
|
Python
|
agpl-3.0
|
clever-crow-consulting/otm-core,clever-crow-consulting/otm-core,recklessromeo/otm-core,maurizi/otm-core,recklessromeo/otm-core,RickMohr/otm-core,RickMohr/otm-core,RickMohr/otm-core,clever-crow-consulting/otm-core,clever-crow-consulting/otm-core,RickMohr/otm-core,recklessromeo/otm-core,maurizi/otm-core,maurizi/otm-core,maurizi/otm-core,recklessromeo/otm-core
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.contrib.gis.db import models
from treemap.models import MapFeature
class PolygonalMapFeature(MapFeature):
area_field_name = 'polygon'
skip_detail_form = True
polygon = models.MultiPolygonField(srid=3857)
class Bioswale(PolygonalMapFeature):
pass
Add placeholder defaults for bioswale stewardship
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.contrib.gis.db import models
from treemap.models import MapFeature
class PolygonalMapFeature(MapFeature):
area_field_name = 'polygon'
skip_detail_form = True
polygon = models.MultiPolygonField(srid=3857)
class Bioswale(PolygonalMapFeature):
collection_udf_defaults = {
'Stewardship': [
{'name': 'Action',
'choices': ['Watered',
'Pruned',
'Mulched, Had Compost Added, or Soil Amended',
'Cleared of Trash or Debris'],
'type': 'choice'},
{'type': 'date',
'name': 'Date'}],
}
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.contrib.gis.db import models
from treemap.models import MapFeature
class PolygonalMapFeature(MapFeature):
area_field_name = 'polygon'
skip_detail_form = True
polygon = models.MultiPolygonField(srid=3857)
class Bioswale(PolygonalMapFeature):
pass
<commit_msg>Add placeholder defaults for bioswale stewardship<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.contrib.gis.db import models
from treemap.models import MapFeature
class PolygonalMapFeature(MapFeature):
area_field_name = 'polygon'
skip_detail_form = True
polygon = models.MultiPolygonField(srid=3857)
class Bioswale(PolygonalMapFeature):
collection_udf_defaults = {
'Stewardship': [
{'name': 'Action',
'choices': ['Watered',
'Pruned',
'Mulched, Had Compost Added, or Soil Amended',
'Cleared of Trash or Debris'],
'type': 'choice'},
{'type': 'date',
'name': 'Date'}],
}
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.contrib.gis.db import models
from treemap.models import MapFeature
class PolygonalMapFeature(MapFeature):
area_field_name = 'polygon'
skip_detail_form = True
polygon = models.MultiPolygonField(srid=3857)
class Bioswale(PolygonalMapFeature):
pass
Add placeholder defaults for bioswale stewardship# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.contrib.gis.db import models
from treemap.models import MapFeature
class PolygonalMapFeature(MapFeature):
area_field_name = 'polygon'
skip_detail_form = True
polygon = models.MultiPolygonField(srid=3857)
class Bioswale(PolygonalMapFeature):
collection_udf_defaults = {
'Stewardship': [
{'name': 'Action',
'choices': ['Watered',
'Pruned',
'Mulched, Had Compost Added, or Soil Amended',
'Cleared of Trash or Debris'],
'type': 'choice'},
{'type': 'date',
'name': 'Date'}],
}
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.contrib.gis.db import models
from treemap.models import MapFeature
class PolygonalMapFeature(MapFeature):
area_field_name = 'polygon'
skip_detail_form = True
polygon = models.MultiPolygonField(srid=3857)
class Bioswale(PolygonalMapFeature):
pass
<commit_msg>Add placeholder defaults for bioswale stewardship<commit_after># -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.contrib.gis.db import models
from treemap.models import MapFeature
class PolygonalMapFeature(MapFeature):
area_field_name = 'polygon'
skip_detail_form = True
polygon = models.MultiPolygonField(srid=3857)
class Bioswale(PolygonalMapFeature):
collection_udf_defaults = {
'Stewardship': [
{'name': 'Action',
'choices': ['Watered',
'Pruned',
'Mulched, Had Compost Added, or Soil Amended',
'Cleared of Trash or Debris'],
'type': 'choice'},
{'type': 'date',
'name': 'Date'}],
}
|
7481aede4fffc9bc6cf307a70d6d96f2eabbe0be
|
floo/shared.py
|
floo/shared.py
|
__VERSION__ = '0.02'
DEBUG = False
PLUGIN_PATH = None
MAX_RETRIES = 20
INITIAL_RECONNECT_DELAY = 500 # milliseconds
CONNECTED = False
COLAB_DIR = ''
PROJECT_PATH = ''
DEFAULT_HOST = ''
DEFAULT_PORT = ''
SECURE = True
USERNAME = ''
SECRET = ''
ALERT_ON_MSG = True
PERMS = []
WORKSPACE_WINDOW = None
CHAT_VIEW = None
CHAT_VIEW_PATH = None
STALKER_MODE = False
IGNORE_MODIFIED_EVENTS = False
AGENT = None
|
__VERSION__ = '0.02'
DEBUG = False
PLUGIN_PATH = None
MAX_RETRIES = 20
INITIAL_RECONNECT_DELAY = 500 # milliseconds
CONNECTED = False
COLAB_DIR = ''
PROJECT_PATH = ''
DEFAULT_HOST = ''
DEFAULT_PORT = ''
SECURE = True
USERNAME = ''
SECRET = ''
ALERT_ON_MSG = True
PERMS = []
WORKSPACE_WINDOW = None
CHAT_VIEW = None
CHAT_VIEW_PATH = None
STALKER_MODE = False
IGNORE_MODIFIED_EVENTS = False
TICK_TIME = 50
AGENT = None
|
Make tick time a global
|
Make tick time a global
|
Python
|
apache-2.0
|
Floobits/floobits-sublime,Floobits/floobits-sublime
|
__VERSION__ = '0.02'
DEBUG = False
PLUGIN_PATH = None
MAX_RETRIES = 20
INITIAL_RECONNECT_DELAY = 500 # milliseconds
CONNECTED = False
COLAB_DIR = ''
PROJECT_PATH = ''
DEFAULT_HOST = ''
DEFAULT_PORT = ''
SECURE = True
USERNAME = ''
SECRET = ''
ALERT_ON_MSG = True
PERMS = []
WORKSPACE_WINDOW = None
CHAT_VIEW = None
CHAT_VIEW_PATH = None
STALKER_MODE = False
IGNORE_MODIFIED_EVENTS = False
AGENT = None
Make tick time a global
|
__VERSION__ = '0.02'
DEBUG = False
PLUGIN_PATH = None
MAX_RETRIES = 20
INITIAL_RECONNECT_DELAY = 500 # milliseconds
CONNECTED = False
COLAB_DIR = ''
PROJECT_PATH = ''
DEFAULT_HOST = ''
DEFAULT_PORT = ''
SECURE = True
USERNAME = ''
SECRET = ''
ALERT_ON_MSG = True
PERMS = []
WORKSPACE_WINDOW = None
CHAT_VIEW = None
CHAT_VIEW_PATH = None
STALKER_MODE = False
IGNORE_MODIFIED_EVENTS = False
TICK_TIME = 50
AGENT = None
|
<commit_before>__VERSION__ = '0.02'
DEBUG = False
PLUGIN_PATH = None
MAX_RETRIES = 20
INITIAL_RECONNECT_DELAY = 500 # milliseconds
CONNECTED = False
COLAB_DIR = ''
PROJECT_PATH = ''
DEFAULT_HOST = ''
DEFAULT_PORT = ''
SECURE = True
USERNAME = ''
SECRET = ''
ALERT_ON_MSG = True
PERMS = []
WORKSPACE_WINDOW = None
CHAT_VIEW = None
CHAT_VIEW_PATH = None
STALKER_MODE = False
IGNORE_MODIFIED_EVENTS = False
AGENT = None
<commit_msg>Make tick time a global<commit_after>
|
__VERSION__ = '0.02'
DEBUG = False
PLUGIN_PATH = None
MAX_RETRIES = 20
INITIAL_RECONNECT_DELAY = 500 # milliseconds
CONNECTED = False
COLAB_DIR = ''
PROJECT_PATH = ''
DEFAULT_HOST = ''
DEFAULT_PORT = ''
SECURE = True
USERNAME = ''
SECRET = ''
ALERT_ON_MSG = True
PERMS = []
WORKSPACE_WINDOW = None
CHAT_VIEW = None
CHAT_VIEW_PATH = None
STALKER_MODE = False
IGNORE_MODIFIED_EVENTS = False
TICK_TIME = 50
AGENT = None
|
__VERSION__ = '0.02'
DEBUG = False
PLUGIN_PATH = None
MAX_RETRIES = 20
INITIAL_RECONNECT_DELAY = 500 # milliseconds
CONNECTED = False
COLAB_DIR = ''
PROJECT_PATH = ''
DEFAULT_HOST = ''
DEFAULT_PORT = ''
SECURE = True
USERNAME = ''
SECRET = ''
ALERT_ON_MSG = True
PERMS = []
WORKSPACE_WINDOW = None
CHAT_VIEW = None
CHAT_VIEW_PATH = None
STALKER_MODE = False
IGNORE_MODIFIED_EVENTS = False
AGENT = None
Make tick time a global__VERSION__ = '0.02'
DEBUG = False
PLUGIN_PATH = None
MAX_RETRIES = 20
INITIAL_RECONNECT_DELAY = 500 # milliseconds
CONNECTED = False
COLAB_DIR = ''
PROJECT_PATH = ''
DEFAULT_HOST = ''
DEFAULT_PORT = ''
SECURE = True
USERNAME = ''
SECRET = ''
ALERT_ON_MSG = True
PERMS = []
WORKSPACE_WINDOW = None
CHAT_VIEW = None
CHAT_VIEW_PATH = None
STALKER_MODE = False
IGNORE_MODIFIED_EVENTS = False
TICK_TIME = 50
AGENT = None
|
<commit_before>__VERSION__ = '0.02'
DEBUG = False
PLUGIN_PATH = None
MAX_RETRIES = 20
INITIAL_RECONNECT_DELAY = 500 # milliseconds
CONNECTED = False
COLAB_DIR = ''
PROJECT_PATH = ''
DEFAULT_HOST = ''
DEFAULT_PORT = ''
SECURE = True
USERNAME = ''
SECRET = ''
ALERT_ON_MSG = True
PERMS = []
WORKSPACE_WINDOW = None
CHAT_VIEW = None
CHAT_VIEW_PATH = None
STALKER_MODE = False
IGNORE_MODIFIED_EVENTS = False
AGENT = None
<commit_msg>Make tick time a global<commit_after>__VERSION__ = '0.02'
DEBUG = False
PLUGIN_PATH = None
MAX_RETRIES = 20
INITIAL_RECONNECT_DELAY = 500 # milliseconds
CONNECTED = False
COLAB_DIR = ''
PROJECT_PATH = ''
DEFAULT_HOST = ''
DEFAULT_PORT = ''
SECURE = True
USERNAME = ''
SECRET = ''
ALERT_ON_MSG = True
PERMS = []
WORKSPACE_WINDOW = None
CHAT_VIEW = None
CHAT_VIEW_PATH = None
STALKER_MODE = False
IGNORE_MODIFIED_EVENTS = False
TICK_TIME = 50
AGENT = None
|
d637df6df273c2d07cbb834cf2729a4251ee2ff1
|
cyder/base/forms.py
|
cyder/base/forms.py
|
from django import forms
class BugReportForm(forms.Form):
bug = forms.CharField(label="Bug: - (required)", required=True)
description = forms.CharField(
label="Description: - (required)",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=True)
reproduce = forms.CharField(
label="How to reproduce the error",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False)
expected = forms.CharField(
label="The expected result",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False)
actual = forms.CharField(
label="The actual result",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False)
session_data = forms.CharField(widget=forms.HiddenInput())
|
from django import forms
class BugReportForm(forms.Form):
bug = forms.CharField(label="Bug (required)", required=True)
description = forms.CharField(
label="Description (required)",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=True)
reproduce = forms.CharField(
label="How to reproduce the error",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False)
expected = forms.CharField(
label="The expected result",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False)
actual = forms.CharField(
label="The actual result",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False)
session_data = forms.CharField(widget=forms.HiddenInput())
|
Remove redundant colons and superfluous hyphens
|
Remove redundant colons and superfluous hyphens
|
Python
|
bsd-3-clause
|
OSU-Net/cyder,akeym/cyder,zeeman/cyder,zeeman/cyder,drkitty/cyder,murrown/cyder,drkitty/cyder,drkitty/cyder,murrown/cyder,akeym/cyder,akeym/cyder,OSU-Net/cyder,murrown/cyder,OSU-Net/cyder,drkitty/cyder,akeym/cyder,OSU-Net/cyder,murrown/cyder,zeeman/cyder,zeeman/cyder
|
from django import forms
class BugReportForm(forms.Form):
bug = forms.CharField(label="Bug: - (required)", required=True)
description = forms.CharField(
label="Description: - (required)",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=True)
reproduce = forms.CharField(
label="How to reproduce the error",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False)
expected = forms.CharField(
label="The expected result",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False)
actual = forms.CharField(
label="The actual result",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False)
session_data = forms.CharField(widget=forms.HiddenInput())
Remove redundant colons and superfluous hyphens
|
from django import forms
class BugReportForm(forms.Form):
bug = forms.CharField(label="Bug (required)", required=True)
description = forms.CharField(
label="Description (required)",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=True)
reproduce = forms.CharField(
label="How to reproduce the error",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False)
expected = forms.CharField(
label="The expected result",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False)
actual = forms.CharField(
label="The actual result",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False)
session_data = forms.CharField(widget=forms.HiddenInput())
|
<commit_before>from django import forms
class BugReportForm(forms.Form):
bug = forms.CharField(label="Bug: - (required)", required=True)
description = forms.CharField(
label="Description: - (required)",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=True)
reproduce = forms.CharField(
label="How to reproduce the error",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False)
expected = forms.CharField(
label="The expected result",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False)
actual = forms.CharField(
label="The actual result",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False)
session_data = forms.CharField(widget=forms.HiddenInput())
<commit_msg>Remove redundant colons and superfluous hyphens<commit_after>
|
from django import forms
class BugReportForm(forms.Form):
bug = forms.CharField(label="Bug (required)", required=True)
description = forms.CharField(
label="Description (required)",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=True)
reproduce = forms.CharField(
label="How to reproduce the error",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False)
expected = forms.CharField(
label="The expected result",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False)
actual = forms.CharField(
label="The actual result",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False)
session_data = forms.CharField(widget=forms.HiddenInput())
|
from django import forms
class BugReportForm(forms.Form):
bug = forms.CharField(label="Bug: - (required)", required=True)
description = forms.CharField(
label="Description: - (required)",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=True)
reproduce = forms.CharField(
label="How to reproduce the error",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False)
expected = forms.CharField(
label="The expected result",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False)
actual = forms.CharField(
label="The actual result",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False)
session_data = forms.CharField(widget=forms.HiddenInput())
Remove redundant colons and superfluous hyphensfrom django import forms
class BugReportForm(forms.Form):
bug = forms.CharField(label="Bug (required)", required=True)
description = forms.CharField(
label="Description (required)",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=True)
reproduce = forms.CharField(
label="How to reproduce the error",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False)
expected = forms.CharField(
label="The expected result",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False)
actual = forms.CharField(
label="The actual result",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False)
session_data = forms.CharField(widget=forms.HiddenInput())
|
<commit_before>from django import forms
class BugReportForm(forms.Form):
bug = forms.CharField(label="Bug: - (required)", required=True)
description = forms.CharField(
label="Description: - (required)",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=True)
reproduce = forms.CharField(
label="How to reproduce the error",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False)
expected = forms.CharField(
label="The expected result",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False)
actual = forms.CharField(
label="The actual result",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False)
session_data = forms.CharField(widget=forms.HiddenInput())
<commit_msg>Remove redundant colons and superfluous hyphens<commit_after>from django import forms
class BugReportForm(forms.Form):
bug = forms.CharField(label="Bug (required)", required=True)
description = forms.CharField(
label="Description (required)",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=True)
reproduce = forms.CharField(
label="How to reproduce the error",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False)
expected = forms.CharField(
label="The expected result",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False)
actual = forms.CharField(
label="The actual result",
widget=forms.Textarea(attrs={'rows': 4, 'cols': 50}), required=False)
session_data = forms.CharField(widget=forms.HiddenInput())
|
36b771ce3028200f57255633dbfa4f6b991e1674
|
fuckit_commit.py
|
fuckit_commit.py
|
'''
This module will send SMS reminders periodically, using Twilio.
The aim is to encourage user to code, commit and push to GitHub everyday
'''
import requests
from twilio.rest import TwilioRestClient
def get_configuration():
'''
Set Twilio configuration
'''
pass
def get_twilio_client(config):
'''
Connect to Twilio Client
'''
return TwilioRestClient(config.account_sid, config.auth_token)
def send_sms(client):
'''
Send SMS reminder
'''
pass
def main():
config = get_configuration()
client = get_configuration(config)
send_sms(client)
if __name__ == "__main__":
main()
|
'''
This module will send SMS reminders periodically, using Twilio.
The aim is to encourage user to code, commit and push to GitHub everyday
'''
import requests
from twilio.rest import TwilioRestClient
def send_sms():
'''
Send SMS reminder
'''
config = {'account_sid' : '', 'auth_token' : ''}
client = TwilioRestClient(config['account_sid'], config['auth_token'])
message = client.messages.create(to="+15148871900", from_="+14387937890",
body="Hello there!")
def main():
send_sms()
if __name__ == "__main__":
main()
|
Add code to send sms
|
Add code to send sms
|
Python
|
mit
|
ueg1990/fuckit_commit
|
'''
This module will send SMS reminders periodically, using Twilio.
The aim is to encourage user to code, commit and push to GitHub everyday
'''
import requests
from twilio.rest import TwilioRestClient
def get_configuration():
'''
Set Twilio configuration
'''
pass
def get_twilio_client(config):
'''
Connect to Twilio Client
'''
return TwilioRestClient(config.account_sid, config.auth_token)
def send_sms(client):
'''
Send SMS reminder
'''
pass
def main():
config = get_configuration()
client = get_configuration(config)
send_sms(client)
if __name__ == "__main__":
main()
Add code to send sms
|
'''
This module will send SMS reminders periodically, using Twilio.
The aim is to encourage user to code, commit and push to GitHub everyday
'''
import requests
from twilio.rest import TwilioRestClient
def send_sms():
'''
Send SMS reminder
'''
config = {'account_sid' : '', 'auth_token' : ''}
client = TwilioRestClient(config['account_sid'], config['auth_token'])
message = client.messages.create(to="+15148871900", from_="+14387937890",
body="Hello there!")
def main():
send_sms()
if __name__ == "__main__":
main()
|
<commit_before>'''
This module will send SMS reminders periodically, using Twilio.
The aim is to encourage user to code, commit and push to GitHub everyday
'''
import requests
from twilio.rest import TwilioRestClient
def get_configuration():
'''
Set Twilio configuration
'''
pass
def get_twilio_client(config):
'''
Connect to Twilio Client
'''
return TwilioRestClient(config.account_sid, config.auth_token)
def send_sms(client):
'''
Send SMS reminder
'''
pass
def main():
config = get_configuration()
client = get_configuration(config)
send_sms(client)
if __name__ == "__main__":
main()
<commit_msg>Add code to send sms<commit_after>
|
'''
This module will send SMS reminders periodically, using Twilio.
The aim is to encourage user to code, commit and push to GitHub everyday
'''
import requests
from twilio.rest import TwilioRestClient
def send_sms():
'''
Send SMS reminder
'''
config = {'account_sid' : '', 'auth_token' : ''}
client = TwilioRestClient(config['account_sid'], config['auth_token'])
message = client.messages.create(to="+15148871900", from_="+14387937890",
body="Hello there!")
def main():
send_sms()
if __name__ == "__main__":
main()
|
'''
This module will send SMS reminders periodically, using Twilio.
The aim is to encourage user to code, commit and push to GitHub everyday
'''
import requests
from twilio.rest import TwilioRestClient
def get_configuration():
'''
Set Twilio configuration
'''
pass
def get_twilio_client(config):
'''
Connect to Twilio Client
'''
return TwilioRestClient(config.account_sid, config.auth_token)
def send_sms(client):
'''
Send SMS reminder
'''
pass
def main():
config = get_configuration()
client = get_configuration(config)
send_sms(client)
if __name__ == "__main__":
main()
Add code to send sms'''
This module will send SMS reminders periodically, using Twilio.
The aim is to encourage user to code, commit and push to GitHub everyday
'''
import requests
from twilio.rest import TwilioRestClient
def send_sms():
'''
Send SMS reminder
'''
config = {'account_sid' : '', 'auth_token' : ''}
client = TwilioRestClient(config['account_sid'], config['auth_token'])
message = client.messages.create(to="+15148871900", from_="+14387937890",
body="Hello there!")
def main():
send_sms()
if __name__ == "__main__":
main()
|
<commit_before>'''
This module will send SMS reminders periodically, using Twilio.
The aim is to encourage user to code, commit and push to GitHub everyday
'''
import requests
from twilio.rest import TwilioRestClient
def get_configuration():
'''
Set Twilio configuration
'''
pass
def get_twilio_client(config):
'''
Connect to Twilio Client
'''
return TwilioRestClient(config.account_sid, config.auth_token)
def send_sms(client):
'''
Send SMS reminder
'''
pass
def main():
config = get_configuration()
client = get_configuration(config)
send_sms(client)
if __name__ == "__main__":
main()
<commit_msg>Add code to send sms<commit_after>'''
This module will send SMS reminders periodically, using Twilio.
The aim is to encourage user to code, commit and push to GitHub everyday
'''
import requests
from twilio.rest import TwilioRestClient
def send_sms():
'''
Send SMS reminder
'''
config = {'account_sid' : '', 'auth_token' : ''}
client = TwilioRestClient(config['account_sid'], config['auth_token'])
message = client.messages.create(to="+15148871900", from_="+14387937890",
body="Hello there!")
def main():
send_sms()
if __name__ == "__main__":
main()
|
945d64464857581052e18d79e62a6fde8bdecb9b
|
fabfile.py
|
fabfile.py
|
import sys
from fabric.api import local, task
@task
def start_db():
if sys.platform.startswith('darwin'):
# Mac OSX
local('postgres -D /usr/local/var/postgres -s')
|
import sys
from pathlib import Path
from fabric.api import local, task, lcd, env
from fabric.contrib.console import confirm
from fabric.utils import abort
src_p = Path(env.real_fabfile).parent / 'src'
@task
def start_db():
if sys.platform.startswith('darwin'):
# Mac OSX
local('postgres -D /usr/local/var/postgres -s')
@task
def backup():
cmd_dumpdata = 'python manage.py dumpdata '
with lcd(src_p):
local(
cmd_dumpdata + 'users.EmailUser data_sources.DataSource | '
'tee ../db_dump/user_sources.json'
)
local(
cmd_dumpdata + 'experiments | '
'tee ../db_dump/experiments.json'
)
local(
cmd_dumpdata + 'analyses.GenomeReference | '
'tee ../db_dump/genome_reference.json'
)
@task
def reborn():
with lcd(src_p.as_posix()):
db_dump_dir = Path(env.cwd, '../db_dump')
if not (
db_dump_dir.joinpath('user_sources.json').exists() and
db_dump_dir.joinpath('genome_reference.json').exists() and
db_dump_dir.joinpath('experiments.json').exists()
):
abort('Backup the import database content first!')
        confirm('Destroy and re-create the current database?', False)
local('dropdb biocloud')
local('createdb biocloud')
local('python manage.py migrate')
local('python manage.py loaddata ../db_dump/user_sources.json')
local('python manage.py loaddata ../db_dump/genome_reference.json')
local('python manage.py loaddata ../db_dump/experiments.json')
|
Add fab command to backup and destroy database
|
Add fab command to backup and destroy database
|
Python
|
mit
|
ccwang002/biocloud-server-kai,ccwang002/biocloud-server-kai,ccwang002/biocloud-server-kai
|
import sys
from fabric.api import local, task
@task
def start_db():
if sys.platform.startswith('darwin'):
# Mac OSX
local('postgres -D /usr/local/var/postgres -s')
Add fab command to backup and destroy database
|
import sys
from pathlib import Path
from fabric.api import local, task, lcd, env
from fabric.contrib.console import confirm
from fabric.utils import abort
src_p = Path(env.real_fabfile).parent / 'src'
@task
def start_db():
if sys.platform.startswith('darwin'):
# Mac OSX
local('postgres -D /usr/local/var/postgres -s')
@task
def backup():
cmd_dumpdata = 'python manage.py dumpdata '
with lcd(src_p):
local(
cmd_dumpdata + 'users.EmailUser data_sources.DataSource | '
'tee ../db_dump/user_sources.json'
)
local(
cmd_dumpdata + 'experiments | '
'tee ../db_dump/experiments.json'
)
local(
cmd_dumpdata + 'analyses.GenomeReference | '
'tee ../db_dump/genome_reference.json'
)
@task
def reborn():
with lcd(src_p.as_posix()):
db_dump_dir = Path(env.cwd, '../db_dump')
if not (
db_dump_dir.joinpath('user_sources.json').exists() and
db_dump_dir.joinpath('genome_reference.json').exists() and
db_dump_dir.joinpath('experiments.json').exists()
):
abort('Backup the import database content first!')
        confirm('Destroy and re-create the current database?', False)
local('dropdb biocloud')
local('createdb biocloud')
local('python manage.py migrate')
local('python manage.py loaddata ../db_dump/user_sources.json')
local('python manage.py loaddata ../db_dump/genome_reference.json')
local('python manage.py loaddata ../db_dump/experiments.json')
|
<commit_before>import sys
from fabric.api import local, task
@task
def start_db():
if sys.platform.startswith('darwin'):
# Mac OSX
local('postgres -D /usr/local/var/postgres -s')
<commit_msg>Add fab command to backup and destroy database<commit_after>
|
import sys
from pathlib import Path
from fabric.api import local, task, lcd, env
from fabric.contrib.console import confirm
from fabric.utils import abort
src_p = Path(env.real_fabfile).parent / 'src'
@task
def start_db():
if sys.platform.startswith('darwin'):
# Mac OSX
local('postgres -D /usr/local/var/postgres -s')
@task
def backup():
cmd_dumpdata = 'python manage.py dumpdata '
with lcd(src_p):
local(
cmd_dumpdata + 'users.EmailUser data_sources.DataSource | '
'tee ../db_dump/user_sources.json'
)
local(
cmd_dumpdata + 'experiments | '
'tee ../db_dump/experiments.json'
)
local(
cmd_dumpdata + 'analyses.GenomeReference | '
'tee ../db_dump/genome_reference.json'
)
@task
def reborn():
with lcd(src_p.as_posix()):
db_dump_dir = Path(env.cwd, '../db_dump')
if not (
db_dump_dir.joinpath('user_sources.json').exists() and
db_dump_dir.joinpath('genome_reference.json').exists() and
db_dump_dir.joinpath('experiments.json').exists()
):
abort('Backup the import database content first!')
        confirm('Destroy and re-create the current database?', False)
local('dropdb biocloud')
local('createdb biocloud')
local('python manage.py migrate')
local('python manage.py loaddata ../db_dump/user_sources.json')
local('python manage.py loaddata ../db_dump/genome_reference.json')
local('python manage.py loaddata ../db_dump/experiments.json')
|
import sys
from fabric.api import local, task
@task
def start_db():
if sys.platform.startswith('darwin'):
# Mac OSX
local('postgres -D /usr/local/var/postgres -s')
Add fab command to backup and destroy databaseimport sys
from pathlib import Path
from fabric.api import local, task, lcd, env
from fabric.contrib.console import confirm
from fabric.utils import abort
src_p = Path(env.real_fabfile).parent / 'src'
@task
def start_db():
if sys.platform.startswith('darwin'):
# Mac OSX
local('postgres -D /usr/local/var/postgres -s')
@task
def backup():
cmd_dumpdata = 'python manage.py dumpdata '
with lcd(src_p):
local(
cmd_dumpdata + 'users.EmailUser data_sources.DataSource | '
'tee ../db_dump/user_sources.json'
)
local(
cmd_dumpdata + 'experiments | '
'tee ../db_dump/experiments.json'
)
local(
cmd_dumpdata + 'analyses.GenomeReference | '
'tee ../db_dump/genome_reference.json'
)
@task
def reborn():
with lcd(src_p.as_posix()):
db_dump_dir = Path(env.cwd, '../db_dump')
if not (
db_dump_dir.joinpath('user_sources.json').exists() and
db_dump_dir.joinpath('genome_reference.json').exists() and
db_dump_dir.joinpath('experiments.json').exists()
):
abort('Backup the import database content first!')
        confirm('Destroy and re-create the current database?', False)
local('dropdb biocloud')
local('createdb biocloud')
local('python manage.py migrate')
local('python manage.py loaddata ../db_dump/user_sources.json')
local('python manage.py loaddata ../db_dump/genome_reference.json')
local('python manage.py loaddata ../db_dump/experiments.json')
|
<commit_before>import sys
from fabric.api import local, task
@task
def start_db():
if sys.platform.startswith('darwin'):
# Mac OSX
local('postgres -D /usr/local/var/postgres -s')
<commit_msg>Add fab command to backup and destroy database<commit_after>import sys
from pathlib import Path
from fabric.api import local, task, lcd, env
from fabric.contrib.console import confirm
from fabric.utils import abort
src_p = Path(env.real_fabfile).parent / 'src'
@task
def start_db():
if sys.platform.startswith('darwin'):
# Mac OSX
local('postgres -D /usr/local/var/postgres -s')
@task
def backup():
cmd_dumpdata = 'python manage.py dumpdata '
with lcd(src_p):
local(
cmd_dumpdata + 'users.EmailUser data_sources.DataSource | '
'tee ../db_dump/user_sources.json'
)
local(
cmd_dumpdata + 'experiments | '
'tee ../db_dump/experiments.json'
)
local(
cmd_dumpdata + 'analyses.GenomeReference | '
'tee ../db_dump/genome_reference.json'
)
@task
def reborn():
with lcd(src_p.as_posix()):
db_dump_dir = Path(env.cwd, '../db_dump')
if not (
db_dump_dir.joinpath('user_sources.json').exists() and
db_dump_dir.joinpath('genome_reference.json').exists() and
db_dump_dir.joinpath('experiments.json').exists()
):
abort('Backup the import database content first!')
        confirm('Destroy and re-create the current database?', False)
local('dropdb biocloud')
local('createdb biocloud')
local('python manage.py migrate')
local('python manage.py loaddata ../db_dump/user_sources.json')
local('python manage.py loaddata ../db_dump/genome_reference.json')
local('python manage.py loaddata ../db_dump/experiments.json')
|
b9ef360f5ff1b24c274e1194c1272d953e780683
|
pymt/printers/nc/tests/test_ugrid_read.py
|
pymt/printers/nc/tests/test_ugrid_read.py
|
import os
from pymt.printers.nc.read import field_fromfile
_BASE_URL_FOR_TEST_FILES = ('http://csdms.colorado.edu/thredds/fileServer'
'/benchmark/ugrid/')
_TMP_DIR = 'tmp'
def setup():
import tempfile
globals().update({
'_TMP_DIR': tempfile.mkdtemp(dir='.')
})
def teardown():
from shutil import rmtree
rmtree(_TMP_DIR)
def fetch_data_file(filename):
import urllib2
remote_file = urllib2.urlopen(_BASE_URL_FOR_TEST_FILES + filename)
local_file = os.path.join(_TMP_DIR, filename)
with open(local_file, 'w') as netcdf_file:
netcdf_file.write(remote_file.read())
return local_file
def test_unstructured_2d():
field = field_fromfile(fetch_data_file('unstructured.2d.nc'), fmt='NETCDF4')
def test_rectilinear_1d():
field = field_fromfile(fetch_data_file('rectilinear.1d.nc'), fmt='NETCDF4')
def test_rectilinear_2d():
field = field_fromfile(fetch_data_file('rectilinear.2d.nc'), fmt='NETCDF4')
def test_rectilinear_3d():
field = field_fromfile(fetch_data_file('rectilinear.3d.nc'), fmt='NETCDF4')
|
import os
from pymt.printers.nc.read import field_fromfile
_BASE_URL_FOR_TEST_FILES = ('https://csdms.colorado.edu/thredds/fileServer'
'/benchmark/ugrid/')
_TMP_DIR = 'tmp'
def setup():
import tempfile
globals().update({
'_TMP_DIR': tempfile.mkdtemp(dir='.')
})
def teardown():
from shutil import rmtree
rmtree(_TMP_DIR)
def fetch_data_file(filename):
import urllib2
remote_file = urllib2.urlopen(_BASE_URL_FOR_TEST_FILES + filename)
local_file = os.path.join(_TMP_DIR, filename)
with open(local_file, 'w') as netcdf_file:
netcdf_file.write(remote_file.read())
return local_file
def test_unstructured_2d():
field = field_fromfile(fetch_data_file('unstructured.2d.nc'), fmt='NETCDF4')
def test_rectilinear_1d():
field = field_fromfile(fetch_data_file('rectilinear.1d.nc'), fmt='NETCDF4')
def test_rectilinear_2d():
field = field_fromfile(fetch_data_file('rectilinear.2d.nc'), fmt='NETCDF4')
def test_rectilinear_3d():
field = field_fromfile(fetch_data_file('rectilinear.3d.nc'), fmt='NETCDF4')
|
Use HTTPS for base URL to THREDDS server
|
Use HTTPS for base URL to THREDDS server
Last week OIT made csdms.colorado.edu only accessible by HTTPS, which
caused the HTTP URL for the THREDDS server to fail.
|
Python
|
mit
|
csdms/pymt,csdms/coupling,csdms/coupling
|
import os
from pymt.printers.nc.read import field_fromfile
_BASE_URL_FOR_TEST_FILES = ('http://csdms.colorado.edu/thredds/fileServer'
'/benchmark/ugrid/')
_TMP_DIR = 'tmp'
def setup():
import tempfile
globals().update({
'_TMP_DIR': tempfile.mkdtemp(dir='.')
})
def teardown():
from shutil import rmtree
rmtree(_TMP_DIR)
def fetch_data_file(filename):
import urllib2
remote_file = urllib2.urlopen(_BASE_URL_FOR_TEST_FILES + filename)
local_file = os.path.join(_TMP_DIR, filename)
with open(local_file, 'w') as netcdf_file:
netcdf_file.write(remote_file.read())
return local_file
def test_unstructured_2d():
field = field_fromfile(fetch_data_file('unstructured.2d.nc'), fmt='NETCDF4')
def test_rectilinear_1d():
field = field_fromfile(fetch_data_file('rectilinear.1d.nc'), fmt='NETCDF4')
def test_rectilinear_2d():
field = field_fromfile(fetch_data_file('rectilinear.2d.nc'), fmt='NETCDF4')
def test_rectilinear_3d():
field = field_fromfile(fetch_data_file('rectilinear.3d.nc'), fmt='NETCDF4')
Use HTTPS for base URL to THREDDS server
Last week OIT made csdms.colorado.edu only accessible by HTTPS, which
caused the HTTP URL for the THREDDS server to fail.
|
import os
from pymt.printers.nc.read import field_fromfile
_BASE_URL_FOR_TEST_FILES = ('https://csdms.colorado.edu/thredds/fileServer'
'/benchmark/ugrid/')
_TMP_DIR = 'tmp'
def setup():
import tempfile
globals().update({
'_TMP_DIR': tempfile.mkdtemp(dir='.')
})
def teardown():
from shutil import rmtree
rmtree(_TMP_DIR)
def fetch_data_file(filename):
import urllib2
remote_file = urllib2.urlopen(_BASE_URL_FOR_TEST_FILES + filename)
local_file = os.path.join(_TMP_DIR, filename)
with open(local_file, 'w') as netcdf_file:
netcdf_file.write(remote_file.read())
return local_file
def test_unstructured_2d():
field = field_fromfile(fetch_data_file('unstructured.2d.nc'), fmt='NETCDF4')
def test_rectilinear_1d():
field = field_fromfile(fetch_data_file('rectilinear.1d.nc'), fmt='NETCDF4')
def test_rectilinear_2d():
field = field_fromfile(fetch_data_file('rectilinear.2d.nc'), fmt='NETCDF4')
def test_rectilinear_3d():
field = field_fromfile(fetch_data_file('rectilinear.3d.nc'), fmt='NETCDF4')
|
<commit_before>import os
from pymt.printers.nc.read import field_fromfile
_BASE_URL_FOR_TEST_FILES = ('http://csdms.colorado.edu/thredds/fileServer'
'/benchmark/ugrid/')
_TMP_DIR = 'tmp'
def setup():
import tempfile
globals().update({
'_TMP_DIR': tempfile.mkdtemp(dir='.')
})
def teardown():
from shutil import rmtree
rmtree(_TMP_DIR)
def fetch_data_file(filename):
import urllib2
remote_file = urllib2.urlopen(_BASE_URL_FOR_TEST_FILES + filename)
local_file = os.path.join(_TMP_DIR, filename)
with open(local_file, 'w') as netcdf_file:
netcdf_file.write(remote_file.read())
return local_file
def test_unstructured_2d():
field = field_fromfile(fetch_data_file('unstructured.2d.nc'), fmt='NETCDF4')
def test_rectilinear_1d():
field = field_fromfile(fetch_data_file('rectilinear.1d.nc'), fmt='NETCDF4')
def test_rectilinear_2d():
field = field_fromfile(fetch_data_file('rectilinear.2d.nc'), fmt='NETCDF4')
def test_rectilinear_3d():
field = field_fromfile(fetch_data_file('rectilinear.3d.nc'), fmt='NETCDF4')
<commit_msg>Use HTTPS for base URL to THREDDS server
Last week OIT made csdms.colorado.edu only accessible by HTTPS, which
caused the HTTP URL for the THREDDS server to fail.<commit_after>
|
import os
from pymt.printers.nc.read import field_fromfile
_BASE_URL_FOR_TEST_FILES = ('https://csdms.colorado.edu/thredds/fileServer'
'/benchmark/ugrid/')
_TMP_DIR = 'tmp'
def setup():
import tempfile
globals().update({
'_TMP_DIR': tempfile.mkdtemp(dir='.')
})
def teardown():
from shutil import rmtree
rmtree(_TMP_DIR)
def fetch_data_file(filename):
import urllib2
remote_file = urllib2.urlopen(_BASE_URL_FOR_TEST_FILES + filename)
local_file = os.path.join(_TMP_DIR, filename)
with open(local_file, 'w') as netcdf_file:
netcdf_file.write(remote_file.read())
return local_file
def test_unstructured_2d():
field = field_fromfile(fetch_data_file('unstructured.2d.nc'), fmt='NETCDF4')
def test_rectilinear_1d():
field = field_fromfile(fetch_data_file('rectilinear.1d.nc'), fmt='NETCDF4')
def test_rectilinear_2d():
field = field_fromfile(fetch_data_file('rectilinear.2d.nc'), fmt='NETCDF4')
def test_rectilinear_3d():
field = field_fromfile(fetch_data_file('rectilinear.3d.nc'), fmt='NETCDF4')
|
import os
from pymt.printers.nc.read import field_fromfile
_BASE_URL_FOR_TEST_FILES = ('http://csdms.colorado.edu/thredds/fileServer'
'/benchmark/ugrid/')
_TMP_DIR = 'tmp'
def setup():
import tempfile
globals().update({
'_TMP_DIR': tempfile.mkdtemp(dir='.')
})
def teardown():
from shutil import rmtree
rmtree(_TMP_DIR)
def fetch_data_file(filename):
import urllib2
remote_file = urllib2.urlopen(_BASE_URL_FOR_TEST_FILES + filename)
local_file = os.path.join(_TMP_DIR, filename)
with open(local_file, 'w') as netcdf_file:
netcdf_file.write(remote_file.read())
return local_file
def test_unstructured_2d():
field = field_fromfile(fetch_data_file('unstructured.2d.nc'), fmt='NETCDF4')
def test_rectilinear_1d():
field = field_fromfile(fetch_data_file('rectilinear.1d.nc'), fmt='NETCDF4')
def test_rectilinear_2d():
field = field_fromfile(fetch_data_file('rectilinear.2d.nc'), fmt='NETCDF4')
def test_rectilinear_3d():
field = field_fromfile(fetch_data_file('rectilinear.3d.nc'), fmt='NETCDF4')
Use HTTPS for base URL to THREDDS server
Last week OIT made csdms.colorado.edu only accessible by HTTPS, which
caused the HTTP URL for the THREDDS server to fail.import os
from pymt.printers.nc.read import field_fromfile
_BASE_URL_FOR_TEST_FILES = ('https://csdms.colorado.edu/thredds/fileServer'
'/benchmark/ugrid/')
_TMP_DIR = 'tmp'
def setup():
import tempfile
globals().update({
'_TMP_DIR': tempfile.mkdtemp(dir='.')
})
def teardown():
from shutil import rmtree
rmtree(_TMP_DIR)
def fetch_data_file(filename):
import urllib2
remote_file = urllib2.urlopen(_BASE_URL_FOR_TEST_FILES + filename)
local_file = os.path.join(_TMP_DIR, filename)
with open(local_file, 'w') as netcdf_file:
netcdf_file.write(remote_file.read())
return local_file
def test_unstructured_2d():
field = field_fromfile(fetch_data_file('unstructured.2d.nc'), fmt='NETCDF4')
def test_rectilinear_1d():
field = field_fromfile(fetch_data_file('rectilinear.1d.nc'), fmt='NETCDF4')
def test_rectilinear_2d():
field = field_fromfile(fetch_data_file('rectilinear.2d.nc'), fmt='NETCDF4')
def test_rectilinear_3d():
field = field_fromfile(fetch_data_file('rectilinear.3d.nc'), fmt='NETCDF4')
|
<commit_before>import os
from pymt.printers.nc.read import field_fromfile
_BASE_URL_FOR_TEST_FILES = ('http://csdms.colorado.edu/thredds/fileServer'
'/benchmark/ugrid/')
_TMP_DIR = 'tmp'
def setup():
import tempfile
globals().update({
'_TMP_DIR': tempfile.mkdtemp(dir='.')
})
def teardown():
from shutil import rmtree
rmtree(_TMP_DIR)
def fetch_data_file(filename):
import urllib2
remote_file = urllib2.urlopen(_BASE_URL_FOR_TEST_FILES + filename)
local_file = os.path.join(_TMP_DIR, filename)
with open(local_file, 'w') as netcdf_file:
netcdf_file.write(remote_file.read())
return local_file
def test_unstructured_2d():
field = field_fromfile(fetch_data_file('unstructured.2d.nc'), fmt='NETCDF4')
def test_rectilinear_1d():
field = field_fromfile(fetch_data_file('rectilinear.1d.nc'), fmt='NETCDF4')
def test_rectilinear_2d():
field = field_fromfile(fetch_data_file('rectilinear.2d.nc'), fmt='NETCDF4')
def test_rectilinear_3d():
field = field_fromfile(fetch_data_file('rectilinear.3d.nc'), fmt='NETCDF4')
<commit_msg>Use HTTPS for base URL to THREDDS server
Last week OIT made csdms.colorado.edu only accessible by HTTPS, which
caused the HTTP URL for the THREDDS server to fail.<commit_after>import os
from pymt.printers.nc.read import field_fromfile
_BASE_URL_FOR_TEST_FILES = ('https://csdms.colorado.edu/thredds/fileServer'
'/benchmark/ugrid/')
_TMP_DIR = 'tmp'
def setup():
import tempfile
globals().update({
'_TMP_DIR': tempfile.mkdtemp(dir='.')
})
def teardown():
from shutil import rmtree
rmtree(_TMP_DIR)
def fetch_data_file(filename):
import urllib2
remote_file = urllib2.urlopen(_BASE_URL_FOR_TEST_FILES + filename)
local_file = os.path.join(_TMP_DIR, filename)
with open(local_file, 'w') as netcdf_file:
netcdf_file.write(remote_file.read())
return local_file
def test_unstructured_2d():
field = field_fromfile(fetch_data_file('unstructured.2d.nc'), fmt='NETCDF4')
def test_rectilinear_1d():
field = field_fromfile(fetch_data_file('rectilinear.1d.nc'), fmt='NETCDF4')
def test_rectilinear_2d():
field = field_fromfile(fetch_data_file('rectilinear.2d.nc'), fmt='NETCDF4')
def test_rectilinear_3d():
field = field_fromfile(fetch_data_file('rectilinear.3d.nc'), fmt='NETCDF4')
|
54f027ec79d9d819a23854dcd79d4f79848ff3ef
|
tokens/models.py
|
tokens/models.py
|
from django.db import models
# Create your models here.
|
from django.core.validators import MaxValueValidator, MinValueValidator
from django.db import models
class Token(models.Model):
public_name = models.CharField(max_length=200)
symbol = models.CharField(max_length=4)
    decimals = models.IntegerField(
default=18,
validators=[MaxValueValidator(20), MinValueValidator(0)]
)
|
Create a dead simple Django model for storing token information
|
Create a dead simple Django model for storing token information
|
Python
|
apache-2.0
|
onyb/ethane,onyb/ethane,onyb/ethane,onyb/ethane
|
from django.db import models
# Create your models here.
Create a dead simple Django model for storing token information
|
from django.core.validators import MaxValueValidator, MinValueValidator
from django.db import models
class Token(models.Model):
public_name = models.CharField(max_length=200)
symbol = models.CharField(max_length=4)
    decimals = models.IntegerField(
default=18,
validators=[MaxValueValidator(20), MinValueValidator(0)]
)
|
<commit_before>from django.db import models
# Create your models here.
<commit_msg>Create a dead simple Django model for storing token information<commit_after>
|
from django.core.validators import MaxValueValidator, MinValueValidator
from django.db import models
class Token(models.Model):
public_name = models.CharField(max_length=200)
symbol = models.CharField(max_length=4)
    decimals = models.IntegerField(
default=18,
validators=[MaxValueValidator(20), MinValueValidator(0)]
)
|
from django.db import models
# Create your models here.
Create a dead simple Django model for storing token informationfrom django.core.validators import MaxValueValidator, MinValueValidator
from django.db import models
class Token(models.Model):
public_name = models.CharField(max_length=200)
symbol = models.CharField(max_length=4)
    decimals = models.IntegerField(
default=18,
validators=[MaxValueValidator(20), MinValueValidator(0)]
)
|
<commit_before>from django.db import models
# Create your models here.
<commit_msg>Create a dead simple Django model for storing token information<commit_after>from django.core.validators import MaxValueValidator, MinValueValidator
from django.db import models
class Token(models.Model):
public_name = models.CharField(max_length=200)
symbol = models.CharField(max_length=4)
    decimals = models.IntegerField(
default=18,
validators=[MaxValueValidator(20), MinValueValidator(0)]
)
|
65fff1f471b132b97f7a5ef556603e8c2f511503
|
mail_optional_autofollow/__manifest__.py
|
mail_optional_autofollow/__manifest__.py
|
# Copyright 2016 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
"name": "Mail optional autofollow",
"summary": """
Choose if you want to automatically add new recipients as followers
on mail.compose.message""",
"author": "ACSONE SA/NV," "Odoo Community Association (OCA)",
"website": "http://acsone.eu",
"category": "Social Network",
"version": "13.0.1.0.0",
"license": "AGPL-3",
"depends": ["mail"],
"data": ["wizard/mail_compose_message_view.xml"],
"installable": True,
}
|
# Copyright 2016 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
"name": "Mail optional autofollow",
"summary": """
Choose if you want to automatically add new recipients as followers
on mail.compose.message""",
"author": "ACSONE SA/NV," "Odoo Community Association (OCA)",
"website": "https://github.com/OCA/social",
"category": "Social Network",
"version": "13.0.1.0.0",
"license": "AGPL-3",
"depends": ["mail"],
"data": ["wizard/mail_compose_message_view.xml"],
"installable": True,
}
|
Apply pre-commit changes: Resolve conflicts
|
[IMP] Apply pre-commit changes: Resolve conflicts
|
Python
|
agpl-3.0
|
OCA/social,OCA/social,OCA/social
|
# Copyright 2016 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
"name": "Mail optional autofollow",
"summary": """
Choose if you want to automatically add new recipients as followers
on mail.compose.message""",
"author": "ACSONE SA/NV," "Odoo Community Association (OCA)",
"website": "http://acsone.eu",
"category": "Social Network",
"version": "13.0.1.0.0",
"license": "AGPL-3",
"depends": ["mail"],
"data": ["wizard/mail_compose_message_view.xml"],
"installable": True,
}
[IMP] Apply pre-commit changes: Resolve conflicts
|
# Copyright 2016 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
"name": "Mail optional autofollow",
"summary": """
Choose if you want to automatically add new recipients as followers
on mail.compose.message""",
"author": "ACSONE SA/NV," "Odoo Community Association (OCA)",
"website": "https://github.com/OCA/social",
"category": "Social Network",
"version": "13.0.1.0.0",
"license": "AGPL-3",
"depends": ["mail"],
"data": ["wizard/mail_compose_message_view.xml"],
"installable": True,
}
|
<commit_before># Copyright 2016 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
"name": "Mail optional autofollow",
"summary": """
Choose if you want to automatically add new recipients as followers
on mail.compose.message""",
"author": "ACSONE SA/NV," "Odoo Community Association (OCA)",
"website": "http://acsone.eu",
"category": "Social Network",
"version": "13.0.1.0.0",
"license": "AGPL-3",
"depends": ["mail"],
"data": ["wizard/mail_compose_message_view.xml"],
"installable": True,
}
<commit_msg>[IMP] Apply pre-commit changes: Resolve conflicts<commit_after>
|
# Copyright 2016 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
"name": "Mail optional autofollow",
"summary": """
Choose if you want to automatically add new recipients as followers
on mail.compose.message""",
"author": "ACSONE SA/NV," "Odoo Community Association (OCA)",
"website": "https://github.com/OCA/social",
"category": "Social Network",
"version": "13.0.1.0.0",
"license": "AGPL-3",
"depends": ["mail"],
"data": ["wizard/mail_compose_message_view.xml"],
"installable": True,
}
|
# Copyright 2016 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
"name": "Mail optional autofollow",
"summary": """
Choose if you want to automatically add new recipients as followers
on mail.compose.message""",
"author": "ACSONE SA/NV," "Odoo Community Association (OCA)",
"website": "http://acsone.eu",
"category": "Social Network",
"version": "13.0.1.0.0",
"license": "AGPL-3",
"depends": ["mail"],
"data": ["wizard/mail_compose_message_view.xml"],
"installable": True,
}
[IMP] Apply pre-commit changes: Resolve conflicts# Copyright 2016 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
"name": "Mail optional autofollow",
"summary": """
Choose if you want to automatically add new recipients as followers
on mail.compose.message""",
"author": "ACSONE SA/NV," "Odoo Community Association (OCA)",
"website": "https://github.com/OCA/social",
"category": "Social Network",
"version": "13.0.1.0.0",
"license": "AGPL-3",
"depends": ["mail"],
"data": ["wizard/mail_compose_message_view.xml"],
"installable": True,
}
|
<commit_before># Copyright 2016 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
"name": "Mail optional autofollow",
"summary": """
Choose if you want to automatically add new recipients as followers
on mail.compose.message""",
"author": "ACSONE SA/NV," "Odoo Community Association (OCA)",
"website": "http://acsone.eu",
"category": "Social Network",
"version": "13.0.1.0.0",
"license": "AGPL-3",
"depends": ["mail"],
"data": ["wizard/mail_compose_message_view.xml"],
"installable": True,
}
<commit_msg>[IMP] Apply pre-commit changes: Resolve conflicts<commit_after># Copyright 2016 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
"name": "Mail optional autofollow",
"summary": """
Choose if you want to automatically add new recipients as followers
on mail.compose.message""",
"author": "ACSONE SA/NV," "Odoo Community Association (OCA)",
"website": "https://github.com/OCA/social",
"category": "Social Network",
"version": "13.0.1.0.0",
"license": "AGPL-3",
"depends": ["mail"],
"data": ["wizard/mail_compose_message_view.xml"],
"installable": True,
}
|
c1b6357c4d6876caa081af0799ec6c7a189ad13f
|
fabfile.py
|
fabfile.py
|
from fabric.api import *
from fabric.contrib.console import confirm
appengine_dir='appengine-web/src'
goldquest_dir='src'
def update():
# update to latest code from repo
local('git pull')
def test():
local("nosetests -m 'Test|test_' -w %(path)s" % dict(path=goldquest_dir))
# jslint
# pychecker
# run jasmine tests
def compile():
# Minimize javascript using google closure.
local("java -jar ~/bin/compiler.jar --js %(path)s/javascript/game.js --js_output_file %(path)s/javascript/game.min.js" % dict(path=appengine_dir))
def deploy_appengine():
local("appcfg.py update " + appengine_dir)
def prepare_deploy():
test()
compile()
def deploy():
update()
prepare_deploy()
deploy_appengine()
# tweet about release
|
from fabric.api import *
from fabric.contrib.console import confirm
cfg = dict(
appengine_dir='appengine-web/src',
goldquest_dir='src',
appengine_token='',
)
def update():
# update to latest code from repo
local('git pull')
def test():
local("nosetests -m 'Test|test_' -w %(goldquest_dir)s" % cfg)
# jslint
# pychecker
# run jasmine tests
def compile():
# Minimize javascript using google closure.
local("java -jar ~/bin/compiler.jar --js %(appengine_dir)s/javascript/game.js --js_output_file %(appengine_dir)s/javascript/game.min.js" % cfg)
def deploy_appengine():
local("appcfg.py --oauth2_refresh_token=%(appengine_token)s update %(appengine_dir)s" % cfg)
def prepare_deploy():
test()
compile()
def deploy():
update()
prepare_deploy()
deploy_appengine()
# tweet about release
|
Add support for appengine update oauth2 token in deploy script.
|
NEW: Add support for appengine update oauth2 token in deploy script.
|
Python
|
mit
|
ollej/GoldQuest,ollej/GoldQuest,ollej/GoldQuest,ollej/GoldQuest
|
from fabric.api import *
from fabric.contrib.console import confirm
appengine_dir='appengine-web/src'
goldquest_dir='src'
def update():
# update to latest code from repo
local('git pull')
def test():
local("nosetests -m 'Test|test_' -w %(path)s" % dict(path=goldquest_dir))
# jslint
# pychecker
# run jasmine tests
def compile():
# Minimize javascript using google closure.
local("java -jar ~/bin/compiler.jar --js %(path)s/javascript/game.js --js_output_file %(path)s/javascript/game.min.js" % dict(path=appengine_dir))
def deploy_appengine():
local("appcfg.py update " + appengine_dir)
def prepare_deploy():
test()
compile()
def deploy():
update()
prepare_deploy()
deploy_appengine()
# tweet about release
NEW: Add support for appengine update oauth2 token in deploy script.
|
from fabric.api import *
from fabric.contrib.console import confirm
cfg = dict(
appengine_dir='appengine-web/src',
goldquest_dir='src',
appengine_token='',
)
def update():
# update to latest code from repo
local('git pull')
def test():
local("nosetests -m 'Test|test_' -w %(goldquest_dir)s" % cfg)
# jslint
# pychecker
# run jasmine tests
def compile():
# Minimize javascript using google closure.
local("java -jar ~/bin/compiler.jar --js %(appengine_dir)s/javascript/game.js --js_output_file %(appengine_dir)s/javascript/game.min.js" % cfg)
def deploy_appengine():
local("appcfg.py --oauth2_refresh_token=%(appengine_token)s update %(appengine_dir)s" % cfg)
def prepare_deploy():
test()
compile()
def deploy():
update()
prepare_deploy()
deploy_appengine()
# tweet about release
|
<commit_before>from fabric.api import *
from fabric.contrib.console import confirm
appengine_dir='appengine-web/src'
goldquest_dir='src'
def update():
# update to latest code from repo
local('git pull')
def test():
local("nosetests -m 'Test|test_' -w %(path)s" % dict(path=goldquest_dir))
# jslint
# pychecker
# run jasmine tests
def compile():
# Minimize javascript using google closure.
local("java -jar ~/bin/compiler.jar --js %(path)s/javascript/game.js --js_output_file %(path)s/javascript/game.min.js" % dict(path=appengine_dir))
def deploy_appengine():
local("appcfg.py update " + appengine_dir)
def prepare_deploy():
test()
compile()
def deploy():
update()
prepare_deploy()
deploy_appengine()
# tweet about release
<commit_msg>NEW: Add support for appengine update oauth2 token in deploy script.<commit_after>
|
from fabric.api import *
from fabric.contrib.console import confirm
cfg = dict(
appengine_dir='appengine-web/src',
goldquest_dir='src',
appengine_token='',
)
def update():
# update to latest code from repo
local('git pull')
def test():
local("nosetests -m 'Test|test_' -w %(goldquest_dir)s" % cfg)
# jslint
# pychecker
# run jasmine tests
def compile():
# Minimize javascript using google closure.
local("java -jar ~/bin/compiler.jar --js %(appengine_dir)s/javascript/game.js --js_output_file %(appengine_dir)s/javascript/game.min.js" % cfg)
def deploy_appengine():
local("appcfg.py --oauth2_refresh_token=%(appengine_token)s update %(appengine_dir)s" % cfg)
def prepare_deploy():
test()
compile()
def deploy():
update()
prepare_deploy()
deploy_appengine()
# tweet about release
|
from fabric.api import *
from fabric.contrib.console import confirm
appengine_dir='appengine-web/src'
goldquest_dir='src'
def update():
# update to latest code from repo
local('git pull')
def test():
local("nosetests -m 'Test|test_' -w %(path)s" % dict(path=goldquest_dir))
# jslint
# pychecker
# run jasmine tests
def compile():
# Minimize javascript using google closure.
local("java -jar ~/bin/compiler.jar --js %(path)s/javascript/game.js --js_output_file %(path)s/javascript/game.min.js" % dict(path=appengine_dir))
def deploy_appengine():
local("appcfg.py update " + appengine_dir)
def prepare_deploy():
test()
compile()
def deploy():
update()
prepare_deploy()
deploy_appengine()
# tweet about release
NEW: Add support for appengine update oauth2 token in deploy script.from fabric.api import *
from fabric.contrib.console import confirm
cfg = dict(
appengine_dir='appengine-web/src',
goldquest_dir='src',
appengine_token='',
)
def update():
# update to latest code from repo
local('git pull')
def test():
local("nosetests -m 'Test|test_' -w %(goldquest_dir)s" % cfg)
# jslint
# pychecker
# run jasmine tests
def compile():
# Minimize javascript using google closure.
local("java -jar ~/bin/compiler.jar --js %(appengine_dir)s/javascript/game.js --js_output_file %(appengine_dir)s/javascript/game.min.js" % cfg)
def deploy_appengine():
local("appcfg.py --oauth2_refresh_token=%(appengine_token)s update %(appengine_dir)s" % cfg)
def prepare_deploy():
test()
compile()
def deploy():
update()
prepare_deploy()
deploy_appengine()
# tweet about release
|
<commit_before>from fabric.api import *
from fabric.contrib.console import confirm
appengine_dir='appengine-web/src'
goldquest_dir='src'
def update():
# update to latest code from repo
local('git pull')
def test():
local("nosetests -m 'Test|test_' -w %(path)s" % dict(path=goldquest_dir))
# jslint
# pychecker
# run jasmine tests
def compile():
# Minimize javascript using google closure.
local("java -jar ~/bin/compiler.jar --js %(path)s/javascript/game.js --js_output_file %(path)s/javascript/game.min.js" % dict(path=appengine_dir))
def deploy_appengine():
local("appcfg.py update " + appengine_dir)
def prepare_deploy():
test()
compile()
def deploy():
update()
prepare_deploy()
deploy_appengine()
# tweet about release
<commit_msg>NEW: Add support for appengine update oauth2 token in deploy script.<commit_after>from fabric.api import *
from fabric.contrib.console import confirm
cfg = dict(
appengine_dir='appengine-web/src',
goldquest_dir='src',
appengine_token='',
)
def update():
# update to latest code from repo
local('git pull')
def test():
local("nosetests -m 'Test|test_' -w %(goldquest_dir)s" % cfg)
# jslint
# pychecker
# run jasmine tests
def compile():
# Minimize javascript using google closure.
local("java -jar ~/bin/compiler.jar --js %(appengine_dir)s/javascript/game.js --js_output_file %(appengine_dir)s/javascript/game.min.js" % cfg)
def deploy_appengine():
local("appcfg.py --oauth2_refresh_token=%(appengine_token)s update %(appengine_dir)s" % cfg)
def prepare_deploy():
test()
compile()
def deploy():
update()
prepare_deploy()
deploy_appengine()
# tweet about release
|
1eeb6061cfc945ea84485e10fcf39062270c8945
|
hooks.py
|
hooks.py
|
#!/usr/bin/python
def get_secret_for_user(user, ipparam):
print "Looking up user %s with ipparam %s" % (user, ipparam)
return "user_secret"
def allowed_address_hook(ip):
return True
def chap_check_hook():
return True
def ip_up_notifier(ifname, ouraddr, hisaddr):
print "ip_up_notifier"
def ip_down_notifier(arg):
print "ip_down_notifier"
def auth_up_notifier(arg):
print "auth_up_notifier"
def link_down_notifier(arg):
print "link_down_notifier"
|
#!/usr/bin/env python
def get_secret_for_user(user, ipparam):
print("Looking up user %s with ipparam %s" % (user, ipparam))
return "user_secret"
def allowed_address_hook(ip):
return True
def chap_check_hook():
return True
def ip_up_notifier(ifname, localip, remoteip):
print("ip_up_notifier")
def ip_down_notifier(arg):
print("ip_down_notifier")
def auth_up_notifier(arg):
print("auth_up_notifier")
def link_down_notifier(arg):
print("link_down_notifier")
|
Fix print statements for Python 3
|
Fix print statements for Python 3
Although I haven't tested Python 3 for this, all print statements have
been updated to use parentheses. There are also some minor fixes to the
names of the ip-up hook arguments.
|
Python
|
mit
|
metricube/pppd_pyhook,metricube/pppd_pyhook
|
#!/usr/bin/python
def get_secret_for_user(user, ipparam):
print "Looking up user %s with ipparam %s" % (user, ipparam)
return "user_secret"
def allowed_address_hook(ip):
return True
def chap_check_hook():
return True
def ip_up_notifier(ifname, ouraddr, hisaddr):
print "ip_up_notifier"
def ip_down_notifier(arg):
print "ip_down_notifier"
def auth_up_notifier(arg):
print "auth_up_notifier"
def link_down_notifier(arg):
print "link_down_notifier"
Fix print statements for Python 3
Although I haven't tested Python 3 for this, all print statements have
been updated to use parentheses. There are also some minor fixes to the
names of the ip-up hook arguments.
|
#!/usr/bin/env python
def get_secret_for_user(user, ipparam):
print("Looking up user %s with ipparam %s" % (user, ipparam))
return "user_secret"
def allowed_address_hook(ip):
return True
def chap_check_hook():
return True
def ip_up_notifier(ifname, localip, remoteip):
print("ip_up_notifier")
def ip_down_notifier(arg):
print("ip_down_notifier")
def auth_up_notifier(arg):
print("auth_up_notifier")
def link_down_notifier(arg):
print("link_down_notifier")
|
<commit_before>#!/usr/bin/python
def get_secret_for_user(user, ipparam):
print "Looking up user %s with ipparam %s" % (user, ipparam)
return "user_secret"
def allowed_address_hook(ip):
return True
def chap_check_hook():
return True
def ip_up_notifier(ifname, ouraddr, hisaddr):
print "ip_up_notifier"
def ip_down_notifier(arg):
print "ip_down_notifier"
def auth_up_notifier(arg):
print "auth_up_notifier"
def link_down_notifier(arg):
print "link_down_notifier"
<commit_msg>Fix print statements for Python 3
Although I haven't tested Python 3 for this, all print statements have
been updated to use parentheses. There are also some minor fixes to the
names of the ip-up hook arguments.<commit_after>
|
#!/usr/bin/env python
def get_secret_for_user(user, ipparam):
print("Looking up user %s with ipparam %s" % (user, ipparam))
return "user_secret"
def allowed_address_hook(ip):
return True
def chap_check_hook():
return True
def ip_up_notifier(ifname, localip, remoteip):
print("ip_up_notifier")
def ip_down_notifier(arg):
print("ip_down_notifier")
def auth_up_notifier(arg):
print("auth_up_notifier")
def link_down_notifier(arg):
print("link_down_notifier")
|
#!/usr/bin/python
def get_secret_for_user(user, ipparam):
print "Looking up user %s with ipparam %s" % (user, ipparam)
return "user_secret"
def allowed_address_hook(ip):
return True
def chap_check_hook():
return True
def ip_up_notifier(ifname, ouraddr, hisaddr):
print "ip_up_notifier"
def ip_down_notifier(arg):
print "ip_down_notifier"
def auth_up_notifier(arg):
print "auth_up_notifier"
def link_down_notifier(arg):
print "link_down_notifier"
Fix print statements for Python 3
Although I haven't tested Python 3 for this, all print statements have
been updated to use parentheses. There are also some minor fixes to the
names of the ip-up hook arguments.#!/usr/bin/env python
def get_secret_for_user(user, ipparam):
print("Looking up user %s with ipparam %s" % (user, ipparam))
return "user_secret"
def allowed_address_hook(ip):
return True
def chap_check_hook():
return True
def ip_up_notifier(ifname, localip, remoteip):
print("ip_up_notifier")
def ip_down_notifier(arg):
print("ip_down_notifier")
def auth_up_notifier(arg):
print("auth_up_notifier")
def link_down_notifier(arg):
print("link_down_notifier")
|
<commit_before>#!/usr/bin/python
def get_secret_for_user(user, ipparam):
print "Looking up user %s with ipparam %s" % (user, ipparam)
return "user_secret"
def allowed_address_hook(ip):
return True
def chap_check_hook():
return True
def ip_up_notifier(ifname, ouraddr, hisaddr):
print "ip_up_notifier"
def ip_down_notifier(arg):
print "ip_down_notifier"
def auth_up_notifier(arg):
print "auth_up_notifier"
def link_down_notifier(arg):
print "link_down_notifier"
<commit_msg>Fix print statements for Python 3
Although I haven't tested Python 3 for this, all print statements have
been updated to use parentheses. There are also some minor fixes to the
names of the ip-up hook arguments.<commit_after>#!/usr/bin/env python
def get_secret_for_user(user, ipparam):
print("Looking up user %s with ipparam %s" % (user, ipparam))
return "user_secret"
def allowed_address_hook(ip):
return True
def chap_check_hook():
return True
def ip_up_notifier(ifname, localip, remoteip):
print("ip_up_notifier")
def ip_down_notifier(arg):
print("ip_down_notifier")
def auth_up_notifier(arg):
print("auth_up_notifier")
def link_down_notifier(arg):
print("link_down_notifier")
|
e53d012e95434d2857c4998c161fc71abd30acc7
|
django_extensions/management/commands/_private.py
|
django_extensions/management/commands/_private.py
|
from six.moves import configparser
def parse_mysql_cnf(dbinfo):
"""
Attempt to parse mysql database config file for connection settings.
Ideally we would hook into django's code to do this, but read_default_file is handled by the mysql C libs
so we have to emulate the behaviour
Settings that are missing will return ''
returns (user, password, database_name, database_host, database_port)
"""
read_default_file = dbinfo.get('OPTIONS', {}).get('read_default_file')
if read_default_file:
config = configparser.RawConfigParser({
'user': '',
'password': '',
'database': '',
'host': '',
'port': '',
'socket': '',
})
import os
config.read(os.path.expanduser(read_default_file))
try:
user = config.get('client', 'user')
password = config.get('client', 'password')
database_name = config.get('client', 'database')
database_host = config.get('client', 'host')
database_port = config.get('client', 'port')
socket = config.get('client', 'socket')
if database_host == 'localhost' and socket:
# mysql actually uses a socket if host is localhost
database_host = socket
return user, password, database_name, database_host, database_port
except configparser.NoSectionError:
pass
return '', '', '', '', ''
|
from six.moves import configparser
def parse_mysql_cnf(dbinfo):
"""
Attempt to parse mysql database config file for connection settings.
Ideally we would hook into django's code to do this, but read_default_file is handled by the mysql C libs
so we have to emulate the behaviour
Settings that are missing will return ''
returns (user, password, database_name, database_host, database_port)
"""
read_default_file = dbinfo.get('OPTIONS', {}).get('read_default_file')
if read_default_file:
config = configparser.RawConfigParser({
'user': '',
'password': '',
'database': '',
'host': '',
'port': '',
'socket': '',
})
import os
config.read(os.path.expanduser(read_default_file))
try:
user = config.get('client', 'user')
password = config.get('client', 'password')
database_name = config.get('client', 'database')
database_host = config.get('client', 'host')
database_port = config.get('client', 'port')
socket = config.get('client', 'socket')
if database_host == 'localhost' and socket:
# mysql actually uses a socket if host is localhost
database_host = socket
return user, password, database_name, database_host, database_port
except configparser.NoSectionError:
pass
return '', '', '', '', ''
|
Fix trailing line style violation
|
Fix trailing line style violation
|
Python
|
mit
|
haakenlid/django-extensions,linuxmaniac/django-extensions,dpetzold/django-extensions,haakenlid/django-extensions,django-extensions/django-extensions,levic/django-extensions,linuxmaniac/django-extensions,kevgathuku/django-extensions,levic/django-extensions,dpetzold/django-extensions,jpadilla/django-extensions,kevgathuku/django-extensions,jpadilla/django-extensions,django-extensions/django-extensions,django-extensions/django-extensions,jpadilla/django-extensions,kevgathuku/django-extensions,levic/django-extensions,dpetzold/django-extensions,haakenlid/django-extensions,linuxmaniac/django-extensions
|
from six.moves import configparser
def parse_mysql_cnf(dbinfo):
"""
Attempt to parse mysql database config file for connection settings.
Ideally we would hook into django's code to do this, but read_default_file is handled by the mysql C libs
so we have to emulate the behaviour
Settings that are missing will return ''
returns (user, password, database_name, database_host, database_port)
"""
read_default_file = dbinfo.get('OPTIONS', {}).get('read_default_file')
if read_default_file:
config = configparser.RawConfigParser({
'user': '',
'password': '',
'database': '',
'host': '',
'port': '',
'socket': '',
})
import os
config.read(os.path.expanduser(read_default_file))
try:
user = config.get('client', 'user')
password = config.get('client', 'password')
database_name = config.get('client', 'database')
database_host = config.get('client', 'host')
database_port = config.get('client', 'port')
socket = config.get('client', 'socket')
if database_host == 'localhost' and socket:
# mysql actually uses a socket if host is localhost
database_host = socket
return user, password, database_name, database_host, database_port
except configparser.NoSectionError:
pass
return '', '', '', '', ''
Fix trailing line style violation
|
from six.moves import configparser
def parse_mysql_cnf(dbinfo):
"""
Attempt to parse mysql database config file for connection settings.
Ideally we would hook into django's code to do this, but read_default_file is handled by the mysql C libs
so we have to emulate the behaviour
Settings that are missing will return ''
returns (user, password, database_name, database_host, database_port)
"""
read_default_file = dbinfo.get('OPTIONS', {}).get('read_default_file')
if read_default_file:
config = configparser.RawConfigParser({
'user': '',
'password': '',
'database': '',
'host': '',
'port': '',
'socket': '',
})
import os
config.read(os.path.expanduser(read_default_file))
try:
user = config.get('client', 'user')
password = config.get('client', 'password')
database_name = config.get('client', 'database')
database_host = config.get('client', 'host')
database_port = config.get('client', 'port')
socket = config.get('client', 'socket')
if database_host == 'localhost' and socket:
# mysql actually uses a socket if host is localhost
database_host = socket
return user, password, database_name, database_host, database_port
except configparser.NoSectionError:
pass
return '', '', '', '', ''
|
<commit_before>from six.moves import configparser
def parse_mysql_cnf(dbinfo):
"""
Attempt to parse mysql database config file for connection settings.
Ideally we would hook into django's code to do this, but read_default_file is handled by the mysql C libs
so we have to emulate the behaviour
Settings that are missing will return ''
returns (user, password, database_name, database_host, database_port)
"""
read_default_file = dbinfo.get('OPTIONS', {}).get('read_default_file')
if read_default_file:
config = configparser.RawConfigParser({
'user': '',
'password': '',
'database': '',
'host': '',
'port': '',
'socket': '',
})
import os
config.read(os.path.expanduser(read_default_file))
try:
user = config.get('client', 'user')
password = config.get('client', 'password')
database_name = config.get('client', 'database')
database_host = config.get('client', 'host')
database_port = config.get('client', 'port')
socket = config.get('client', 'socket')
if database_host == 'localhost' and socket:
# mysql actually uses a socket if host is localhost
database_host = socket
return user, password, database_name, database_host, database_port
except configparser.NoSectionError:
pass
return '', '', '', '', ''
<commit_msg>Fix trailing line style violation<commit_after>
|
from six.moves import configparser
def parse_mysql_cnf(dbinfo):
"""
Attempt to parse mysql database config file for connection settings.
Ideally we would hook into django's code to do this, but read_default_file is handled by the mysql C libs
so we have to emulate the behaviour
Settings that are missing will return ''
returns (user, password, database_name, database_host, database_port)
"""
read_default_file = dbinfo.get('OPTIONS', {}).get('read_default_file')
if read_default_file:
config = configparser.RawConfigParser({
'user': '',
'password': '',
'database': '',
'host': '',
'port': '',
'socket': '',
})
import os
config.read(os.path.expanduser(read_default_file))
try:
user = config.get('client', 'user')
password = config.get('client', 'password')
database_name = config.get('client', 'database')
database_host = config.get('client', 'host')
database_port = config.get('client', 'port')
socket = config.get('client', 'socket')
if database_host == 'localhost' and socket:
# mysql actually uses a socket if host is localhost
database_host = socket
return user, password, database_name, database_host, database_port
except configparser.NoSectionError:
pass
return '', '', '', '', ''
|
from six.moves import configparser
def parse_mysql_cnf(dbinfo):
"""
Attempt to parse mysql database config file for connection settings.
Ideally we would hook into django's code to do this, but read_default_file is handled by the mysql C libs
so we have to emulate the behaviour
Settings that are missing will return ''
returns (user, password, database_name, database_host, database_port)
"""
read_default_file = dbinfo.get('OPTIONS', {}).get('read_default_file')
if read_default_file:
config = configparser.RawConfigParser({
'user': '',
'password': '',
'database': '',
'host': '',
'port': '',
'socket': '',
})
import os
config.read(os.path.expanduser(read_default_file))
try:
user = config.get('client', 'user')
password = config.get('client', 'password')
database_name = config.get('client', 'database')
database_host = config.get('client', 'host')
database_port = config.get('client', 'port')
socket = config.get('client', 'socket')
if database_host == 'localhost' and socket:
# mysql actually uses a socket if host is localhost
database_host = socket
return user, password, database_name, database_host, database_port
except configparser.NoSectionError:
pass
return '', '', '', '', ''
Fix trailing line style violationfrom six.moves import configparser
def parse_mysql_cnf(dbinfo):
"""
Attempt to parse mysql database config file for connection settings.
Ideally we would hook into django's code to do this, but read_default_file is handled by the mysql C libs
so we have to emulate the behaviour
Settings that are missing will return ''
returns (user, password, database_name, database_host, database_port)
"""
read_default_file = dbinfo.get('OPTIONS', {}).get('read_default_file')
if read_default_file:
config = configparser.RawConfigParser({
'user': '',
'password': '',
'database': '',
'host': '',
'port': '',
'socket': '',
})
import os
config.read(os.path.expanduser(read_default_file))
try:
user = config.get('client', 'user')
password = config.get('client', 'password')
database_name = config.get('client', 'database')
database_host = config.get('client', 'host')
database_port = config.get('client', 'port')
socket = config.get('client', 'socket')
if database_host == 'localhost' and socket:
# mysql actually uses a socket if host is localhost
database_host = socket
return user, password, database_name, database_host, database_port
except configparser.NoSectionError:
pass
return '', '', '', '', ''
|
<commit_before>from six.moves import configparser
def parse_mysql_cnf(dbinfo):
"""
Attempt to parse mysql database config file for connection settings.
Ideally we would hook into django's code to do this, but read_default_file is handled by the mysql C libs
so we have to emulate the behaviour
Settings that are missing will return ''
returns (user, password, database_name, database_host, database_port)
"""
read_default_file = dbinfo.get('OPTIONS', {}).get('read_default_file')
if read_default_file:
config = configparser.RawConfigParser({
'user': '',
'password': '',
'database': '',
'host': '',
'port': '',
'socket': '',
})
import os
config.read(os.path.expanduser(read_default_file))
try:
user = config.get('client', 'user')
password = config.get('client', 'password')
database_name = config.get('client', 'database')
database_host = config.get('client', 'host')
database_port = config.get('client', 'port')
socket = config.get('client', 'socket')
if database_host == 'localhost' and socket:
# mysql actually uses a socket if host is localhost
database_host = socket
return user, password, database_name, database_host, database_port
except configparser.NoSectionError:
pass
return '', '', '', '', ''
<commit_msg>Fix trailing line style violation<commit_after>from six.moves import configparser
def parse_mysql_cnf(dbinfo):
"""
Attempt to parse mysql database config file for connection settings.
Ideally we would hook into django's code to do this, but read_default_file is handled by the mysql C libs
so we have to emulate the behaviour
Settings that are missing will return ''
returns (user, password, database_name, database_host, database_port)
"""
read_default_file = dbinfo.get('OPTIONS', {}).get('read_default_file')
if read_default_file:
config = configparser.RawConfigParser({
'user': '',
'password': '',
'database': '',
'host': '',
'port': '',
'socket': '',
})
import os
config.read(os.path.expanduser(read_default_file))
try:
user = config.get('client', 'user')
password = config.get('client', 'password')
database_name = config.get('client', 'database')
database_host = config.get('client', 'host')
database_port = config.get('client', 'port')
socket = config.get('client', 'socket')
if database_host == 'localhost' and socket:
# mysql actually uses a socket if host is localhost
database_host = socket
return user, password, database_name, database_host, database_port
except configparser.NoSectionError:
pass
return '', '', '', '', ''
|
5495913d43606407c7fb646b2a0eb4b5d4b80ba1
|
network/admin.py
|
network/admin.py
|
from django.contrib import admin
from .models.device import Device
from .models.interface import Interface
admin.site.register(Device)
admin.site.register(Interface)
|
from django.contrib import admin
from .models.device import Device
from .models.interface import Interface
from members.settings import MAX_INTERFACE_PER_DEVICE
class InterfaceAdmin(admin.TabularInline):
model = Interface
max_num = MAX_INTERFACE_PER_DEVICE
display = ('interface', 'mac_address', 'description')
view_on_site = False
@admin.register(Device)
class DeviceAdmin(admin.ModelAdmin):
list_display = ('user', 'device_name', 'description', 'add_date')
readonly_fields = ('device_ip',)
search_fields = ['user__username']
inlines = [InterfaceAdmin]
|
Add search box, show devices list in table, show interfaces in device detail
|
Add search box, show devices list in table, show interfaces in device detail
|
Python
|
mit
|
Atilla106/members.atilla.org,Atilla106/members.atilla.org,Atilla106/members.atilla.org,Atilla106/members.atilla.org,Atilla106/members.atilla.org
|
from django.contrib import admin
from .models.device import Device
from .models.interface import Interface
admin.site.register(Device)
admin.site.register(Interface)
Add search box, show devices list in table, show interfaces in device detail
|
from django.contrib import admin
from .models.device import Device
from .models.interface import Interface
from members.settings import MAX_INTERFACE_PER_DEVICE
class InterfaceAdmin(admin.TabularInline):
model = Interface
max_num = MAX_INTERFACE_PER_DEVICE
display = ('interface', 'mac_address', 'description')
view_on_site = False
@admin.register(Device)
class DeviceAdmin(admin.ModelAdmin):
list_display = ('user', 'device_name', 'description', 'add_date')
readonly_fields = ('device_ip',)
search_fields = ['user__username']
inlines = [InterfaceAdmin]
|
<commit_before>from django.contrib import admin
from .models.device import Device
from .models.interface import Interface
admin.site.register(Device)
admin.site.register(Interface)
<commit_msg>Add search box, show devices list in table, show interfaces in device detail<commit_after>
|
from django.contrib import admin
from .models.device import Device
from .models.interface import Interface
from members.settings import MAX_INTERFACE_PER_DEVICE
class InterfaceAdmin(admin.TabularInline):
model = Interface
max_num = MAX_INTERFACE_PER_DEVICE
display = ('interface', 'mac_address', 'description')
view_on_site = False
@admin.register(Device)
class DeviceAdmin(admin.ModelAdmin):
list_display = ('user', 'device_name', 'description', 'add_date')
readonly_fields = ('device_ip',)
search_fields = ['user__username']
inlines = [InterfaceAdmin]
|
from django.contrib import admin
from .models.device import Device
from .models.interface import Interface
admin.site.register(Device)
admin.site.register(Interface)
Add search box, show devices list in table, show interfaces in device detailfrom django.contrib import admin
from .models.device import Device
from .models.interface import Interface
from members.settings import MAX_INTERFACE_PER_DEVICE
class InterfaceAdmin(admin.TabularInline):
model = Interface
max_num = MAX_INTERFACE_PER_DEVICE
display = ('interface', 'mac_address', 'description')
view_on_site = False
@admin.register(Device)
class DeviceAdmin(admin.ModelAdmin):
list_display = ('user', 'device_name', 'description', 'add_date')
readonly_fields = ('device_ip',)
search_fields = ['user__username']
inlines = [InterfaceAdmin]
|
<commit_before>from django.contrib import admin
from .models.device import Device
from .models.interface import Interface
admin.site.register(Device)
admin.site.register(Interface)
<commit_msg>Add search box, show devices list in table, show interfaces in device detail<commit_after>from django.contrib import admin
from .models.device import Device
from .models.interface import Interface
from members.settings import MAX_INTERFACE_PER_DEVICE
class InterfaceAdmin(admin.TabularInline):
model = Interface
max_num = MAX_INTERFACE_PER_DEVICE
display = ('interface', 'mac_address', 'description')
view_on_site = False
@admin.register(Device)
class DeviceAdmin(admin.ModelAdmin):
list_display = ('user', 'device_name', 'description', 'add_date')
readonly_fields = ('device_ip',)
search_fields = ['user__username']
inlines = [InterfaceAdmin]
|
6d928da6c3848c2fd9f34772033fb645767ae4c3
|
dbaas/workflow/steps/util/resize/check_database_status.py
|
dbaas/workflow/steps/util/resize/check_database_status.py
|
# -*- coding: utf-8 -*-
import logging
from ...util.base import BaseStep
LOG = logging.getLogger(__name__)
class CheckDatabaseStatus(BaseStep):
def __unicode__(self):
return "Checking database status..."
def do(self, workflow_dict):
try:
if not 'database' in workflow_dict:
return False
if not 'databaseinfra' in workflow_dict:
workflow_dict['databaseinfra'] = workflow_dict[
'database'].databaseinfra
LOG.info("Getting driver class")
driver = workflow_dict['databaseinfra'].get_driver()
from time import sleep
sleep(60)
if driver.check_status():
LOG.info("Database is ok...")
workflow_dict['database'].status = 1
workflow_dict['database'].save()
return True
return False
except Exception, e:
LOG.info("Error: {}".format(e))
pass
def undo(self, workflow_dict):
LOG.info("Nothing to do here...")
return True
|
# -*- coding: utf-8 -*-
import logging
from ...util.base import BaseStep
LOG = logging.getLogger(__name__)
class CheckDatabaseStatus(BaseStep):
def __unicode__(self):
return "Checking database status..."
def do(self, workflow_dict):
try:
if 'database' not in workflow_dict:
return False
if 'databaseinfra' not in workflow_dict:
workflow_dict['databaseinfra'] = workflow_dict[
'database'].databaseinfra
LOG.info("Getting driver class")
driver = workflow_dict['databaseinfra'].get_driver()
from time import sleep
sleep(60)
if driver.check_status():
LOG.info("Database is ok...")
workflow_dict['database'].status = 1
workflow_dict['database'].save()
return True
except Exception as e:
LOG.info("Error: {}".format(e))
pass
def undo(self, workflow_dict):
LOG.info("Nothing to do here...")
return True
|
Change check database status to return even when it is false
|
Change check database status to return even when it is false
|
Python
|
bsd-3-clause
|
globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service
|
# -*- coding: utf-8 -*-
import logging
from ...util.base import BaseStep
LOG = logging.getLogger(__name__)
class CheckDatabaseStatus(BaseStep):
def __unicode__(self):
return "Checking database status..."
def do(self, workflow_dict):
try:
if not 'database' in workflow_dict:
return False
if not 'databaseinfra' in workflow_dict:
workflow_dict['databaseinfra'] = workflow_dict[
'database'].databaseinfra
LOG.info("Getting driver class")
driver = workflow_dict['databaseinfra'].get_driver()
from time import sleep
sleep(60)
if driver.check_status():
LOG.info("Database is ok...")
workflow_dict['database'].status = 1
workflow_dict['database'].save()
return True
return False
except Exception, e:
LOG.info("Error: {}".format(e))
pass
def undo(self, workflow_dict):
LOG.info("Nothing to do here...")
return True
Change check database status to return even when it is false
|
# -*- coding: utf-8 -*-
import logging
from ...util.base import BaseStep
LOG = logging.getLogger(__name__)
class CheckDatabaseStatus(BaseStep):
def __unicode__(self):
return "Checking database status..."
def do(self, workflow_dict):
try:
if 'database' not in workflow_dict:
return False
if 'databaseinfra' not in workflow_dict:
workflow_dict['databaseinfra'] = workflow_dict[
'database'].databaseinfra
LOG.info("Getting driver class")
driver = workflow_dict['databaseinfra'].get_driver()
from time import sleep
sleep(60)
if driver.check_status():
LOG.info("Database is ok...")
workflow_dict['database'].status = 1
workflow_dict['database'].save()
return True
except Exception as e:
LOG.info("Error: {}".format(e))
pass
def undo(self, workflow_dict):
LOG.info("Nothing to do here...")
return True
|
<commit_before># -*- coding: utf-8 -*-
import logging
from ...util.base import BaseStep
LOG = logging.getLogger(__name__)
class CheckDatabaseStatus(BaseStep):
def __unicode__(self):
return "Checking database status..."
def do(self, workflow_dict):
try:
if not 'database' in workflow_dict:
return False
if not 'databaseinfra' in workflow_dict:
workflow_dict['databaseinfra'] = workflow_dict[
'database'].databaseinfra
LOG.info("Getting driver class")
driver = workflow_dict['databaseinfra'].get_driver()
from time import sleep
sleep(60)
if driver.check_status():
LOG.info("Database is ok...")
workflow_dict['database'].status = 1
workflow_dict['database'].save()
return True
return False
except Exception, e:
LOG.info("Error: {}".format(e))
pass
def undo(self, workflow_dict):
LOG.info("Nothing to do here...")
return True
<commit_msg>Change check database status to return even when it is false<commit_after>
|
# -*- coding: utf-8 -*-
import logging
from ...util.base import BaseStep
LOG = logging.getLogger(__name__)
class CheckDatabaseStatus(BaseStep):
def __unicode__(self):
return "Checking database status..."
def do(self, workflow_dict):
try:
if 'database' not in workflow_dict:
return False
if 'databaseinfra' not in workflow_dict:
workflow_dict['databaseinfra'] = workflow_dict[
'database'].databaseinfra
LOG.info("Getting driver class")
driver = workflow_dict['databaseinfra'].get_driver()
from time import sleep
sleep(60)
if driver.check_status():
LOG.info("Database is ok...")
workflow_dict['database'].status = 1
workflow_dict['database'].save()
return True
except Exception as e:
LOG.info("Error: {}".format(e))
pass
def undo(self, workflow_dict):
LOG.info("Nothing to do here...")
return True
|
# -*- coding: utf-8 -*-
import logging
from ...util.base import BaseStep
LOG = logging.getLogger(__name__)
class CheckDatabaseStatus(BaseStep):
def __unicode__(self):
return "Checking database status..."
def do(self, workflow_dict):
try:
if not 'database' in workflow_dict:
return False
if not 'databaseinfra' in workflow_dict:
workflow_dict['databaseinfra'] = workflow_dict[
'database'].databaseinfra
LOG.info("Getting driver class")
driver = workflow_dict['databaseinfra'].get_driver()
from time import sleep
sleep(60)
if driver.check_status():
LOG.info("Database is ok...")
workflow_dict['database'].status = 1
workflow_dict['database'].save()
return True
return False
except Exception, e:
LOG.info("Error: {}".format(e))
pass
def undo(self, workflow_dict):
LOG.info("Nothing to do here...")
return True
Change check database status to return even when it is false# -*- coding: utf-8 -*-
import logging
from ...util.base import BaseStep
LOG = logging.getLogger(__name__)
class CheckDatabaseStatus(BaseStep):
def __unicode__(self):
return "Checking database status..."
def do(self, workflow_dict):
try:
if 'database' not in workflow_dict:
return False
if 'databaseinfra' not in workflow_dict:
workflow_dict['databaseinfra'] = workflow_dict[
'database'].databaseinfra
LOG.info("Getting driver class")
driver = workflow_dict['databaseinfra'].get_driver()
from time import sleep
sleep(60)
if driver.check_status():
LOG.info("Database is ok...")
workflow_dict['database'].status = 1
workflow_dict['database'].save()
return True
except Exception as e:
LOG.info("Error: {}".format(e))
pass
def undo(self, workflow_dict):
LOG.info("Nothing to do here...")
return True
|
<commit_before># -*- coding: utf-8 -*-
import logging
from ...util.base import BaseStep
LOG = logging.getLogger(__name__)
class CheckDatabaseStatus(BaseStep):
def __unicode__(self):
return "Checking database status..."
def do(self, workflow_dict):
try:
if not 'database' in workflow_dict:
return False
if not 'databaseinfra' in workflow_dict:
workflow_dict['databaseinfra'] = workflow_dict[
'database'].databaseinfra
LOG.info("Getting driver class")
driver = workflow_dict['databaseinfra'].get_driver()
from time import sleep
sleep(60)
if driver.check_status():
LOG.info("Database is ok...")
workflow_dict['database'].status = 1
workflow_dict['database'].save()
return True
return False
except Exception, e:
LOG.info("Error: {}".format(e))
pass
def undo(self, workflow_dict):
LOG.info("Nothing to do here...")
return True
<commit_msg>Change check database status to return even when it is false<commit_after># -*- coding: utf-8 -*-
import logging
from ...util.base import BaseStep
LOG = logging.getLogger(__name__)
class CheckDatabaseStatus(BaseStep):
def __unicode__(self):
return "Checking database status..."
def do(self, workflow_dict):
try:
if 'database' not in workflow_dict:
return False
if 'databaseinfra' not in workflow_dict:
workflow_dict['databaseinfra'] = workflow_dict[
'database'].databaseinfra
LOG.info("Getting driver class")
driver = workflow_dict['databaseinfra'].get_driver()
from time import sleep
sleep(60)
if driver.check_status():
LOG.info("Database is ok...")
workflow_dict['database'].status = 1
workflow_dict['database'].save()
return True
except Exception as e:
LOG.info("Error: {}".format(e))
pass
def undo(self, workflow_dict):
LOG.info("Nothing to do here...")
return True
|
d117d924a684e0ec651d9f91b5fa7fcdfceb8777
|
server.py
|
server.py
|
#!/usr/bin/env python
import os
import sys
import requests
import re
from bot import RandomBot
SERVER_HOST = 'http://localhost:9000'
trainingState = requests.post(SERVER_HOST + '/api/training/alone').json()
state = trainingState
bot = RandomBot()
def move(url, direction):
r = requests.post(url, {'dir': direction})
return r.json()
def start(server_url):
def play(state):
if (state['game']['finished']):
print('game finished')
else:
url = state['playUrl']
direction = bot.move(state)
newState = move(state['playUrl'], direction)
play(newState)
print("Start: " + state['viewUrl'])
play(state)
if __name__ == "__main__":
if (len(sys.argv) > 1):
SERVER_HOST = sys.argv[1]
start(sys.argv[1])
else:
print('Specify the server, ex: "http://localhost:9000"')
|
#!/usr/bin/env python
import os
import sys
import requests
import re
from bot import RandomBot
SERVER_HOST = 'http://localhost:9000'
trainingState = requests.post(SERVER_HOST + '/api/training/alone').json()
state = trainingState
bot = RandomBot()
def move(url, direction):
r = requests.post(url, {'dir': direction})
return r.json()
def start(server_url):
def play(state):
if (state['game']['finished']):
print('game finished')
else:
url = state['playUrl']
direction = bot.move(state)
newState = move(state['playUrl'], direction)
print("Playing turn %d with direction %s" % (state['game']['turn'], direction))
play(newState)
print("Start: " + state['viewUrl'])
play(state)
if __name__ == "__main__":
if (len(sys.argv) > 1):
SERVER_HOST = sys.argv[1]
start(sys.argv[1])
else:
print('Specify the server, ex: "http://localhost:9000"')
|
Add some debug when playing
|
Add some debug when playing
|
Python
|
apache-2.0
|
miguel89/vinidium
|
#!/usr/bin/env python
import os
import sys
import requests
import re
from bot import RandomBot
SERVER_HOST = 'http://localhost:9000'
trainingState = requests.post(SERVER_HOST + '/api/training/alone').json()
state = trainingState
bot = RandomBot()
def move(url, direction):
r = requests.post(url, {'dir': direction})
return r.json()
def start(server_url):
def play(state):
if (state['game']['finished']):
print('game finished')
else:
url = state['playUrl']
direction = bot.move(state)
newState = move(state['playUrl'], direction)
play(newState)
print("Start: " + state['viewUrl'])
play(state)
if __name__ == "__main__":
if (len(sys.argv) > 1):
SERVER_HOST = sys.argv[1]
start(sys.argv[1])
else:
print('Specify the server, ex: "http://localhost:9000"')
Add some debug when playing
|
#!/usr/bin/env python
import os
import sys
import requests
import re
from bot import RandomBot
SERVER_HOST = 'http://localhost:9000'
trainingState = requests.post(SERVER_HOST + '/api/training/alone').json()
state = trainingState
bot = RandomBot()
def move(url, direction):
r = requests.post(url, {'dir': direction})
return r.json()
def start(server_url):
def play(state):
if (state['game']['finished']):
print('game finished')
else:
url = state['playUrl']
direction = bot.move(state)
newState = move(state['playUrl'], direction)
print("Playing turn %d with direction %s" % (state['game']['turn'], direction))
play(newState)
print("Start: " + state['viewUrl'])
play(state)
if __name__ == "__main__":
if (len(sys.argv) > 1):
SERVER_HOST = sys.argv[1]
start(sys.argv[1])
else:
print('Specify the server, ex: "http://localhost:9000"')
|
<commit_before>#!/usr/bin/env python
import os
import sys
import requests
import re
from bot import RandomBot
SERVER_HOST = 'http://localhost:9000'
trainingState = requests.post(SERVER_HOST + '/api/training/alone').json()
state = trainingState
bot = RandomBot()
def move(url, direction):
r = requests.post(url, {'dir': direction})
return r.json()
def start(server_url):
def play(state):
if (state['game']['finished']):
print('game finished')
else:
url = state['playUrl']
direction = bot.move(state)
newState = move(state['playUrl'], direction)
play(newState)
print("Start: " + state['viewUrl'])
play(state)
if __name__ == "__main__":
if (len(sys.argv) > 1):
SERVER_HOST = sys.argv[1]
start(sys.argv[1])
else:
print('Specify the server, ex: "http://localhost:9000"')
<commit_msg>Add some debug when playing<commit_after>
|
#!/usr/bin/env python
import os
import sys
import requests
import re
from bot import RandomBot
SERVER_HOST = 'http://localhost:9000'
trainingState = requests.post(SERVER_HOST + '/api/training/alone').json()
state = trainingState
bot = RandomBot()
def move(url, direction):
r = requests.post(url, {'dir': direction})
return r.json()
def start(server_url):
def play(state):
if (state['game']['finished']):
print('game finished')
else:
url = state['playUrl']
direction = bot.move(state)
newState = move(state['playUrl'], direction)
print("Playing turn %d with direction %s" % (state['game']['turn'], direction))
play(newState)
print("Start: " + state['viewUrl'])
play(state)
if __name__ == "__main__":
if (len(sys.argv) > 1):
SERVER_HOST = sys.argv[1]
start(sys.argv[1])
else:
print('Specify the server, ex: "http://localhost:9000"')
|
#!/usr/bin/env python
import os
import sys
import requests
import re
from bot import RandomBot
SERVER_HOST = 'http://localhost:9000'
trainingState = requests.post(SERVER_HOST + '/api/training/alone').json()
state = trainingState
bot = RandomBot()
def move(url, direction):
r = requests.post(url, {'dir': direction})
return r.json()
def start(server_url):
def play(state):
if (state['game']['finished']):
print('game finished')
else:
url = state['playUrl']
direction = bot.move(state)
newState = move(state['playUrl'], direction)
play(newState)
print("Start: " + state['viewUrl'])
play(state)
if __name__ == "__main__":
if (len(sys.argv) > 1):
SERVER_HOST = sys.argv[1]
start(sys.argv[1])
else:
print('Specify the server, ex: "http://localhost:9000"')
Add some debug when playing#!/usr/bin/env python
import os
import sys
import requests
import re
from bot import RandomBot
SERVER_HOST = 'http://localhost:9000'
trainingState = requests.post(SERVER_HOST + '/api/training/alone').json()
state = trainingState
bot = RandomBot()
def move(url, direction):
r = requests.post(url, {'dir': direction})
return r.json()
def start(server_url):
def play(state):
if (state['game']['finished']):
print('game finished')
else:
url = state['playUrl']
direction = bot.move(state)
newState = move(state['playUrl'], direction)
print("Playing turn %d with direction %s" % (state['game']['turn'], direction))
play(newState)
print("Start: " + state['viewUrl'])
play(state)
if __name__ == "__main__":
if (len(sys.argv) > 1):
SERVER_HOST = sys.argv[1]
start(sys.argv[1])
else:
print('Specify the server, ex: "http://localhost:9000"')
|
<commit_before>#!/usr/bin/env python
import os
import sys
import requests
import re
from bot import RandomBot
SERVER_HOST = 'http://localhost:9000'
trainingState = requests.post(SERVER_HOST + '/api/training/alone').json()
state = trainingState
bot = RandomBot()
def move(url, direction):
r = requests.post(url, {'dir': direction})
return r.json()
def start(server_url):
def play(state):
if (state['game']['finished']):
print('game finished')
else:
url = state['playUrl']
direction = bot.move(state)
newState = move(state['playUrl'], direction)
play(newState)
print("Start: " + state['viewUrl'])
play(state)
if __name__ == "__main__":
if (len(sys.argv) > 1):
SERVER_HOST = sys.argv[1]
start(sys.argv[1])
else:
print('Specify the server, ex: "http://localhost:9000"')
<commit_msg>Add some debug when playing<commit_after>#!/usr/bin/env python
import os
import sys
import requests
import re
from bot import RandomBot
SERVER_HOST = 'http://localhost:9000'
trainingState = requests.post(SERVER_HOST + '/api/training/alone').json()
state = trainingState
bot = RandomBot()
def move(url, direction):
r = requests.post(url, {'dir': direction})
return r.json()
def start(server_url):
def play(state):
if (state['game']['finished']):
print('game finished')
else:
url = state['playUrl']
direction = bot.move(state)
newState = move(state['playUrl'], direction)
print("Playing turn %d with direction %s" % (state['game']['turn'], direction))
play(newState)
print("Start: " + state['viewUrl'])
play(state)
if __name__ == "__main__":
if (len(sys.argv) > 1):
SERVER_HOST = sys.argv[1]
start(sys.argv[1])
else:
print('Specify the server, ex: "http://localhost:9000"')
|
a0dda9abaebd154c8e4fd68206c0f10d796ae75d
|
tests/property/app_test.py
|
tests/property/app_test.py
|
# -*- coding: utf-8 -*-
"""
Property Test: orchard.app
"""
import hypothesis
import hypothesis.strategies as st
import unittest
import orchard
class AppPropertyTest(unittest.TestCase):
def setUp(self):
self.app_context = orchard.app.app_context()
self.app_context.push()
self.client = orchard.app.test_client(use_cookies = True)
def tearDown(self):
self.app_context.pop()
@hypothesis.given(name = st.text(alphabet = ['a', 'b', 'c', 'A', 'B', 'C']))
def test_index(self, name):
response = self.client.get('/{name}'.format(name = name))
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue(name in data)
|
# -*- coding: utf-8 -*-
"""
Property Test: orchard.app
"""
import hypothesis
import hypothesis.strategies as st
import unittest
import orchard
class AppPropertyTest(unittest.TestCase):
def setUp(self):
self.app_context = orchard.app.app_context()
self.app_context.push()
self.client = orchard.app.test_client(use_cookies = True)
def tearDown(self):
self.app_context.pop()
@hypothesis.given(name = st.text(alphabet = ['a', 'b', 'c', 'A', 'B', 'C']))
def test_index(self, name):
response = self.client.get('/{name}'.format(name = name))
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue(name in data)
|
Remove blank line at end of file.
|
Remove blank line at end of file.
|
Python
|
mit
|
BMeu/Orchard,BMeu/Orchard
|
# -*- coding: utf-8 -*-
"""
Property Test: orchard.app
"""
import hypothesis
import hypothesis.strategies as st
import unittest
import orchard
class AppPropertyTest(unittest.TestCase):
def setUp(self):
self.app_context = orchard.app.app_context()
self.app_context.push()
self.client = orchard.app.test_client(use_cookies = True)
def tearDown(self):
self.app_context.pop()
@hypothesis.given(name = st.text(alphabet = ['a', 'b', 'c', 'A', 'B', 'C']))
def test_index(self, name):
response = self.client.get('/{name}'.format(name = name))
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue(name in data)
Remove blank line at end of file.
|
# -*- coding: utf-8 -*-
"""
Property Test: orchard.app
"""
import hypothesis
import hypothesis.strategies as st
import unittest
import orchard
class AppPropertyTest(unittest.TestCase):
def setUp(self):
self.app_context = orchard.app.app_context()
self.app_context.push()
self.client = orchard.app.test_client(use_cookies = True)
def tearDown(self):
self.app_context.pop()
@hypothesis.given(name = st.text(alphabet = ['a', 'b', 'c', 'A', 'B', 'C']))
def test_index(self, name):
response = self.client.get('/{name}'.format(name = name))
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue(name in data)
|
<commit_before># -*- coding: utf-8 -*-
"""
Property Test: orchard.app
"""
import hypothesis
import hypothesis.strategies as st
import unittest
import orchard
class AppPropertyTest(unittest.TestCase):
def setUp(self):
self.app_context = orchard.app.app_context()
self.app_context.push()
self.client = orchard.app.test_client(use_cookies = True)
def tearDown(self):
self.app_context.pop()
@hypothesis.given(name = st.text(alphabet = ['a', 'b', 'c', 'A', 'B', 'C']))
def test_index(self, name):
response = self.client.get('/{name}'.format(name = name))
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue(name in data)
<commit_msg>Remove blank line at end of file.<commit_after>
|
# -*- coding: utf-8 -*-
"""
Property Test: orchard.app
"""
import hypothesis
import hypothesis.strategies as st
import unittest
import orchard
class AppPropertyTest(unittest.TestCase):
def setUp(self):
self.app_context = orchard.app.app_context()
self.app_context.push()
self.client = orchard.app.test_client(use_cookies = True)
def tearDown(self):
self.app_context.pop()
@hypothesis.given(name = st.text(alphabet = ['a', 'b', 'c', 'A', 'B', 'C']))
def test_index(self, name):
response = self.client.get('/{name}'.format(name = name))
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue(name in data)
|
# -*- coding: utf-8 -*-
"""
Property Test: orchard.app
"""
import hypothesis
import hypothesis.strategies as st
import unittest
import orchard
class AppPropertyTest(unittest.TestCase):
def setUp(self):
self.app_context = orchard.app.app_context()
self.app_context.push()
self.client = orchard.app.test_client(use_cookies = True)
def tearDown(self):
self.app_context.pop()
@hypothesis.given(name = st.text(alphabet = ['a', 'b', 'c', 'A', 'B', 'C']))
def test_index(self, name):
response = self.client.get('/{name}'.format(name = name))
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue(name in data)
Remove blank line at end of file.# -*- coding: utf-8 -*-
"""
Property Test: orchard.app
"""
import hypothesis
import hypothesis.strategies as st
import unittest
import orchard
class AppPropertyTest(unittest.TestCase):
def setUp(self):
self.app_context = orchard.app.app_context()
self.app_context.push()
self.client = orchard.app.test_client(use_cookies = True)
def tearDown(self):
self.app_context.pop()
@hypothesis.given(name = st.text(alphabet = ['a', 'b', 'c', 'A', 'B', 'C']))
def test_index(self, name):
response = self.client.get('/{name}'.format(name = name))
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue(name in data)
|
<commit_before># -*- coding: utf-8 -*-
"""
Property Test: orchard.app
"""
import hypothesis
import hypothesis.strategies as st
import unittest
import orchard
class AppPropertyTest(unittest.TestCase):
def setUp(self):
self.app_context = orchard.app.app_context()
self.app_context.push()
self.client = orchard.app.test_client(use_cookies = True)
def tearDown(self):
self.app_context.pop()
@hypothesis.given(name = st.text(alphabet = ['a', 'b', 'c', 'A', 'B', 'C']))
def test_index(self, name):
response = self.client.get('/{name}'.format(name = name))
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue(name in data)
<commit_msg>Remove blank line at end of file.<commit_after># -*- coding: utf-8 -*-
"""
Property Test: orchard.app
"""
import hypothesis
import hypothesis.strategies as st
import unittest
import orchard
class AppPropertyTest(unittest.TestCase):
def setUp(self):
self.app_context = orchard.app.app_context()
self.app_context.push()
self.client = orchard.app.test_client(use_cookies = True)
def tearDown(self):
self.app_context.pop()
@hypothesis.given(name = st.text(alphabet = ['a', 'b', 'c', 'A', 'B', 'C']))
def test_index(self, name):
response = self.client.get('/{name}'.format(name = name))
data = response.get_data(as_text = True)
self.assertEqual(response.status_code, 200)
self.assertTrue(name in data)
|
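The commit above only trims trailing whitespace, but the surrounding test shows the Hypothesis pattern of constraining generated input with an explicit alphabet. A self-contained sketch of that pattern, independent of the orchard app (the greeting property below is illustrative):

import hypothesis
import hypothesis.strategies as st

@hypothesis.given(name=st.text(alphabet='abcABC', min_size=1))
def check_greeting_contains_name(name):
    # Property: the rendered greeting always embeds the generated name.
    greeting = 'Hello, {name}!'.format(name=name)
    assert name in greeting

if __name__ == '__main__':
    check_greeting_contains_name()  # Hypothesis supplies many generated examples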
d66a6325d210b075ed9aed7b2446aaf079df7936
|
blackbelt/tasks.py
|
blackbelt/tasks.py
|
import click
import os
plugin_folder = os.path.join(os.path.dirname(__file__), 'commands')
class BlackBelt(click.MultiCommand):
def list_commands(self, ctx):
rv = []
for filename in os.listdir(plugin_folder):
if filename.endswith('.py') and filename != '__init__.py':
rv.append(filename[:-3])
rv.sort()
return rv
def get_command(self, ctx, name):
ns = {}
fn = os.path.join(plugin_folder, name + '.py')
with open(fn) as f:
code = compile(f.read(), fn, 'exec')
eval(code, ns, ns)
return ns['cli']
cli = BlackBelt(help='Black Belt: automate project The Apiary Way. Please provide a command.')
# backward compatibility
def main():
cli()
if __name__ == '__main__':
cli()
|
import click
import os
plugin_folder = os.path.join(os.path.dirname(__file__), 'commands')
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
class BlackBelt(click.MultiCommand):
def list_commands(self, ctx):
rv = []
for filename in os.listdir(plugin_folder):
if filename.endswith('.py') and filename != '__init__.py':
rv.append(filename[:-3])
rv.sort()
return rv
def get_command(self, ctx, name):
ns = {}
fn = os.path.join(plugin_folder, name + '.py')
with open(fn) as f:
code = compile(f.read(), fn, 'exec')
eval(code, ns, ns)
return ns['cli']
cli = BlackBelt(context_settings=CONTEXT_SETTINGS, help='Black Belt: automate project The Apiary Way. Please provide a command.')
# backward compatibility
def main():
cli()
if __name__ == '__main__':
cli()
|
Add alias -h to --help
|
Add alias -h to --help
|
Python
|
mit
|
apiaryio/black-belt
|
import click
import os
plugin_folder = os.path.join(os.path.dirname(__file__), 'commands')
class BlackBelt(click.MultiCommand):
def list_commands(self, ctx):
rv = []
for filename in os.listdir(plugin_folder):
if filename.endswith('.py') and filename != '__init__.py':
rv.append(filename[:-3])
rv.sort()
return rv
def get_command(self, ctx, name):
ns = {}
fn = os.path.join(plugin_folder, name + '.py')
with open(fn) as f:
code = compile(f.read(), fn, 'exec')
eval(code, ns, ns)
return ns['cli']
cli = BlackBelt(help='Black Belt: automate project The Apiary Way. Please provide a command.')
# backward compatibility
def main():
cli()
if __name__ == '__main__':
cli()
Add alias -h to --help
|
import click
import os
plugin_folder = os.path.join(os.path.dirname(__file__), 'commands')
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
class BlackBelt(click.MultiCommand):
def list_commands(self, ctx):
rv = []
for filename in os.listdir(plugin_folder):
if filename.endswith('.py') and filename != '__init__.py':
rv.append(filename[:-3])
rv.sort()
return rv
def get_command(self, ctx, name):
ns = {}
fn = os.path.join(plugin_folder, name + '.py')
with open(fn) as f:
code = compile(f.read(), fn, 'exec')
eval(code, ns, ns)
return ns['cli']
cli = BlackBelt(context_settings=CONTEXT_SETTINGS, help='Black Belt: automate project The Apiary Way. Please provide a command.')
# backward compatibility
def main():
cli()
if __name__ == '__main__':
cli()
|
<commit_before>import click
import os
plugin_folder = os.path.join(os.path.dirname(__file__), 'commands')
class BlackBelt(click.MultiCommand):
def list_commands(self, ctx):
rv = []
for filename in os.listdir(plugin_folder):
if filename.endswith('.py') and filename != '__init__.py':
rv.append(filename[:-3])
rv.sort()
return rv
def get_command(self, ctx, name):
ns = {}
fn = os.path.join(plugin_folder, name + '.py')
with open(fn) as f:
code = compile(f.read(), fn, 'exec')
eval(code, ns, ns)
return ns['cli']
cli = BlackBelt(help='Black Belt: automate project The Apiary Way. Please provide a command.')
# backward compatibility
def main():
cli()
if __name__ == '__main__':
cli()
<commit_msg>Add alias -h to --help<commit_after>
|
import click
import os
plugin_folder = os.path.join(os.path.dirname(__file__), 'commands')
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
class BlackBelt(click.MultiCommand):
def list_commands(self, ctx):
rv = []
for filename in os.listdir(plugin_folder):
if filename.endswith('.py') and filename != '__init__.py':
rv.append(filename[:-3])
rv.sort()
return rv
def get_command(self, ctx, name):
ns = {}
fn = os.path.join(plugin_folder, name + '.py')
with open(fn) as f:
code = compile(f.read(), fn, 'exec')
eval(code, ns, ns)
return ns['cli']
cli = BlackBelt(context_settings=CONTEXT_SETTINGS, help='Black Belt: automate project The Apiary Way. Please provide a command.')
# backward compatibility
def main():
cli()
if __name__ == '__main__':
cli()
|
import click
import os
plugin_folder = os.path.join(os.path.dirname(__file__), 'commands')
class BlackBelt(click.MultiCommand):
def list_commands(self, ctx):
rv = []
for filename in os.listdir(plugin_folder):
if filename.endswith('.py') and filename != '__init__.py':
rv.append(filename[:-3])
rv.sort()
return rv
def get_command(self, ctx, name):
ns = {}
fn = os.path.join(plugin_folder, name + '.py')
with open(fn) as f:
code = compile(f.read(), fn, 'exec')
eval(code, ns, ns)
return ns['cli']
cli = BlackBelt(help='Black Belt: automate project The Apiary Way. Please provide a command.')
# backward compatibility
def main():
cli()
if __name__ == '__main__':
cli()
Add alias -h to --helpimport click
import os
plugin_folder = os.path.join(os.path.dirname(__file__), 'commands')
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
class BlackBelt(click.MultiCommand):
def list_commands(self, ctx):
rv = []
for filename in os.listdir(plugin_folder):
if filename.endswith('.py') and filename != '__init__.py':
rv.append(filename[:-3])
rv.sort()
return rv
def get_command(self, ctx, name):
ns = {}
fn = os.path.join(plugin_folder, name + '.py')
with open(fn) as f:
code = compile(f.read(), fn, 'exec')
eval(code, ns, ns)
return ns['cli']
cli = BlackBelt(context_settings=CONTEXT_SETTINGS, help='Black Belt: automate project The Apiary Way. Please provide a command.')
# backward compatibility
def main():
cli()
if __name__ == '__main__':
cli()
|
<commit_before>import click
import os
plugin_folder = os.path.join(os.path.dirname(__file__), 'commands')
class BlackBelt(click.MultiCommand):
def list_commands(self, ctx):
rv = []
for filename in os.listdir(plugin_folder):
if filename.endswith('.py') and filename != '__init__.py':
rv.append(filename[:-3])
rv.sort()
return rv
def get_command(self, ctx, name):
ns = {}
fn = os.path.join(plugin_folder, name + '.py')
with open(fn) as f:
code = compile(f.read(), fn, 'exec')
eval(code, ns, ns)
return ns['cli']
cli = BlackBelt(help='Black Belt: automate project The Apiary Way. Please provide a command.')
# backward compatibility
def main():
cli()
if __name__ == '__main__':
cli()
<commit_msg>Add alias -h to --help<commit_after>import click
import os
plugin_folder = os.path.join(os.path.dirname(__file__), 'commands')
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
class BlackBelt(click.MultiCommand):
def list_commands(self, ctx):
rv = []
for filename in os.listdir(plugin_folder):
if filename.endswith('.py') and filename != '__init__.py':
rv.append(filename[:-3])
rv.sort()
return rv
def get_command(self, ctx, name):
ns = {}
fn = os.path.join(plugin_folder, name + '.py')
with open(fn) as f:
code = compile(f.read(), fn, 'exec')
eval(code, ns, ns)
return ns['cli']
cli = BlackBelt(context_settings=CONTEXT_SETTINGS, help='Black Belt: automate project The Apiary Way. Please provide a command.')
# backward compatibility
def main():
cli()
if __name__ == '__main__':
cli()
|
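The change above relies on click's context_settings hook: passing help_option_names makes -h an alias for --help on whichever command or MultiCommand receives it. A minimal standalone example of the same setting, unrelated to the black-belt plugin loader:

import click

CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])

@click.command(context_settings=CONTEXT_SETTINGS)
@click.argument('name')
def greet(name):
    """Toy command; both `greet -h` and `greet --help` print this help text."""
    click.echo('Hello, {}!'.format(name))

if __name__ == '__main__':
    greet()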
424588f4cdad2dd063b15895198611703b187bec
|
pynpact/tests/steps/conftest.py
|
pynpact/tests/steps/conftest.py
|
import pytest
import taskqueue
@pytest.fixture(scope="session")
def async_executor(request):
taskqueue.LISTEN_ADDRESS = ('127.0.1.1', 57129)
sm = taskqueue.get_ServerManager(make_server=True)
sm.start()
request.addfinalizer(sm.shutdown)
return sm.Server()
class NullExecutor(object):
"An executor that doens't actually execute anything, just keeps track"
tasks = None
def __init__(self):
self.tasks = {}
def enqueue(self, callable, tid=None, after=None):
if tid is None:
tid = randomid()
if after is not None:
for aid in after:
assert aid in self.tasks, \
"The NullExecutor can't be after a task that doesn't exist yet"
if tid not in self.tasks:
self.tasks[tid] = callable
return tid
@pytest.fixture
def null_executor(request):
return NullExecutor()
|
import pytest
def taskqueue_executor():
import taskqueue
taskqueue.LISTEN_ADDRESS = ('127.0.1.1', 57129)
sm = taskqueue.get_ServerManager(make_server=True)
sm.start()
request.addfinalizer(sm.shutdown)
return sm.Server()
@pytest.fixture(scope="session")
def async_executor(request):
from pynpact.executors import GeventExecutor
return GeventExecutor()
class NullExecutor(object):
"An executor that doens't actually execute anything, just keeps track"
tasks = None
def __init__(self):
self.tasks = {}
def enqueue(self, callable, tid=None, after=None):
if tid is None:
tid = randomid()
if after is not None:
for aid in after:
assert aid in self.tasks, \
"The NullExecutor can't be after a task that doesn't exist yet"
if tid not in self.tasks:
self.tasks[tid] = callable
return tid
@pytest.fixture
def null_executor(request):
return NullExecutor()
|
Make pynpact tests use GeventExecutor
|
Make pynpact tests use GeventExecutor
We've almost completely deprecated taskqueue at this point; let's test
the new pieces instead of the old.
|
Python
|
bsd-3-clause
|
NProfileAnalysisComputationalTool/npact,NProfileAnalysisComputationalTool/npact,NProfileAnalysisComputationalTool/npact,NProfileAnalysisComputationalTool/npact,NProfileAnalysisComputationalTool/npact
|
import pytest
import taskqueue
@pytest.fixture(scope="session")
def async_executor(request):
taskqueue.LISTEN_ADDRESS = ('127.0.1.1', 57129)
sm = taskqueue.get_ServerManager(make_server=True)
sm.start()
request.addfinalizer(sm.shutdown)
return sm.Server()
class NullExecutor(object):
"An executor that doens't actually execute anything, just keeps track"
tasks = None
def __init__(self):
self.tasks = {}
def enqueue(self, callable, tid=None, after=None):
if tid is None:
tid = randomid()
if after is not None:
for aid in after:
assert aid in self.tasks, \
"The NullExecutor can't be after a task that doesn't exist yet"
if tid not in self.tasks:
self.tasks[tid] = callable
return tid
@pytest.fixture
def null_executor(request):
return NullExecutor()
Make pynpact tests use GeventExecutor
We've almost completely deprecated taskqueue at this point; let's test
the new pieces instead of the old.
|
import pytest
def taskqueue_executor():
import taskqueue
taskqueue.LISTEN_ADDRESS = ('127.0.1.1', 57129)
sm = taskqueue.get_ServerManager(make_server=True)
sm.start()
request.addfinalizer(sm.shutdown)
return sm.Server()
@pytest.fixture(scope="session")
def async_executor(request):
from pynpact.executors import GeventExecutor
return GeventExecutor()
class NullExecutor(object):
"An executor that doens't actually execute anything, just keeps track"
tasks = None
def __init__(self):
self.tasks = {}
def enqueue(self, callable, tid=None, after=None):
if tid is None:
tid = randomid()
if after is not None:
for aid in after:
assert aid in self.tasks, \
"The NullExecutor can't be after a task that doesn't exist yet"
if tid not in self.tasks:
self.tasks[tid] = callable
return tid
@pytest.fixture
def null_executor(request):
return NullExecutor()
|
<commit_before>import pytest
import taskqueue
@pytest.fixture(scope="session")
def async_executor(request):
taskqueue.LISTEN_ADDRESS = ('127.0.1.1', 57129)
sm = taskqueue.get_ServerManager(make_server=True)
sm.start()
request.addfinalizer(sm.shutdown)
return sm.Server()
class NullExecutor(object):
"An executor that doens't actually execute anything, just keeps track"
tasks = None
def __init__(self):
self.tasks = {}
def enqueue(self, callable, tid=None, after=None):
if tid is None:
tid = randomid()
if after is not None:
for aid in after:
assert aid in self.tasks, \
"The NullExecutor can't be after a task that doesn't exist yet"
if tid not in self.tasks:
self.tasks[tid] = callable
return tid
@pytest.fixture
def null_executor(request):
return NullExecutor()
<commit_msg>Make pynpact tests use GeventExecutor
We've almost completely deprecated taskqueue at this point; let's test
the new pieces instead of the old.<commit_after>
|
import pytest
def taskqueue_executor():
import taskqueue
taskqueue.LISTEN_ADDRESS = ('127.0.1.1', 57129)
sm = taskqueue.get_ServerManager(make_server=True)
sm.start()
request.addfinalizer(sm.shutdown)
return sm.Server()
@pytest.fixture(scope="session")
def async_executor(request):
from pynpact.executors import GeventExecutor
return GeventExecutor()
class NullExecutor(object):
"An executor that doens't actually execute anything, just keeps track"
tasks = None
def __init__(self):
self.tasks = {}
def enqueue(self, callable, tid=None, after=None):
if tid is None:
tid = randomid()
if after is not None:
for aid in after:
assert aid in self.tasks, \
"The NullExecutor can't be after a task that doesn't exist yet"
if tid not in self.tasks:
self.tasks[tid] = callable
return tid
@pytest.fixture
def null_executor(request):
return NullExecutor()
|
import pytest
import taskqueue
@pytest.fixture(scope="session")
def async_executor(request):
taskqueue.LISTEN_ADDRESS = ('127.0.1.1', 57129)
sm = taskqueue.get_ServerManager(make_server=True)
sm.start()
request.addfinalizer(sm.shutdown)
return sm.Server()
class NullExecutor(object):
"An executor that doens't actually execute anything, just keeps track"
tasks = None
def __init__(self):
self.tasks = {}
def enqueue(self, callable, tid=None, after=None):
if tid is None:
tid = randomid()
if after is not None:
for aid in after:
assert aid in self.tasks, \
"The NullExecutor can't be after a task that doesn't exist yet"
if tid not in self.tasks:
self.tasks[tid] = callable
return tid
@pytest.fixture
def null_executor(request):
return NullExecutor()
Make pynpact tests use GeventExecutor
We've almost completely deprecated taskqueue at this point; let's test
the new pieces instead of the old.import pytest
def taskqueue_executor():
import taskqueue
taskqueue.LISTEN_ADDRESS = ('127.0.1.1', 57129)
sm = taskqueue.get_ServerManager(make_server=True)
sm.start()
request.addfinalizer(sm.shutdown)
return sm.Server()
@pytest.fixture(scope="session")
def async_executor(request):
from pynpact.executors import GeventExecutor
return GeventExecutor()
class NullExecutor(object):
"An executor that doens't actually execute anything, just keeps track"
tasks = None
def __init__(self):
self.tasks = {}
def enqueue(self, callable, tid=None, after=None):
if tid is None:
tid = randomid()
if after is not None:
for aid in after:
assert aid in self.tasks, \
"The NullExecutor can't be after a task that doesn't exist yet"
if tid not in self.tasks:
self.tasks[tid] = callable
return tid
@pytest.fixture
def null_executor(request):
return NullExecutor()
|
<commit_before>import pytest
import taskqueue
@pytest.fixture(scope="session")
def async_executor(request):
taskqueue.LISTEN_ADDRESS = ('127.0.1.1', 57129)
sm = taskqueue.get_ServerManager(make_server=True)
sm.start()
request.addfinalizer(sm.shutdown)
return sm.Server()
class NullExecutor(object):
"An executor that doens't actually execute anything, just keeps track"
tasks = None
def __init__(self):
self.tasks = {}
def enqueue(self, callable, tid=None, after=None):
if tid is None:
tid = randomid()
if after is not None:
for aid in after:
assert aid in self.tasks, \
"The NullExecutor can't be after a task that doesn't exist yet"
if tid not in self.tasks:
self.tasks[tid] = callable
return tid
@pytest.fixture
def null_executor(request):
return NullExecutor()
<commit_msg>Make pynpact tests use GeventExecutor
We've almost completely deprecated taskqueue at this point; let's test
the new pieces instead of the old.<commit_after>import pytest
def taskqueue_executor():
import taskqueue
taskqueue.LISTEN_ADDRESS = ('127.0.1.1', 57129)
sm = taskqueue.get_ServerManager(make_server=True)
sm.start()
request.addfinalizer(sm.shutdown)
return sm.Server()
@pytest.fixture(scope="session")
def async_executor(request):
from pynpact.executors import GeventExecutor
return GeventExecutor()
class NullExecutor(object):
"An executor that doens't actually execute anything, just keeps track"
tasks = None
def __init__(self):
self.tasks = {}
def enqueue(self, callable, tid=None, after=None):
if tid is None:
tid = randomid()
if after is not None:
for aid in after:
assert aid in self.tasks, \
"The NullExecutor can't be after a task that doesn't exist yet"
if tid not in self.tasks:
self.tasks[tid] = callable
return tid
@pytest.fixture
def null_executor(request):
return NullExecutor()
|
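Beyond swapping in GeventExecutor, the record illustrates a common testing pattern: expose the executor as a pytest fixture so a real async backend can be replaced by a recording fake. A generic sketch of that pattern; RecordingExecutor is illustrative and is neither pynpact's GeventExecutor nor the NullExecutor above:

import uuid
import pytest

class RecordingExecutor(object):
    """Stores enqueued callables instead of running them."""
    def __init__(self):
        self.tasks = {}

    def enqueue(self, func, tid=None, after=None):
        tid = tid or uuid.uuid4().hex
        if after:
            # Dependencies must already be known, mirroring the check above.
            assert all(aid in self.tasks for aid in after)
        self.tasks.setdefault(tid, func)
        return tid

@pytest.fixture
def recording_executor():
    return RecordingExecutor()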
3cdff0baeb349bcf4761269cc289cf2722ecbe62
|
rasp/base.py
|
rasp/base.py
|
import urllib
import urllib.error
import urllib.request
from rasp.constants import DEFAULT_USER_AGENT
from rasp.errors import EngineError
class Engine(object):
def get_page_source(self, url):
raise NotImplemented("get_page_source not implemented for {}"
.format(str(self.__class__.__name__)))
def cleanup(self):
return
class DefaultEngine(Engine):
def __init__(self, data=None, headers=None):
self.data = data
if headers is None:
self.headers = {'User-Agent': DEFAULT_USER_AGENT}
else:
self.headers = headers
def __copy__(self):
return DefaultEngine(self.data, self.headers)
def get_page_source(self, url, data=None):
if not url:
return EngineError('url needs to be specified')
data = self.data or data
try:
req = urllib.request.Request(url, data, self.headers)
source = str(urllib.request.urlopen(req).read())
return Webpage(url, source)
except urllib.error.HTTPError as e:
return
class Webpage(object):
def __init__(self, url=None, source=None):
self.url = url
self.source = source
def set_source(self, source):
self.source = source
def set_url(self, url):
self.url = url
def __repr__(self):
return "url: {}".format(self.url)
|
import urllib
import urllib.error
import urllib.request
from rasp.constants import DEFAULT_USER_AGENT
from rasp.errors import EngineError
class Engine(object):
def get_page_source(self, url):
raise NotImplemented("get_page_source not implemented for {}"
.format(str(self.__class__.__name__)))
def cleanup(self):
return
class DefaultEngine(Engine):
def __init__(self, data=None, headers=None):
self.data = data
self.headers = headers or {'User-Agent': DEFAULT_USER_AGENT}
def __copy__(self):
return DefaultEngine(self.data, self.headers)
def get_page_source(self, url, data=None):
if not url:
return EngineError('url needs to be specified')
data = self.data or data
try:
req = urllib.request.Request(url, data, self.headers)
source = str(urllib.request.urlopen(req).read())
return Webpage(url, source)
except urllib.error.HTTPError as e:
return
class Webpage(object):
def __init__(self, url=None, source=None):
self.url = url
self.source = source
def set_source(self, source):
self.source = source
def set_url(self, url):
self.url = url
def __repr__(self):
return "url: {}".format(self.url)
|
Use more pythonic pattern for default attributes
|
Use more pythonic pattern for default attributes
|
Python
|
bsd-3-clause
|
anidata/rasp
|
import urllib
import urllib.error
import urllib.request
from rasp.constants import DEFAULT_USER_AGENT
from rasp.errors import EngineError
class Engine(object):
def get_page_source(self, url):
raise NotImplemented("get_page_source not implemented for {}"
.format(str(self.__class__.__name__)))
def cleanup(self):
return
class DefaultEngine(Engine):
def __init__(self, data=None, headers=None):
self.data = data
if headers is None:
self.headers = {'User-Agent': DEFAULT_USER_AGENT}
else:
self.headers = headers
def __copy__(self):
return DefaultEngine(self.data, self.headers)
def get_page_source(self, url, data=None):
if not url:
return EngineError('url needs to be specified')
data = self.data or data
try:
req = urllib.request.Request(url, data, self.headers)
source = str(urllib.request.urlopen(req).read())
return Webpage(url, source)
except urllib.error.HTTPError as e:
return
class Webpage(object):
def __init__(self, url=None, source=None):
self.url = url
self.source = source
def set_source(self, source):
self.source = source
def set_url(self, url):
self.url = url
def __repr__(self):
return "url: {}".format(self.url)
Use more pythonic pattern for default attributes
|
import urllib
import urllib.error
import urllib.request
from rasp.constants import DEFAULT_USER_AGENT
from rasp.errors import EngineError
class Engine(object):
def get_page_source(self, url):
raise NotImplemented("get_page_source not implemented for {}"
.format(str(self.__class__.__name__)))
def cleanup(self):
return
class DefaultEngine(Engine):
def __init__(self, data=None, headers=None):
self.data = data
self.headers = headers or {'User-Agent': DEFAULT_USER_AGENT}
def __copy__(self):
return DefaultEngine(self.data, self.headers)
def get_page_source(self, url, data=None):
if not url:
return EngineError('url needs to be specified')
data = self.data or data
try:
req = urllib.request.Request(url, data, self.headers)
source = str(urllib.request.urlopen(req).read())
return Webpage(url, source)
except urllib.error.HTTPError as e:
return
class Webpage(object):
def __init__(self, url=None, source=None):
self.url = url
self.source = source
def set_source(self, source):
self.source = source
def set_url(self, url):
self.url = url
def __repr__(self):
return "url: {}".format(self.url)
|
<commit_before>import urllib
import urllib.error
import urllib.request
from rasp.constants import DEFAULT_USER_AGENT
from rasp.errors import EngineError
class Engine(object):
def get_page_source(self, url):
raise NotImplemented("get_page_source not implemented for {}"
.format(str(self.__class__.__name__)))
def cleanup(self):
return
class DefaultEngine(Engine):
def __init__(self, data=None, headers=None):
self.data = data
if headers is None:
self.headers = {'User-Agent': DEFAULT_USER_AGENT}
else:
self.headers = headers
def __copy__(self):
return DefaultEngine(self.data, self.headers)
def get_page_source(self, url, data=None):
if not url:
return EngineError('url needs to be specified')
data = self.data or data
try:
req = urllib.request.Request(url, data, self.headers)
source = str(urllib.request.urlopen(req).read())
return Webpage(url, source)
except urllib.error.HTTPError as e:
return
class Webpage(object):
def __init__(self, url=None, source=None):
self.url = url
self.source = source
def set_source(self, source):
self.source = source
def set_url(self, url):
self.url = url
def __repr__(self):
return "url: {}".format(self.url)
<commit_msg>Use more pythonic pattern for default attributes<commit_after>
|
import urllib
import urllib.error
import urllib.request
from rasp.constants import DEFAULT_USER_AGENT
from rasp.errors import EngineError
class Engine(object):
def get_page_source(self, url):
raise NotImplemented("get_page_source not implemented for {}"
.format(str(self.__class__.__name__)))
def cleanup(self):
return
class DefaultEngine(Engine):
def __init__(self, data=None, headers=None):
self.data = data
self.headers = headers or {'User-Agent': DEFAULT_USER_AGENT}
def __copy__(self):
return DefaultEngine(self.data, self.headers)
def get_page_source(self, url, data=None):
if not url:
return EngineError('url needs to be specified')
data = self.data or data
try:
req = urllib.request.Request(url, data, self.headers)
source = str(urllib.request.urlopen(req).read())
return Webpage(url, source)
except urllib.error.HTTPError as e:
return
class Webpage(object):
def __init__(self, url=None, source=None):
self.url = url
self.source = source
def set_source(self, source):
self.source = source
def set_url(self, url):
self.url = url
def __repr__(self):
return "url: {}".format(self.url)
|
import urllib
import urllib.error
import urllib.request
from rasp.constants import DEFAULT_USER_AGENT
from rasp.errors import EngineError
class Engine(object):
def get_page_source(self, url):
raise NotImplemented("get_page_source not implemented for {}"
.format(str(self.__class__.__name__)))
def cleanup(self):
return
class DefaultEngine(Engine):
def __init__(self, data=None, headers=None):
self.data = data
if headers is None:
self.headers = {'User-Agent': DEFAULT_USER_AGENT}
else:
self.headers = headers
def __copy__(self):
return DefaultEngine(self.data, self.headers)
def get_page_source(self, url, data=None):
if not url:
return EngineError('url needs to be specified')
data = self.data or data
try:
req = urllib.request.Request(url, data, self.headers)
source = str(urllib.request.urlopen(req).read())
return Webpage(url, source)
except urllib.error.HTTPError as e:
return
class Webpage(object):
def __init__(self, url=None, source=None):
self.url = url
self.source = source
def set_source(self, source):
self.source = source
def set_url(self, url):
self.url = url
def __repr__(self):
return "url: {}".format(self.url)
Use more pythonic pattern for default attributesimport urllib
import urllib.error
import urllib.request
from rasp.constants import DEFAULT_USER_AGENT
from rasp.errors import EngineError
class Engine(object):
def get_page_source(self, url):
raise NotImplemented("get_page_source not implemented for {}"
.format(str(self.__class__.__name__)))
def cleanup(self):
return
class DefaultEngine(Engine):
def __init__(self, data=None, headers=None):
self.data = data
self.headers = headers or {'User-Agent': DEFAULT_USER_AGENT}
def __copy__(self):
return DefaultEngine(self.data, self.headers)
def get_page_source(self, url, data=None):
if not url:
return EngineError('url needs to be specified')
data = self.data or data
try:
req = urllib.request.Request(url, data, self.headers)
source = str(urllib.request.urlopen(req).read())
return Webpage(url, source)
except urllib.error.HTTPError as e:
return
class Webpage(object):
def __init__(self, url=None, source=None):
self.url = url
self.source = source
def set_source(self, source):
self.source = source
def set_url(self, url):
self.url = url
def __repr__(self):
return "url: {}".format(self.url)
|
<commit_before>import urllib
import urllib.error
import urllib.request
from rasp.constants import DEFAULT_USER_AGENT
from rasp.errors import EngineError
class Engine(object):
def get_page_source(self, url):
raise NotImplemented("get_page_source not implemented for {}"
.format(str(self.__class__.__name__)))
def cleanup(self):
return
class DefaultEngine(Engine):
def __init__(self, data=None, headers=None):
self.data = data
if headers is None:
self.headers = {'User-Agent': DEFAULT_USER_AGENT}
else:
self.headers = headers
def __copy__(self):
return DefaultEngine(self.data, self.headers)
def get_page_source(self, url, data=None):
if not url:
return EngineError('url needs to be specified')
data = self.data or data
try:
req = urllib.request.Request(url, data, self.headers)
source = str(urllib.request.urlopen(req).read())
return Webpage(url, source)
except urllib.error.HTTPError as e:
return
class Webpage(object):
def __init__(self, url=None, source=None):
self.url = url
self.source = source
def set_source(self, source):
self.source = source
def set_url(self, url):
self.url = url
def __repr__(self):
return "url: {}".format(self.url)
<commit_msg>Use more pythonic pattern for default attributes<commit_after>import urllib
import urllib.error
import urllib.request
from rasp.constants import DEFAULT_USER_AGENT
from rasp.errors import EngineError
class Engine(object):
def get_page_source(self, url):
raise NotImplemented("get_page_source not implemented for {}"
.format(str(self.__class__.__name__)))
def cleanup(self):
return
class DefaultEngine(Engine):
def __init__(self, data=None, headers=None):
self.data = data
self.headers = headers or {'User-Agent': DEFAULT_USER_AGENT}
def __copy__(self):
return DefaultEngine(self.data, self.headers)
def get_page_source(self, url, data=None):
if not url:
return EngineError('url needs to be specified')
data = self.data or data
try:
req = urllib.request.Request(url, data, self.headers)
source = str(urllib.request.urlopen(req).read())
return Webpage(url, source)
except urllib.error.HTTPError as e:
return
class Webpage(object):
def __init__(self, url=None, source=None):
self.url = url
self.source = source
def set_source(self, source):
self.source = source
def set_url(self, url):
self.url = url
def __repr__(self):
return "url: {}".format(self.url)
|
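The idiom adopted above (headers or {...}) is shorter than the is-None check it replaces, but it also swallows falsy-but-valid arguments such as an empty dict. A small sketch contrasting the two forms; DEFAULT_HEADERS here is a stand-in constant, not rasp's:

DEFAULT_HEADERS = {'User-Agent': 'example-agent/1.0'}

def headers_with_or(headers=None):
    return headers or dict(DEFAULT_HEADERS)        # {} silently becomes the default

def headers_with_none_check(headers=None):
    return dict(DEFAULT_HEADERS) if headers is None else headers  # {} is preserved

assert headers_with_or({}) == DEFAULT_HEADERS
assert headers_with_none_check({}) == {}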
097bbde9aabc09d6bca679663c0ece3e12802414
|
utils/esicog.py
|
utils/esicog.py
|
import asyncio
import esipy
from discord.ext import commands
from requests.adapters import DEFAULT_POOLSIZE
from utils.log import get_logger
ESI_SWAGGER_JSON = 'https://esi.evetech.net/dev/swagger.json'
class EsiCog:
_esi_app_task: asyncio.Task = None
_semaphore = asyncio.Semaphore(DEFAULT_POOLSIZE)
def __init__(self, bot: commands.Bot):
logger = get_logger(__name__, bot)
if self._esi_app_task is None:
logger.info("Creating esipy App...")
self._esi_app_task = bot.loop.run_in_executor(
None, self._create_esi_app)
self._esi_app_task.add_done_callback(
lambda f: logger.info("esipy App created"))
def __unload(self):
self._esi_app_task.cancel()
async def get_esi_app(self) -> asyncio.Task:
return await self._esi_app_task
def _create_esi_app(self):
return esipy.App.create(url=ESI_SWAGGER_JSON)
async def esi_request(self, loop, client, operation):
async with self._semaphore:
return await loop.run_in_executor(None, client.request, operation)
|
import asyncio
import esipy
from discord.ext import commands
from requests.adapters import DEFAULT_POOLSIZE
from utils.log import get_logger
ESI_SWAGGER_JSON = 'https://esi.evetech.net/dev/swagger.json'
class EsiCog:
_esi_app_task: asyncio.Task = None
_semaphore = asyncio.Semaphore(DEFAULT_POOLSIZE)
def __init__(self, bot: commands.Bot):
logger = get_logger(__name__, bot)
if EsiCog._esi_app_task is None:
logger.info("Creating esipy App...")
EsiCog._esi_app_task = bot.loop.run_in_executor(
None, self._create_esi_app)
EsiCog._esi_app_task.add_done_callback(
lambda f: logger.info("esipy App created"))
async def get_esi_app(self) -> asyncio.Task:
return await self._esi_app_task
def _create_esi_app(self):
return esipy.App.create(url=ESI_SWAGGER_JSON)
async def esi_request(self, loop, client, operation):
async with self._semaphore:
return await loop.run_in_executor(None, client.request, operation)
|
Fix reference to class attribute
|
Fix reference to class attribute
|
Python
|
mit
|
randomic/antinub-gregbot
|
import asyncio
import esipy
from discord.ext import commands
from requests.adapters import DEFAULT_POOLSIZE
from utils.log import get_logger
ESI_SWAGGER_JSON = 'https://esi.evetech.net/dev/swagger.json'
class EsiCog:
_esi_app_task: asyncio.Task = None
_semaphore = asyncio.Semaphore(DEFAULT_POOLSIZE)
def __init__(self, bot: commands.Bot):
logger = get_logger(__name__, bot)
if self._esi_app_task is None:
logger.info("Creating esipy App...")
self._esi_app_task = bot.loop.run_in_executor(
None, self._create_esi_app)
self._esi_app_task.add_done_callback(
lambda f: logger.info("esipy App created"))
def __unload(self):
self._esi_app_task.cancel()
async def get_esi_app(self) -> asyncio.Task:
return await self._esi_app_task
def _create_esi_app(self):
return esipy.App.create(url=ESI_SWAGGER_JSON)
async def esi_request(self, loop, client, operation):
async with self._semaphore:
return await loop.run_in_executor(None, client.request, operation)
Fix reference to class attribute
|
import asyncio
import esipy
from discord.ext import commands
from requests.adapters import DEFAULT_POOLSIZE
from utils.log import get_logger
ESI_SWAGGER_JSON = 'https://esi.evetech.net/dev/swagger.json'
class EsiCog:
_esi_app_task: asyncio.Task = None
_semaphore = asyncio.Semaphore(DEFAULT_POOLSIZE)
def __init__(self, bot: commands.Bot):
logger = get_logger(__name__, bot)
if EsiCog._esi_app_task is None:
logger.info("Creating esipy App...")
EsiCog._esi_app_task = bot.loop.run_in_executor(
None, self._create_esi_app)
EsiCog._esi_app_task.add_done_callback(
lambda f: logger.info("esipy App created"))
async def get_esi_app(self) -> asyncio.Task:
return await self._esi_app_task
def _create_esi_app(self):
return esipy.App.create(url=ESI_SWAGGER_JSON)
async def esi_request(self, loop, client, operation):
async with self._semaphore:
return await loop.run_in_executor(None, client.request, operation)
|
<commit_before>import asyncio
import esipy
from discord.ext import commands
from requests.adapters import DEFAULT_POOLSIZE
from utils.log import get_logger
ESI_SWAGGER_JSON = 'https://esi.evetech.net/dev/swagger.json'
class EsiCog:
_esi_app_task: asyncio.Task = None
_semaphore = asyncio.Semaphore(DEFAULT_POOLSIZE)
def __init__(self, bot: commands.Bot):
logger = get_logger(__name__, bot)
if self._esi_app_task is None:
logger.info("Creating esipy App...")
self._esi_app_task = bot.loop.run_in_executor(
None, self._create_esi_app)
self._esi_app_task.add_done_callback(
lambda f: logger.info("esipy App created"))
def __unload(self):
self._esi_app_task.cancel()
async def get_esi_app(self) -> asyncio.Task:
return await self._esi_app_task
def _create_esi_app(self):
return esipy.App.create(url=ESI_SWAGGER_JSON)
async def esi_request(self, loop, client, operation):
async with self._semaphore:
return await loop.run_in_executor(None, client.request, operation)
<commit_msg>Fix reference to class attribute<commit_after>
|
import asyncio
import esipy
from discord.ext import commands
from requests.adapters import DEFAULT_POOLSIZE
from utils.log import get_logger
ESI_SWAGGER_JSON = 'https://esi.evetech.net/dev/swagger.json'
class EsiCog:
_esi_app_task: asyncio.Task = None
_semaphore = asyncio.Semaphore(DEFAULT_POOLSIZE)
def __init__(self, bot: commands.Bot):
logger = get_logger(__name__, bot)
if EsiCog._esi_app_task is None:
logger.info("Creating esipy App...")
EsiCog._esi_app_task = bot.loop.run_in_executor(
None, self._create_esi_app)
EsiCog._esi_app_task.add_done_callback(
lambda f: logger.info("esipy App created"))
async def get_esi_app(self) -> asyncio.Task:
return await self._esi_app_task
def _create_esi_app(self):
return esipy.App.create(url=ESI_SWAGGER_JSON)
async def esi_request(self, loop, client, operation):
async with self._semaphore:
return await loop.run_in_executor(None, client.request, operation)
|
import asyncio
import esipy
from discord.ext import commands
from requests.adapters import DEFAULT_POOLSIZE
from utils.log import get_logger
ESI_SWAGGER_JSON = 'https://esi.evetech.net/dev/swagger.json'
class EsiCog:
_esi_app_task: asyncio.Task = None
_semaphore = asyncio.Semaphore(DEFAULT_POOLSIZE)
def __init__(self, bot: commands.Bot):
logger = get_logger(__name__, bot)
if self._esi_app_task is None:
logger.info("Creating esipy App...")
self._esi_app_task = bot.loop.run_in_executor(
None, self._create_esi_app)
self._esi_app_task.add_done_callback(
lambda f: logger.info("esipy App created"))
def __unload(self):
self._esi_app_task.cancel()
async def get_esi_app(self) -> asyncio.Task:
return await self._esi_app_task
def _create_esi_app(self):
return esipy.App.create(url=ESI_SWAGGER_JSON)
async def esi_request(self, loop, client, operation):
async with self._semaphore:
return await loop.run_in_executor(None, client.request, operation)
Fix reference to class attributeimport asyncio
import esipy
from discord.ext import commands
from requests.adapters import DEFAULT_POOLSIZE
from utils.log import get_logger
ESI_SWAGGER_JSON = 'https://esi.evetech.net/dev/swagger.json'
class EsiCog:
_esi_app_task: asyncio.Task = None
_semaphore = asyncio.Semaphore(DEFAULT_POOLSIZE)
def __init__(self, bot: commands.Bot):
logger = get_logger(__name__, bot)
if EsiCog._esi_app_task is None:
logger.info("Creating esipy App...")
EsiCog._esi_app_task = bot.loop.run_in_executor(
None, self._create_esi_app)
EsiCog._esi_app_task.add_done_callback(
lambda f: logger.info("esipy App created"))
async def get_esi_app(self) -> asyncio.Task:
return await self._esi_app_task
def _create_esi_app(self):
return esipy.App.create(url=ESI_SWAGGER_JSON)
async def esi_request(self, loop, client, operation):
async with self._semaphore:
return await loop.run_in_executor(None, client.request, operation)
|
<commit_before>import asyncio
import esipy
from discord.ext import commands
from requests.adapters import DEFAULT_POOLSIZE
from utils.log import get_logger
ESI_SWAGGER_JSON = 'https://esi.evetech.net/dev/swagger.json'
class EsiCog:
_esi_app_task: asyncio.Task = None
_semaphore = asyncio.Semaphore(DEFAULT_POOLSIZE)
def __init__(self, bot: commands.Bot):
logger = get_logger(__name__, bot)
if self._esi_app_task is None:
logger.info("Creating esipy App...")
self._esi_app_task = bot.loop.run_in_executor(
None, self._create_esi_app)
self._esi_app_task.add_done_callback(
lambda f: logger.info("esipy App created"))
def __unload(self):
self._esi_app_task.cancel()
async def get_esi_app(self) -> asyncio.Task:
return await self._esi_app_task
def _create_esi_app(self):
return esipy.App.create(url=ESI_SWAGGER_JSON)
async def esi_request(self, loop, client, operation):
async with self._semaphore:
return await loop.run_in_executor(None, client.request, operation)
<commit_msg>Fix reference to class attribute<commit_after>import asyncio
import esipy
from discord.ext import commands
from requests.adapters import DEFAULT_POOLSIZE
from utils.log import get_logger
ESI_SWAGGER_JSON = 'https://esi.evetech.net/dev/swagger.json'
class EsiCog:
_esi_app_task: asyncio.Task = None
_semaphore = asyncio.Semaphore(DEFAULT_POOLSIZE)
def __init__(self, bot: commands.Bot):
logger = get_logger(__name__, bot)
if EsiCog._esi_app_task is None:
logger.info("Creating esipy App...")
EsiCog._esi_app_task = bot.loop.run_in_executor(
None, self._create_esi_app)
EsiCog._esi_app_task.add_done_callback(
lambda f: logger.info("esipy App created"))
async def get_esi_app(self) -> asyncio.Task:
return await self._esi_app_task
def _create_esi_app(self):
return esipy.App.create(url=ESI_SWAGGER_JSON)
async def esi_request(self, loop, client, operation):
async with self._semaphore:
return await loop.run_in_executor(None, client.request, operation)
|
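The fix above hinges on Python's attribute lookup rules: assigning through self creates an instance attribute that shadows the class attribute, so every new cog instance would start its own task, while assigning through the class name shares one value. A toy illustration of the difference (SharedOnce is illustrative, not the cog):

class SharedOnce(object):
    _cache = None

    def set_via_self(self, value):
        if self._cache is None:
            self._cache = value          # new instance attribute; other instances still see None

    def set_via_class(self, value):
        if SharedOnce._cache is None:
            SharedOnce._cache = value    # class attribute; shared by every instance

a, b = SharedOnce(), SharedOnce()
a.set_via_self('x')
assert b._cache is None                  # not shared
a.set_via_class('y')
assert b._cache == 'y'                   # shared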
52240f7712026332121597fbb8f0ad0e62bdb5e0
|
yolk/__init__.py
|
yolk/__init__.py
|
"""yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.7'
|
"""yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.7.1'
|
Increment patch version to 0.7.1
|
Increment patch version to 0.7.1
|
Python
|
bsd-3-clause
|
myint/yolk,myint/yolk
|
"""yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.7'
Increment patch version to 0.7.1
|
"""yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.7.1'
|
<commit_before>"""yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.7'
<commit_msg>Increment patch version to 0.7.1<commit_after>
|
"""yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.7.1'
|
"""yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.7'
Increment patch version to 0.7.1"""yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.7.1'
|
<commit_before>"""yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.7'
<commit_msg>Increment patch version to 0.7.1<commit_after>"""yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.7.1'
|
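A common companion to keeping the version string in __init__ is reading it from there in packaging code so it only lives in one place; the path and regex below are illustrative, not taken from yolk's setup script:

import re

def read_version(path='yolk/__init__.py'):
    # Pulls the __version__ string out of the module without importing it.
    with open(path) as fh:
        match = re.search(r"__version__\s*=\s*'([^']+)'", fh.read())
    return match.group(1)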
2d4a3b82d982017fe51e2dd23eca7f1d83ad115f
|
plugoo/assets.py
|
plugoo/assets.py
|
class Asset:
"""
This is an ooni-probe asset. It is a python
iterator object, allowing it to be efficiently looped.
To create your own custom asset your should subclass this
and override the next_asset method and the len method for
computing the length of the asset.
"""
def __init__(self, file=None, *args, **argv):
self.fh = None
if file:
self.name = file
self.fh = open(file, 'r')
self.eof = False
def __iter__(self):
return self
def len(self):
"""
Returns the length of the asset
"""
for i, l in enumerate(self.fh):
pass
# rewind the file
self.fh.seek(0)
return i + 1
def parse_line(self, line):
"""
Override this method if you need line
by line parsing of an Asset.
"""
return line.replace('\n','')
def next_asset(self):
"""
Return the next asset.
"""
# XXX this is really written with my feet.
# clean me up please...
line = self.fh.readline()
if line:
return self.parse_line(line)
else:
self.fh.seek(0)
raise StopIteration
def next(self):
try:
return self.next_asset()
except:
raise StopIteration
|
class Asset:
"""
This is an ooni-probe asset. It is a python
iterator object, allowing it to be efficiently looped.
To create your own custom asset your should subclass this
and override the next_asset method and the len method for
computing the length of the asset.
"""
def __init__(self, file=None, *args, **argv):
self.fh = None
if file:
self.name = file
self.fh = open(file, 'r')
self.eof = False
def __iter__(self):
return self
def len(self):
"""
Returns the length of the asset
"""
for i, l in enumerate(self.fh):
pass
# rewind the file
self.fh.seek(0)
return i + 1
def parse_line(self, line):
"""
Override this method if you need line
by line parsing of an Asset.
"""
return line.replace('\n','')
def next_asset(self):
"""
Return the next asset.
"""
# XXX this is really written with my feet.
# clean me up please...
line = self.fh.readline()
if line:
parsed_line = self.parse_line(line)
if parsed_line:
return parsed_line
else:
self.fh.seek(0)
raise StopIteration
def next(self):
try:
return self.next_asset()
except:
raise StopIteration
|
Add a method for line by line asset parsing
|
Add a method for line by line asset parsing
|
Python
|
bsd-2-clause
|
Karthikeyan-kkk/ooni-probe,lordappsec/ooni-probe,lordappsec/ooni-probe,0xPoly/ooni-probe,kdmurray91/ooni-probe,juga0/ooni-probe,Karthikeyan-kkk/ooni-probe,0xPoly/ooni-probe,juga0/ooni-probe,0xPoly/ooni-probe,kdmurray91/ooni-probe,juga0/ooni-probe,Karthikeyan-kkk/ooni-probe,juga0/ooni-probe,kdmurray91/ooni-probe,hackerberry/ooni-probe,0xPoly/ooni-probe,Karthikeyan-kkk/ooni-probe,hackerberry/ooni-probe,lordappsec/ooni-probe,lordappsec/ooni-probe,kdmurray91/ooni-probe
|
class Asset:
"""
This is an ooni-probe asset. It is a python
iterator object, allowing it to be efficiently looped.
To create your own custom asset your should subclass this
and override the next_asset method and the len method for
computing the length of the asset.
"""
def __init__(self, file=None, *args, **argv):
self.fh = None
if file:
self.name = file
self.fh = open(file, 'r')
self.eof = False
def __iter__(self):
return self
def len(self):
"""
Returns the length of the asset
"""
for i, l in enumerate(self.fh):
pass
# rewind the file
self.fh.seek(0)
return i + 1
def parse_line(self, line):
"""
Override this method if you need line
by line parsing of an Asset.
"""
return line.replace('\n','')
def next_asset(self):
"""
Return the next asset.
"""
# XXX this is really written with my feet.
# clean me up please...
line = self.fh.readline()
if line:
return self.parse_line(line)
else:
self.fh.seek(0)
raise StopIteration
def next(self):
try:
return self.next_asset()
except:
raise StopIteration
Add a method for line by line asset parsing
|
class Asset:
"""
This is an ooni-probe asset. It is a python
iterator object, allowing it to be efficiently looped.
To create your own custom asset your should subclass this
and override the next_asset method and the len method for
computing the length of the asset.
"""
def __init__(self, file=None, *args, **argv):
self.fh = None
if file:
self.name = file
self.fh = open(file, 'r')
self.eof = False
def __iter__(self):
return self
def len(self):
"""
Returns the length of the asset
"""
for i, l in enumerate(self.fh):
pass
# rewind the file
self.fh.seek(0)
return i + 1
def parse_line(self, line):
"""
Override this method if you need line
by line parsing of an Asset.
"""
return line.replace('\n','')
def next_asset(self):
"""
Return the next asset.
"""
# XXX this is really written with my feet.
# clean me up please...
line = self.fh.readline()
if line:
parsed_line = self.parse_line(line)
if parsed_line:
return parsed_line
else:
self.fh.seek(0)
raise StopIteration
def next(self):
try:
return self.next_asset()
except:
raise StopIteration
|
<commit_before>class Asset:
"""
This is an ooni-probe asset. It is a python
iterator object, allowing it to be efficiently looped.
To create your own custom asset your should subclass this
and override the next_asset method and the len method for
computing the length of the asset.
"""
def __init__(self, file=None, *args, **argv):
self.fh = None
if file:
self.name = file
self.fh = open(file, 'r')
self.eof = False
def __iter__(self):
return self
def len(self):
"""
Returns the length of the asset
"""
for i, l in enumerate(self.fh):
pass
# rewind the file
self.fh.seek(0)
return i + 1
def parse_line(self, line):
"""
Override this method if you need line
by line parsing of an Asset.
"""
return line.replace('\n','')
def next_asset(self):
"""
Return the next asset.
"""
# XXX this is really written with my feet.
# clean me up please...
line = self.fh.readline()
if line:
return self.parse_line(line)
else:
self.fh.seek(0)
raise StopIteration
def next(self):
try:
return self.next_asset()
except:
raise StopIteration
<commit_msg>Add a method for line by line asset parsing<commit_after>
|
class Asset:
"""
This is an ooni-probe asset. It is a python
iterator object, allowing it to be efficiently looped.
To create your own custom asset your should subclass this
and override the next_asset method and the len method for
computing the length of the asset.
"""
def __init__(self, file=None, *args, **argv):
self.fh = None
if file:
self.name = file
self.fh = open(file, 'r')
self.eof = False
def __iter__(self):
return self
def len(self):
"""
Returns the length of the asset
"""
for i, l in enumerate(self.fh):
pass
# rewind the file
self.fh.seek(0)
return i + 1
def parse_line(self, line):
"""
Override this method if you need line
by line parsing of an Asset.
"""
return line.replace('\n','')
def next_asset(self):
"""
Return the next asset.
"""
# XXX this is really written with my feet.
# clean me up please...
line = self.fh.readline()
if line:
parsed_line = self.parse_line(line)
if parsed_line:
return parsed_line
else:
self.fh.seek(0)
raise StopIteration
def next(self):
try:
return self.next_asset()
except:
raise StopIteration
|
class Asset:
"""
This is an ooni-probe asset. It is a python
iterator object, allowing it to be efficiently looped.
To create your own custom asset your should subclass this
and override the next_asset method and the len method for
computing the length of the asset.
"""
def __init__(self, file=None, *args, **argv):
self.fh = None
if file:
self.name = file
self.fh = open(file, 'r')
self.eof = False
def __iter__(self):
return self
def len(self):
"""
Returns the length of the asset
"""
for i, l in enumerate(self.fh):
pass
# rewind the file
self.fh.seek(0)
return i + 1
def parse_line(self, line):
"""
Override this method if you need line
by line parsing of an Asset.
"""
return line.replace('\n','')
def next_asset(self):
"""
Return the next asset.
"""
# XXX this is really written with my feet.
# clean me up please...
line = self.fh.readline()
if line:
return self.parse_line(line)
else:
self.fh.seek(0)
raise StopIteration
def next(self):
try:
return self.next_asset()
except:
raise StopIteration
Add a method for line by line asset parsingclass Asset:
"""
This is an ooni-probe asset. It is a python
iterator object, allowing it to be efficiently looped.
To create your own custom asset your should subclass this
and override the next_asset method and the len method for
computing the length of the asset.
"""
def __init__(self, file=None, *args, **argv):
self.fh = None
if file:
self.name = file
self.fh = open(file, 'r')
self.eof = False
def __iter__(self):
return self
def len(self):
"""
Returns the length of the asset
"""
for i, l in enumerate(self.fh):
pass
# rewind the file
self.fh.seek(0)
return i + 1
def parse_line(self, line):
"""
Override this method if you need line
by line parsing of an Asset.
"""
return line.replace('\n','')
def next_asset(self):
"""
Return the next asset.
"""
# XXX this is really written with my feet.
# clean me up please...
line = self.fh.readline()
if line:
parsed_line = self.parse_line(line)
if parsed_line:
return parsed_line
else:
self.fh.seek(0)
raise StopIteration
def next(self):
try:
return self.next_asset()
except:
raise StopIteration
|
<commit_before>class Asset:
"""
This is an ooni-probe asset. It is a python
iterator object, allowing it to be efficiently looped.
To create your own custom asset your should subclass this
and override the next_asset method and the len method for
computing the length of the asset.
"""
def __init__(self, file=None, *args, **argv):
self.fh = None
if file:
self.name = file
self.fh = open(file, 'r')
self.eof = False
def __iter__(self):
return self
def len(self):
"""
Returns the length of the asset
"""
for i, l in enumerate(self.fh):
pass
# rewind the file
self.fh.seek(0)
return i + 1
def parse_line(self, line):
"""
Override this method if you need line
by line parsing of an Asset.
"""
return line.replace('\n','')
def next_asset(self):
"""
Return the next asset.
"""
# XXX this is really written with my feet.
# clean me up please...
line = self.fh.readline()
if line:
return self.parse_line(line)
else:
self.fh.seek(0)
raise StopIteration
def next(self):
try:
return self.next_asset()
except:
raise StopIteration
<commit_msg>Add a method for line by line asset parsing<commit_after>class Asset:
"""
This is an ooni-probe asset. It is a python
iterator object, allowing it to be efficiently looped.
To create your own custom asset your should subclass this
and override the next_asset method and the len method for
computing the length of the asset.
"""
def __init__(self, file=None, *args, **argv):
self.fh = None
if file:
self.name = file
self.fh = open(file, 'r')
self.eof = False
def __iter__(self):
return self
def len(self):
"""
Returns the length of the asset
"""
for i, l in enumerate(self.fh):
pass
# rewind the file
self.fh.seek(0)
return i + 1
def parse_line(self, line):
"""
Override this method if you need line
by line parsing of an Asset.
"""
return line.replace('\n','')
def next_asset(self):
"""
Return the next asset.
"""
# XXX this is really written with my feet.
# clean me up please...
line = self.fh.readline()
if line:
parsed_line = self.parse_line(line)
if parsed_line:
return parsed_line
else:
self.fh.seek(0)
raise StopIteration
def next(self):
try:
return self.next_asset()
except:
raise StopIteration
|
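The change above routes each line through parse_line and only returns it when the hook accepts it (the bare except in next() then turns any failure into StopIteration). A generator-based sketch of the same filtering idea that keeps iterating past rejected lines without broad exception handling; the default parse hook below is illustrative:

def iter_asset(path, parse_line=lambda line: line.strip() or None):
    # Yields only the lines the parse hook accepts; rejected lines are skipped.
    with open(path) as fh:
        for raw in fh:
            parsed = parse_line(raw.rstrip('\n'))
            if parsed is not None:
                yield parsed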
93dd1f0e19f2483d4f699f5c59f5f2eb1d5079b0
|
comrade/views/simple.py
|
comrade/views/simple.py
|
from django.http import HttpResponse, HttpResponseServerError
from django.template import RequestContext, loader
from django.conf import settings
from maintenancemode.http import HttpResponseTemporaryUnavailable
import logging
logger = logging.getLogger('comrade.views.simple')
def status(request):
logger.info("Responding to status check")
return HttpResponse()
def server_error(request, template_name='500.html'):
t = loader.get_template(template_name)
return HttpResponseServerError(t.render(RequestContext(request)))
def maintenance_mode(request, template_name='503.html'):
t = loader.get_template(template_name)
return HttpResponseTemporaryUnavailable(t.render(RequestContext(request)))
def direct_to_template(request, template, extra_context=None, mimetype=None,
status=None, **kwargs):
'''
Duplicates behavior of django.views.generic.simple.direct_to_template
but accepts a status argument.
'''
if extra_context is None:
extra_context = {}
dictionary = {'params': kwargs}
for key, value in extra_context.items():
if callable(value):
dictionary[key] = value()
else:
dictionary[key] = value
c = RequestContext(request, dictionary)
t = loader.get_template(template)
return HttpResponse(t.render(c), status=status,
mimetype=mimetype)
|
from django.http import HttpResponse, HttpResponseServerError
from django.template import RequestContext, loader
from django.conf import settings
from maintenancemode.http import HttpResponseTemporaryUnavailable
import logging
logger = logging.getLogger('comrade.views.simple')
def status(request):
logger.debug("Responding to status check")
return HttpResponse()
def server_error(request, template_name='500.html'):
t = loader.get_template(template_name)
return HttpResponseServerError(t.render(RequestContext(request)))
def maintenance_mode(request, template_name='503.html'):
t = loader.get_template(template_name)
return HttpResponseTemporaryUnavailable(t.render(RequestContext(request)))
def version(request, version_attribute='GIT_COMMIT'):
return HttpResponse(getattr(settings, version_attribute))
def direct_to_template(request, template, extra_context=None, mimetype=None,
status=None, **kwargs):
'''
Duplicates behavior of django.views.generic.simple.direct_to_template
but accepts a status argument.
'''
if extra_context is None:
extra_context = {}
dictionary = {'params': kwargs}
for key, value in extra_context.items():
if callable(value):
dictionary[key] = value()
else:
dictionary[key] = value
c = RequestContext(request, dictionary)
t = loader.get_template(template)
return HttpResponse(t.render(c), status=status,
mimetype=mimetype)
|
Add version view, since we want all apps to provide this.
|
Add version view, since we want all apps to provide this.
|
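A rough sketch of how the new version view might be exposed, assuming an old-style Django 1.x URLconf to match the era of this code; the URL pattern, its name, and the GIT_COMMIT value are illustrative, not part of the commit.

# settings.py -- whatever value the deploy process records
GIT_COMMIT = 'abc1234'

# urls.py (illustrative wiring)
from django.conf.urls.defaults import patterns, url

urlpatterns = patterns('',
    url(r'^_version/$', 'comrade.views.simple.version', name='version'),
)

# a GET to /_version/ then returns the commit identifier as the plain response body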
Python
|
mit
|
bueda/django-comrade
|
from django.http import HttpResponse, HttpResponseServerError
from django.template import RequestContext, loader
from django.conf import settings
from maintenancemode.http import HttpResponseTemporaryUnavailable
import logging
logger = logging.getLogger('comrade.views.simple')
def status(request):
logger.info("Responding to status check")
return HttpResponse()
def server_error(request, template_name='500.html'):
t = loader.get_template(template_name)
return HttpResponseServerError(t.render(RequestContext(request)))
def maintenance_mode(request, template_name='503.html'):
t = loader.get_template(template_name)
return HttpResponseTemporaryUnavailable(t.render(RequestContext(request)))
def direct_to_template(request, template, extra_context=None, mimetype=None,
status=None, **kwargs):
'''
Duplicates behavior of django.views.generic.simple.direct_to_template
but accepts a status argument.
'''
if extra_context is None:
extra_context = {}
dictionary = {'params': kwargs}
for key, value in extra_context.items():
if callable(value):
dictionary[key] = value()
else:
dictionary[key] = value
c = RequestContext(request, dictionary)
t = loader.get_template(template)
return HttpResponse(t.render(c), status=status,
mimetype=mimetype)
Add version view, since we want all apps to provide this.
|
from django.http import HttpResponse, HttpResponseServerError
from django.template import RequestContext, loader
from django.conf import settings
from maintenancemode.http import HttpResponseTemporaryUnavailable
import logging
logger = logging.getLogger('comrade.views.simple')
def status(request):
logger.debug("Responding to status check")
return HttpResponse()
def server_error(request, template_name='500.html'):
t = loader.get_template(template_name)
return HttpResponseServerError(t.render(RequestContext(request)))
def maintenance_mode(request, template_name='503.html'):
t = loader.get_template(template_name)
return HttpResponseTemporaryUnavailable(t.render(RequestContext(request)))
def version(request, version_attribute='GIT_COMMIT'):
return HttpResponse(getattr(settings, version_attribute))
def direct_to_template(request, template, extra_context=None, mimetype=None,
status=None, **kwargs):
'''
Duplicates behavior of django.views.generic.simple.direct_to_template
but accepts a status argument.
'''
if extra_context is None:
extra_context = {}
dictionary = {'params': kwargs}
for key, value in extra_context.items():
if callable(value):
dictionary[key] = value()
else:
dictionary[key] = value
c = RequestContext(request, dictionary)
t = loader.get_template(template)
return HttpResponse(t.render(c), status=status,
mimetype=mimetype)
|
<commit_before>from django.http import HttpResponse, HttpResponseServerError
from django.template import RequestContext, loader
from django.conf import settings
from maintenancemode.http import HttpResponseTemporaryUnavailable
import logging
logger = logging.getLogger('comrade.views.simple')
def status(request):
logger.info("Responding to status check")
return HttpResponse()
def server_error(request, template_name='500.html'):
t = loader.get_template(template_name)
return HttpResponseServerError(t.render(RequestContext(request)))
def maintenance_mode(request, template_name='503.html'):
t = loader.get_template(template_name)
return HttpResponseTemporaryUnavailable(t.render(RequestContext(request)))
def direct_to_template(request, template, extra_context=None, mimetype=None,
status=None, **kwargs):
'''
Duplicates behavior of django.views.generic.simple.direct_to_template
but accepts a status argument.
'''
if extra_context is None:
extra_context = {}
dictionary = {'params': kwargs}
for key, value in extra_context.items():
if callable(value):
dictionary[key] = value()
else:
dictionary[key] = value
c = RequestContext(request, dictionary)
t = loader.get_template(template)
return HttpResponse(t.render(c), status=status,
mimetype=mimetype)
<commit_msg>Add version view, since we want all apps to provide this.<commit_after>
|
from django.http import HttpResponse, HttpResponseServerError
from django.template import RequestContext, loader
from django.conf import settings
from maintenancemode.http import HttpResponseTemporaryUnavailable
import logging
logger = logging.getLogger('comrade.views.simple')
def status(request):
logger.debug("Responding to status check")
return HttpResponse()
def server_error(request, template_name='500.html'):
t = loader.get_template(template_name)
return HttpResponseServerError(t.render(RequestContext(request)))
def maintenance_mode(request, template_name='503.html'):
t = loader.get_template(template_name)
return HttpResponseTemporaryUnavailable(t.render(RequestContext(request)))
def version(request, version_attribute='GIT_COMMIT'):
return HttpResponse(getattr(settings, version_attribute))
def direct_to_template(request, template, extra_context=None, mimetype=None,
status=None, **kwargs):
'''
Duplicates behavior of django.views.generic.simple.direct_to_template
but accepts a status argument.
'''
if extra_context is None:
extra_context = {}
dictionary = {'params': kwargs}
for key, value in extra_context.items():
if callable(value):
dictionary[key] = value()
else:
dictionary[key] = value
c = RequestContext(request, dictionary)
t = loader.get_template(template)
return HttpResponse(t.render(c), status=status,
mimetype=mimetype)
|
from django.http import HttpResponse, HttpResponseServerError
from django.template import RequestContext, loader
from django.conf import settings
from maintenancemode.http import HttpResponseTemporaryUnavailable
import logging
logger = logging.getLogger('comrade.views.simple')
def status(request):
logger.info("Responding to status check")
return HttpResponse()
def server_error(request, template_name='500.html'):
t = loader.get_template(template_name)
return HttpResponseServerError(t.render(RequestContext(request)))
def maintenance_mode(request, template_name='503.html'):
t = loader.get_template(template_name)
return HttpResponseTemporaryUnavailable(t.render(RequestContext(request)))
def direct_to_template(request, template, extra_context=None, mimetype=None,
status=None, **kwargs):
'''
Duplicates behavior of django.views.generic.simple.direct_to_template
but accepts a status argument.
'''
if extra_context is None:
extra_context = {}
dictionary = {'params': kwargs}
for key, value in extra_context.items():
if callable(value):
dictionary[key] = value()
else:
dictionary[key] = value
c = RequestContext(request, dictionary)
t = loader.get_template(template)
return HttpResponse(t.render(c), status=status,
mimetype=mimetype)
Add version view, since we want all apps to provide this.from django.http import HttpResponse, HttpResponseServerError
from django.template import RequestContext, loader
from django.conf import settings
from maintenancemode.http import HttpResponseTemporaryUnavailable
import logging
logger = logging.getLogger('comrade.views.simple')
def status(request):
logger.debug("Responding to status check")
return HttpResponse()
def server_error(request, template_name='500.html'):
t = loader.get_template(template_name)
return HttpResponseServerError(t.render(RequestContext(request)))
def maintenance_mode(request, template_name='503.html'):
t = loader.get_template(template_name)
return HttpResponseTemporaryUnavailable(t.render(RequestContext(request)))
def version(request, version_attribute='GIT_COMMIT'):
return HttpResponse(getattr(settings, version_attribute))
def direct_to_template(request, template, extra_context=None, mimetype=None,
status=None, **kwargs):
'''
Duplicates behavior of django.views.generic.simple.direct_to_template
but accepts a status argument.
'''
if extra_context is None:
extra_context = {}
dictionary = {'params': kwargs}
for key, value in extra_context.items():
if callable(value):
dictionary[key] = value()
else:
dictionary[key] = value
c = RequestContext(request, dictionary)
t = loader.get_template(template)
return HttpResponse(t.render(c), status=status,
mimetype=mimetype)
|
<commit_before>from django.http import HttpResponse, HttpResponseServerError
from django.template import RequestContext, loader
from django.conf import settings
from maintenancemode.http import HttpResponseTemporaryUnavailable
import logging
logger = logging.getLogger('comrade.views.simple')
def status(request):
logger.info("Responding to status check")
return HttpResponse()
def server_error(request, template_name='500.html'):
t = loader.get_template(template_name)
return HttpResponseServerError(t.render(RequestContext(request)))
def maintenance_mode(request, template_name='503.html'):
t = loader.get_template(template_name)
return HttpResponseTemporaryUnavailable(t.render(RequestContext(request)))
def direct_to_template(request, template, extra_context=None, mimetype=None,
status=None, **kwargs):
'''
Duplicates behavior of django.views.generic.simple.direct_to_template
but accepts a status argument.
'''
if extra_context is None:
extra_context = {}
dictionary = {'params': kwargs}
for key, value in extra_context.items():
if callable(value):
dictionary[key] = value()
else:
dictionary[key] = value
c = RequestContext(request, dictionary)
t = loader.get_template(template)
return HttpResponse(t.render(c), status=status,
mimetype=mimetype)
<commit_msg>Add version view, since we want all apps to provide this.<commit_after>from django.http import HttpResponse, HttpResponseServerError
from django.template import RequestContext, loader
from django.conf import settings
from maintenancemode.http import HttpResponseTemporaryUnavailable
import logging
logger = logging.getLogger('comrade.views.simple')
def status(request):
logger.debug("Responding to status check")
return HttpResponse()
def server_error(request, template_name='500.html'):
t = loader.get_template(template_name)
return HttpResponseServerError(t.render(RequestContext(request)))
def maintenance_mode(request, template_name='503.html'):
t = loader.get_template(template_name)
return HttpResponseTemporaryUnavailable(t.render(RequestContext(request)))
def version(request, version_attribute='GIT_COMMIT'):
return HttpResponse(getattr(settings, version_attribute))
def direct_to_template(request, template, extra_context=None, mimetype=None,
status=None, **kwargs):
'''
Duplicates behavior of django.views.generic.simple.direct_to_template
but accepts a status argument.
'''
if extra_context is None:
extra_context = {}
dictionary = {'params': kwargs}
for key, value in extra_context.items():
if callable(value):
dictionary[key] = value()
else:
dictionary[key] = value
c = RequestContext(request, dictionary)
t = loader.get_template(template)
return HttpResponse(t.render(c), status=status,
mimetype=mimetype)
|
d51b7bf7766dc4d56158fcc7e072e27f275f57e8
|
skyfield/__init__.py
|
skyfield/__init__.py
|
"""Elegant astronomy for Python
Most users will use Skyfield by importing ``skyfield.api`` and using the
functions and classes there.
"""
__version_info__ = (0, 2)
__version__ = '%s.%s' % __version_info__
|
"""Elegant astronomy for Python
Most users will use Skyfield by importing ``skyfield.api`` and using the
functions and classes there.
"""
VERSION = (0, 2)
__version__ = '.'.join(map(str, VERSION))
|
Rename silly __version_info__ symbol to VERSION
|
Rename silly __version_info__ symbol to VERSION
Django uses the name VERSION and it always seems good to rid ourselves
of another faux-dunder symbol that really means nothing to the Python
runtime and therefore should not enjoy the (in-?)dignity of a dunder.
|
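For illustration, the renamed tuple keeps version comparisons simple while __version__ stays a display string; a small usage sketch built only from what the record defines:

import skyfield

print(skyfield.__version__)        # '0.2'
if skyfield.VERSION >= (0, 2):     # tuples compare element-wise
    pass                           # safe to rely on 0.2+ behaviour here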
Python
|
mit
|
GuidoBR/python-skyfield,ozialien/python-skyfield,exoanalytic/python-skyfield,ozialien/python-skyfield,exoanalytic/python-skyfield,skyfielders/python-skyfield,GuidoBR/python-skyfield,skyfielders/python-skyfield
|
"""Elegant astronomy for Python
Most users will use Skyfield by importing ``skyfield.api`` and using the
functions and classes there.
"""
__version_info__ = (0, 2)
__version__ = '%s.%s' % __version_info__
Rename silly __version_info__ symbol to VERSION
Django uses the name VERSION and it always seems good to rid ourselves
of another faux-dunder symbol that really means nothing to the Python
runtime and therefore should not enjoy the (in-?)dignity of a dunder.
|
"""Elegant astronomy for Python
Most users will use Skyfield by importing ``skyfield.api`` and using the
functions and classes there.
"""
VERSION = (0, 2)
__version__ = '.'.join(map(str, VERSION))
|
<commit_before>"""Elegant astronomy for Python
Most users will use Skyfield by importing ``skyfield.api`` and using the
functions and classes there.
"""
__version_info__ = (0, 2)
__version__ = '%s.%s' % __version_info__
<commit_msg>Rename silly __version_info__ symbol to VERSION
Django uses the name VERSION and it always seems good to rid ourselves
of another faux-dunder symbol that really means nothing to the Python
runtime and therefore should not enjoy the (in-?)dignity of a dunder.<commit_after>
|
"""Elegant astronomy for Python
Most users will use Skyfield by importing ``skyfield.api`` and using the
functions and classes there.
"""
VERSION = (0, 2)
__version__ = '.'.join(map(str, VERSION))
|
"""Elegant astronomy for Python
Most users will use Skyfield by importing ``skyfield.api`` and using the
functions and classes there.
"""
__version_info__ = (0, 2)
__version__ = '%s.%s' % __version_info__
Rename silly __version_info__ symbol to VERSION
Django uses the name VERSION and it always seems good to rid ourselves
of another faux-dunder symbol that really means nothing to the Python
runtime and therefore should not enjoy the (in-?)dignity of a dunder."""Elegant astronomy for Python
Most users will use Skyfield by importing ``skyfield.api`` and using the
functions and classes there.
"""
VERSION = (0, 2)
__version__ = '.'.join(map(str, VERSION))
|
<commit_before>"""Elegant astronomy for Python
Most users will use Skyfield by importing ``skyfield.api`` and using the
functions and classes there.
"""
__version_info__ = (0, 2)
__version__ = '%s.%s' % __version_info__
<commit_msg>Rename silly __version_info__ symbol to VERSION
Django uses the name VERSION and it always seems good to rid ourselves
of another faux-dunder symbol that really means nothing to the Python
runtime and therefore should not enjoy the (in-?)dignity of a dunder.<commit_after>"""Elegant astronomy for Python
Most users will use Skyfield by importing ``skyfield.api`` and using the
functions and classes there.
"""
VERSION = (0, 2)
__version__ = '.'.join(map(str, VERSION))
|
1d924ad8d47260b90f71d7f805cdd1a6aa734c2c
|
conda_build/external.py
|
conda_build/external.py
|
from __future__ import absolute_import, division, print_function
import os
import sys
from os.path import isfile, join, expanduser
import conda.config as cc
from conda_build.config import config
def find_executable(executable):
if sys.platform == 'win32':
dir_paths = [join(config.build_prefix, 'Scripts'),
join(cc.root_dir, 'Scripts'),
'C:\\cygwin\\bin']
else:
dir_paths = [join(config.build_prefix, 'bin'),
join(cc.root_dir, 'bin'),]
dir_paths.extend(os.environ['PATH'].split(os.pathsep))
for dir_path in dir_paths:
if sys.platform == 'win32':
for ext in '.exe', '.bat', '':
path = join(dir_path, executable + ext)
if isfile(path):
return path
else:
path = join(dir_path, executable)
if isfile(expanduser(path)):
return expanduser(path)
return None
|
from __future__ import absolute_import, division, print_function
import os
import sys
from os.path import isfile, join, expanduser
import conda.config as cc
from conda_build.config import config
def find_executable(executable):
# dir_paths is referenced as a module-level variable
# in other code
global dir_paths
if sys.platform == 'win32':
dir_paths = [join(config.build_prefix, 'Scripts'),
join(cc.root_dir, 'Scripts'),
'C:\\cygwin\\bin']
else:
dir_paths = [join(config.build_prefix, 'bin'),
join(cc.root_dir, 'bin'),]
dir_paths.extend(os.environ['PATH'].split(os.pathsep))
for dir_path in dir_paths:
if sys.platform == 'win32':
for ext in '.exe', '.bat', '':
path = join(dir_path, executable + ext)
if isfile(path):
return path
else:
path = join(dir_path, executable)
if isfile(expanduser(path)):
return expanduser(path)
return None
|
Fix error message when patch is missing.
|
Fix error message when patch is missing.
|
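The diff makes dir_paths a module-level name so other code can mention the searched directories when a tool is missing. A hedged sketch of how a caller might build such an error message; this caller is assumed for illustration and is not shown in the commit.

import os
import sys
from conda_build import external

patch = external.find_executable('patch')
if patch is None:
    # external.dir_paths was populated by the find_executable call above
    sys.exit("Error: 'patch' not found in: %s" % os.pathsep.join(external.dir_paths))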
Python
|
bsd-3-clause
|
shastings517/conda-build,dan-blanchard/conda-build,ilastik/conda-build,takluyver/conda-build,takluyver/conda-build,rmcgibbo/conda-build,ilastik/conda-build,frol/conda-build,sandhujasmine/conda-build,dan-blanchard/conda-build,dan-blanchard/conda-build,takluyver/conda-build,sandhujasmine/conda-build,sandhujasmine/conda-build,mwcraig/conda-build,frol/conda-build,mwcraig/conda-build,mwcraig/conda-build,rmcgibbo/conda-build,rmcgibbo/conda-build,shastings517/conda-build,ilastik/conda-build,shastings517/conda-build,frol/conda-build
|
from __future__ import absolute_import, division, print_function
import os
import sys
from os.path import isfile, join, expanduser
import conda.config as cc
from conda_build.config import config
def find_executable(executable):
if sys.platform == 'win32':
dir_paths = [join(config.build_prefix, 'Scripts'),
join(cc.root_dir, 'Scripts'),
'C:\\cygwin\\bin']
else:
dir_paths = [join(config.build_prefix, 'bin'),
join(cc.root_dir, 'bin'),]
dir_paths.extend(os.environ['PATH'].split(os.pathsep))
for dir_path in dir_paths:
if sys.platform == 'win32':
for ext in '.exe', '.bat', '':
path = join(dir_path, executable + ext)
if isfile(path):
return path
else:
path = join(dir_path, executable)
if isfile(expanduser(path)):
return expanduser(path)
return None
Fix error message when patch is missing.
|
from __future__ import absolute_import, division, print_function
import os
import sys
from os.path import isfile, join, expanduser
import conda.config as cc
from conda_build.config import config
def find_executable(executable):
# dir_paths is referenced as a module-level variable
# in other code
global dir_paths
if sys.platform == 'win32':
dir_paths = [join(config.build_prefix, 'Scripts'),
join(cc.root_dir, 'Scripts'),
'C:\\cygwin\\bin']
else:
dir_paths = [join(config.build_prefix, 'bin'),
join(cc.root_dir, 'bin'),]
dir_paths.extend(os.environ['PATH'].split(os.pathsep))
for dir_path in dir_paths:
if sys.platform == 'win32':
for ext in '.exe', '.bat', '':
path = join(dir_path, executable + ext)
if isfile(path):
return path
else:
path = join(dir_path, executable)
if isfile(expanduser(path)):
return expanduser(path)
return None
|
<commit_before>from __future__ import absolute_import, division, print_function
import os
import sys
from os.path import isfile, join, expanduser
import conda.config as cc
from conda_build.config import config
def find_executable(executable):
if sys.platform == 'win32':
dir_paths = [join(config.build_prefix, 'Scripts'),
join(cc.root_dir, 'Scripts'),
'C:\\cygwin\\bin']
else:
dir_paths = [join(config.build_prefix, 'bin'),
join(cc.root_dir, 'bin'),]
dir_paths.extend(os.environ['PATH'].split(os.pathsep))
for dir_path in dir_paths:
if sys.platform == 'win32':
for ext in '.exe', '.bat', '':
path = join(dir_path, executable + ext)
if isfile(path):
return path
else:
path = join(dir_path, executable)
if isfile(expanduser(path)):
return expanduser(path)
return None
<commit_msg>Fix error message when patch is missing.<commit_after>
|
from __future__ import absolute_import, division, print_function
import os
import sys
from os.path import isfile, join, expanduser
import conda.config as cc
from conda_build.config import config
def find_executable(executable):
# dir_paths is referenced as a module-level variable
# in other code
global dir_paths
if sys.platform == 'win32':
dir_paths = [join(config.build_prefix, 'Scripts'),
join(cc.root_dir, 'Scripts'),
'C:\\cygwin\\bin']
else:
dir_paths = [join(config.build_prefix, 'bin'),
join(cc.root_dir, 'bin'),]
dir_paths.extend(os.environ['PATH'].split(os.pathsep))
for dir_path in dir_paths:
if sys.platform == 'win32':
for ext in '.exe', '.bat', '':
path = join(dir_path, executable + ext)
if isfile(path):
return path
else:
path = join(dir_path, executable)
if isfile(expanduser(path)):
return expanduser(path)
return None
|
from __future__ import absolute_import, division, print_function
import os
import sys
from os.path import isfile, join, expanduser
import conda.config as cc
from conda_build.config import config
def find_executable(executable):
if sys.platform == 'win32':
dir_paths = [join(config.build_prefix, 'Scripts'),
join(cc.root_dir, 'Scripts'),
'C:\\cygwin\\bin']
else:
dir_paths = [join(config.build_prefix, 'bin'),
join(cc.root_dir, 'bin'),]
dir_paths.extend(os.environ['PATH'].split(os.pathsep))
for dir_path in dir_paths:
if sys.platform == 'win32':
for ext in '.exe', '.bat', '':
path = join(dir_path, executable + ext)
if isfile(path):
return path
else:
path = join(dir_path, executable)
if isfile(expanduser(path)):
return expanduser(path)
return None
Fix error message when patch is missing.from __future__ import absolute_import, division, print_function
import os
import sys
from os.path import isfile, join, expanduser
import conda.config as cc
from conda_build.config import config
def find_executable(executable):
# dir_paths is referenced as a module-level variable
# in other code
global dir_paths
if sys.platform == 'win32':
dir_paths = [join(config.build_prefix, 'Scripts'),
join(cc.root_dir, 'Scripts'),
'C:\\cygwin\\bin']
else:
dir_paths = [join(config.build_prefix, 'bin'),
join(cc.root_dir, 'bin'),]
dir_paths.extend(os.environ['PATH'].split(os.pathsep))
for dir_path in dir_paths:
if sys.platform == 'win32':
for ext in '.exe', '.bat', '':
path = join(dir_path, executable + ext)
if isfile(path):
return path
else:
path = join(dir_path, executable)
if isfile(expanduser(path)):
return expanduser(path)
return None
|
<commit_before>from __future__ import absolute_import, division, print_function
import os
import sys
from os.path import isfile, join, expanduser
import conda.config as cc
from conda_build.config import config
def find_executable(executable):
if sys.platform == 'win32':
dir_paths = [join(config.build_prefix, 'Scripts'),
join(cc.root_dir, 'Scripts'),
'C:\\cygwin\\bin']
else:
dir_paths = [join(config.build_prefix, 'bin'),
join(cc.root_dir, 'bin'),]
dir_paths.extend(os.environ['PATH'].split(os.pathsep))
for dir_path in dir_paths:
if sys.platform == 'win32':
for ext in '.exe', '.bat', '':
path = join(dir_path, executable + ext)
if isfile(path):
return path
else:
path = join(dir_path, executable)
if isfile(expanduser(path)):
return expanduser(path)
return None
<commit_msg>Fix error message when patch is missing.<commit_after>from __future__ import absolute_import, division, print_function
import os
import sys
from os.path import isfile, join, expanduser
import conda.config as cc
from conda_build.config import config
def find_executable(executable):
# dir_paths is referenced as a module-level variable
# in other code
global dir_paths
if sys.platform == 'win32':
dir_paths = [join(config.build_prefix, 'Scripts'),
join(cc.root_dir, 'Scripts'),
'C:\\cygwin\\bin']
else:
dir_paths = [join(config.build_prefix, 'bin'),
join(cc.root_dir, 'bin'),]
dir_paths.extend(os.environ['PATH'].split(os.pathsep))
for dir_path in dir_paths:
if sys.platform == 'win32':
for ext in '.exe', '.bat', '':
path = join(dir_path, executable + ext)
if isfile(path):
return path
else:
path = join(dir_path, executable)
if isfile(expanduser(path)):
return expanduser(path)
return None
|
683a3727fc5363c2a2a53fabfde555207e8bab66
|
brains/orders/models.py
|
brains/orders/models.py
|
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Order(models.Model):
user = models.ForeignKey(User)
# Last updated timestamp, used for sorting
date = models.DateTimeField(auto_now_add=True, auto_now=True)
# When we will strike
striketime = models.DateTimeField(null=True)
message = models.TextField()
subject = models.CharField(max_length=250)
# Coordinates of where this news applies
# By default, set it to the middle of the map
x = models.IntegerField(default=50)
y = models.IntegerField(default=50)
def __unicode__(self):
return self.subject
|
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Order(models.Model):
user = models.ForeignKey(User)
# Last updated timestamp, used for sorting
date = models.DateTimeField(auto_now_add=True, auto_now=True)
# When we will strike
striketime = models.DateTimeField(null=True, blank=True)
message = models.TextField()
subject = models.CharField(max_length=250)
# Coordinates of where this news applies
# By default, set it to the middle of the map
x = models.IntegerField(default=50)
y = models.IntegerField(default=50)
def __unicode__(self):
return self.subject
|
Allow strike time to be blank.
|
Allow strike time to be blank.
|
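For context, null=True only permits NULL at the database layer; adding blank=True is what makes form and admin validation treat the strike time as optional. A short ModelForm sketch, with the import path assumed to follow the repo layout above:

from django import forms
from orders.models import Order      # import path is an assumption

class OrderForm(forms.ModelForm):
    class Meta:
        model = Order                # striketime now validates as an optional field

# blank=True drives form/admin validation; null=True on its own only
# allows NULL to be stored in the database column.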
Python
|
bsd-3-clause
|
crisisking/udbraaains,crisisking/udbraaains,crisisking/udbraaains,crisisking/udbraaains
|
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Order(models.Model):
user = models.ForeignKey(User)
# Last updated timestamp, used for sorting
date = models.DateTimeField(auto_now_add=True, auto_now=True)
# When we will strike
striketime = models.DateTimeField(null=True)
message = models.TextField()
subject = models.CharField(max_length=250)
# Coordinates of where this news applies
# By default, set it to the middle of the map
x = models.IntegerField(default=50)
y = models.IntegerField(default=50)
def __unicode__(self):
return self.subject
Allow strike time to be blank.
|
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Order(models.Model):
user = models.ForeignKey(User)
# Last updated timestamp, used for sorting
date = models.DateTimeField(auto_now_add=True, auto_now=True)
# When we will strike
striketime = models.DateTimeField(null=True, blank=True)
message = models.TextField()
subject = models.CharField(max_length=250)
# Coordinates of where this news applies
# By default, set it to the middle of the map
x = models.IntegerField(default=50)
y = models.IntegerField(default=50)
def __unicode__(self):
return self.subject
|
<commit_before>from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Order(models.Model):
user = models.ForeignKey(User)
# Last updated timestamp, used for sorting
date = models.DateTimeField(auto_now_add=True, auto_now=True)
# When we will strike
striketime = models.DateTimeField(null=True)
message = models.TextField()
subject = models.CharField(max_length=250)
# Coordinates of where this news applies
# By default, set it to the middle of the map
x = models.IntegerField(default=50)
y = models.IntegerField(default=50)
def __unicode__(self):
return self.subject
<commit_msg>Allow strike time to be blank.<commit_after>
|
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Order(models.Model):
user = models.ForeignKey(User)
# Last updated timestamp, used for sorting
date = models.DateTimeField(auto_now_add=True, auto_now=True)
# When we will strike
striketime = models.DateTimeField(null=True, blank=True)
message = models.TextField()
subject = models.CharField(max_length=250)
# Coordinates of where this news applies
# By default, set it to the middle of the map
x = models.IntegerField(default=50)
y = models.IntegerField(default=50)
def __unicode__(self):
return self.subject
|
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Order(models.Model):
user = models.ForeignKey(User)
# Last updated timestamp, used for sorting
date = models.DateTimeField(auto_now_add=True, auto_now=True)
# When we will strike
striketime = models.DateTimeField(null=True)
message = models.TextField()
subject = models.CharField(max_length=250)
# Coordinates of where this news applies
# By default, set it to the middle of the map
x = models.IntegerField(default=50)
y = models.IntegerField(default=50)
def __unicode__(self):
return self.subject
Allow strike time to be blank.from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Order(models.Model):
user = models.ForeignKey(User)
# Last updated timestamp, used for sorting
date = models.DateTimeField(auto_now_add=True, auto_now=True)
# When we will strike
striketime = models.DateTimeField(null=True, blank=True)
message = models.TextField()
subject = models.CharField(max_length=250)
# Coordinates of where this news applies
# By default, set it to the middle of the map
x = models.IntegerField(default=50)
y = models.IntegerField(default=50)
def __unicode__(self):
return self.subject
|
<commit_before>from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Order(models.Model):
user = models.ForeignKey(User)
# Last updated timestamp, used for sorting
date = models.DateTimeField(auto_now_add=True, auto_now=True)
# When we will strike
striketime = models.DateTimeField(null=True)
message = models.TextField()
subject = models.CharField(max_length=250)
# Coordinates of where this news applies
# By default, set it to the middle of the map
x = models.IntegerField(default=50)
y = models.IntegerField(default=50)
def __unicode__(self):
return self.subject
<commit_msg>Allow strike time to be blank.<commit_after>from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class Order(models.Model):
user = models.ForeignKey(User)
# Last updated timestamp, used for sorting
date = models.DateTimeField(auto_now_add=True, auto_now=True)
# When we will strike
striketime = models.DateTimeField(null=True, blank=True)
message = models.TextField()
subject = models.CharField(max_length=250)
# Coordinates of where this news applies
# By default, set it to the middle of the map
x = models.IntegerField(default=50)
y = models.IntegerField(default=50)
def __unicode__(self):
return self.subject
|
88263748a1ec742e514b6f321002d06e6e79b36e
|
plim/adapters/babelplugin.py
|
plim/adapters/babelplugin.py
|
"""gettext message extraction via Babel: http://babel.edgewall.org/"""
from mako.ext.babelplugin import extract as _extract_mako
from .. import lexer
from ..util import StringIO
def extract(fileobj, keywords, comment_tags, options):
"""Extract messages from Plim templates.
:param fileobj: the file-like object the messages should be extracted from
:param keywords: a list of keywords (i.e. function names) that should be
recognized as translation functions
:param comment_tags: a list of translator tags to search for and include
in the results
:param options: a dictionary of additional options (optional)
:return: an iterator over ``(lineno, funcname, message, comments)`` tuples
:rtype: ``iterator``
"""
encoding = options.get('input_encoding', options.get('encoding', 'utf-8'))
data = lexer.compile_plim_source(fileobj.read().decode(encoding))
for extracted in _extract_mako(StringIO(data), keywords, comment_tags, options):
yield extracted
|
"""gettext message extraction via Babel: http://babel.edgewall.org/"""
from mako.ext.babelplugin import extract as _extract_mako
from .. import lexer
from ..util import StringIO, PY3K
def extract(fileobj, keywords, comment_tags, options):
"""Extract messages from Plim templates.
:param fileobj: the file-like object the messages should be extracted from
:param keywords: a list of keywords (i.e. function names) that should be
recognized as translation functions
:param comment_tags: a list of translator tags to search for and include
in the results
:param options: a dictionary of additional options (optional)
:return: an iterator over ``(lineno, funcname, message, comments)`` tuples
:rtype: ``iterator``
"""
raw_data = fileobj.read()
if not PY3K:
encoding = options.get('input_encoding', options.get('encoding', 'utf-8'))
raw_data = raw_data.decode(encoding)
data = lexer.compile_plim_source(raw_data)
for extracted in _extract_mako(StringIO(data), keywords, comment_tags, options):
yield extracted
|
Fix Babel plugin in Python3 environment
|
Fix Babel plugin in Python3 environment
|
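The fix hinges on decoding the raw input only on Python 2, where the plugin receives byte strings, and passing the data through untouched on Python 3. A generic sketch of that compatibility guard, assuming a PY3K flag like the one plim imports; this is not plim's actual util module.

import sys

PY3K = sys.version_info[0] >= 3

def read_template_source(fileobj, options):
    raw_data = fileobj.read()
    if not PY3K:
        # Python 2: the extractor hands us bytes, so decode before compiling
        encoding = options.get('input_encoding', options.get('encoding', 'utf-8'))
        raw_data = raw_data.decode(encoding)
    return raw_data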
Python
|
mit
|
kxxoling/Plim
|
"""gettext message extraction via Babel: http://babel.edgewall.org/"""
from mako.ext.babelplugin import extract as _extract_mako
from .. import lexer
from ..util import StringIO
def extract(fileobj, keywords, comment_tags, options):
"""Extract messages from Plim templates.
:param fileobj: the file-like object the messages should be extracted from
:param keywords: a list of keywords (i.e. function names) that should be
recognized as translation functions
:param comment_tags: a list of translator tags to search for and include
in the results
:param options: a dictionary of additional options (optional)
:return: an iterator over ``(lineno, funcname, message, comments)`` tuples
:rtype: ``iterator``
"""
encoding = options.get('input_encoding', options.get('encoding', 'utf-8'))
data = lexer.compile_plim_source(fileobj.read().decode(encoding))
for extracted in _extract_mako(StringIO(data), keywords, comment_tags, options):
yield extracted
Fix Babel plugin in Python3 environment
|
"""gettext message extraction via Babel: http://babel.edgewall.org/"""
from mako.ext.babelplugin import extract as _extract_mako
from .. import lexer
from ..util import StringIO, PY3K
def extract(fileobj, keywords, comment_tags, options):
"""Extract messages from Plim templates.
:param fileobj: the file-like object the messages should be extracted from
:param keywords: a list of keywords (i.e. function names) that should be
recognized as translation functions
:param comment_tags: a list of translator tags to search for and include
in the results
:param options: a dictionary of additional options (optional)
:return: an iterator over ``(lineno, funcname, message, comments)`` tuples
:rtype: ``iterator``
"""
raw_data = fileobj.read()
if not PY3K:
encoding = options.get('input_encoding', options.get('encoding', 'utf-8'))
raw_data = raw_data.decode(encoding)
data = lexer.compile_plim_source(raw_data)
for extracted in _extract_mako(StringIO(data), keywords, comment_tags, options):
yield extracted
|
<commit_before>"""gettext message extraction via Babel: http://babel.edgewall.org/"""
from mako.ext.babelplugin import extract as _extract_mako
from .. import lexer
from ..util import StringIO
def extract(fileobj, keywords, comment_tags, options):
"""Extract messages from Plim templates.
:param fileobj: the file-like object the messages should be extracted from
:param keywords: a list of keywords (i.e. function names) that should be
recognized as translation functions
:param comment_tags: a list of translator tags to search for and include
in the results
:param options: a dictionary of additional options (optional)
:return: an iterator over ``(lineno, funcname, message, comments)`` tuples
:rtype: ``iterator``
"""
encoding = options.get('input_encoding', options.get('encoding', 'utf-8'))
data = lexer.compile_plim_source(fileobj.read().decode(encoding))
for extracted in _extract_mako(StringIO(data), keywords, comment_tags, options):
yield extracted
<commit_msg>Fix Babel plugin in Python3 environment<commit_after>
|
"""gettext message extraction via Babel: http://babel.edgewall.org/"""
from mako.ext.babelplugin import extract as _extract_mako
from .. import lexer
from ..util import StringIO, PY3K
def extract(fileobj, keywords, comment_tags, options):
"""Extract messages from Plim templates.
:param fileobj: the file-like object the messages should be extracted from
:param keywords: a list of keywords (i.e. function names) that should be
recognized as translation functions
:param comment_tags: a list of translator tags to search for and include
in the results
:param options: a dictionary of additional options (optional)
:return: an iterator over ``(lineno, funcname, message, comments)`` tuples
:rtype: ``iterator``
"""
raw_data = fileobj.read()
if not PY3K:
encoding = options.get('input_encoding', options.get('encoding', 'utf-8'))
raw_data = raw_data.decode(encoding)
data = lexer.compile_plim_source(raw_data)
for extracted in _extract_mako(StringIO(data), keywords, comment_tags, options):
yield extracted
|
"""gettext message extraction via Babel: http://babel.edgewall.org/"""
from mako.ext.babelplugin import extract as _extract_mako
from .. import lexer
from ..util import StringIO
def extract(fileobj, keywords, comment_tags, options):
"""Extract messages from Plim templates.
:param fileobj: the file-like object the messages should be extracted from
:param keywords: a list of keywords (i.e. function names) that should be
recognized as translation functions
:param comment_tags: a list of translator tags to search for and include
in the results
:param options: a dictionary of additional options (optional)
:return: an iterator over ``(lineno, funcname, message, comments)`` tuples
:rtype: ``iterator``
"""
encoding = options.get('input_encoding', options.get('encoding', 'utf-8'))
data = lexer.compile_plim_source(fileobj.read().decode(encoding))
for extracted in _extract_mako(StringIO(data), keywords, comment_tags, options):
yield extracted
Fix Babel plugin in Python3 environment"""gettext message extraction via Babel: http://babel.edgewall.org/"""
from mako.ext.babelplugin import extract as _extract_mako
from .. import lexer
from ..util import StringIO, PY3K
def extract(fileobj, keywords, comment_tags, options):
"""Extract messages from Plim templates.
:param fileobj: the file-like object the messages should be extracted from
:param keywords: a list of keywords (i.e. function names) that should be
recognized as translation functions
:param comment_tags: a list of translator tags to search for and include
in the results
:param options: a dictionary of additional options (optional)
:return: an iterator over ``(lineno, funcname, message, comments)`` tuples
:rtype: ``iterator``
"""
raw_data = fileobj.read()
if not PY3K:
encoding = options.get('input_encoding', options.get('encoding', 'utf-8'))
raw_data = raw_data.decode(encoding)
data = lexer.compile_plim_source(raw_data)
for extracted in _extract_mako(StringIO(data), keywords, comment_tags, options):
yield extracted
|
<commit_before>"""gettext message extraction via Babel: http://babel.edgewall.org/"""
from mako.ext.babelplugin import extract as _extract_mako
from .. import lexer
from ..util import StringIO
def extract(fileobj, keywords, comment_tags, options):
"""Extract messages from Plim templates.
:param fileobj: the file-like object the messages should be extracted from
:param keywords: a list of keywords (i.e. function names) that should be
recognized as translation functions
:param comment_tags: a list of translator tags to search for and include
in the results
:param options: a dictionary of additional options (optional)
:return: an iterator over ``(lineno, funcname, message, comments)`` tuples
:rtype: ``iterator``
"""
encoding = options.get('input_encoding', options.get('encoding', 'utf-8'))
data = lexer.compile_plim_source(fileobj.read().decode(encoding))
for extracted in _extract_mako(StringIO(data), keywords, comment_tags, options):
yield extracted
<commit_msg>Fix Babel plugin in Python3 environment<commit_after>"""gettext message extraction via Babel: http://babel.edgewall.org/"""
from mako.ext.babelplugin import extract as _extract_mako
from .. import lexer
from ..util import StringIO, PY3K
def extract(fileobj, keywords, comment_tags, options):
"""Extract messages from Plim templates.
:param fileobj: the file-like object the messages should be extracted from
:param keywords: a list of keywords (i.e. function names) that should be
recognized as translation functions
:param comment_tags: a list of translator tags to search for and include
in the results
:param options: a dictionary of additional options (optional)
:return: an iterator over ``(lineno, funcname, message, comments)`` tuples
:rtype: ``iterator``
"""
raw_data = fileobj.read()
if not PY3K:
encoding = options.get('input_encoding', options.get('encoding', 'utf-8'))
raw_data = raw_data.decode(encoding)
data = lexer.compile_plim_source(raw_data)
for extracted in _extract_mako(StringIO(data), keywords, comment_tags, options):
yield extracted
|
b730fc84e07994d0a12357b70a1e912d0a032832
|
Lib/vanilla/test/testTools.py
|
Lib/vanilla/test/testTools.py
|
from AppKit import *
from PyObjCTools import AppHelper
class _VanillaMiniAppDelegate(NSObject):
def applicationShouldTerminateAfterLastWindowClosed_(self, notification):
return True
def executeVanillaTest(cls, **kwargs):
"""
Execute a Vanilla UI class in a mini application.
"""
app = NSApplication.sharedApplication()
delegate = _VanillaMiniAppDelegate.alloc().init()
app.setDelegate_(delegate)
cls(**kwargs)
app.activateIgnoringOtherApps_(True)
AppHelper.runEventLoop()
|
from AppKit import *
from PyObjCTools import AppHelper
class _VanillaMiniAppDelegate(NSObject):
def applicationShouldTerminateAfterLastWindowClosed_(self, notification):
return True
def executeVanillaTest(cls, **kwargs):
"""
Execute a Vanilla UI class in a mini application.
"""
app = NSApplication.sharedApplication()
delegate = _VanillaMiniAppDelegate.alloc().init()
app.setDelegate_(delegate)
mainMenu = NSMenu.alloc().initWithTitle_("Vanilla Test")
fileMenuItem = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_("File", None, "")
fileMenu = NSMenu.alloc().initWithTitle_("File")
fileMenuItem.setSubmenu_(fileMenu)
mainMenu.addItem_(fileMenuItem)
editMenuItem = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_("Edit", None, "")
editMenu = NSMenu.alloc().initWithTitle_("Edit")
editMenuItem.setSubmenu_(editMenu)
mainMenu.addItem_(editMenuItem)
helpMenuItem = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_("Help", None, "")
helpMenu = NSMenu.alloc().initWithTitle_("Help")
helpMenuItem.setSubmenu_(helpMenu)
mainMenu.addItem_(helpMenuItem)
app.setMainMenu_(mainMenu)
cls(**kwargs)
app.activateIgnoringOtherApps_(True)
AppHelper.runEventLoop()
|
Add a menu to the test runner.
|
Add a menu to the test runner.
|
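For orientation, a minimal sketch of how executeVanillaTest is typically driven with a small vanilla UI class; the window and button below are illustrative, and with the new code they would appear alongside the File/Edit/Help menu.

import vanilla
from vanilla.test.testTools import executeVanillaTest

class Demo(object):
    def __init__(self):
        # a tiny window with one button, purely for demonstration
        self.w = vanilla.Window((200, 60), "Demo")
        self.w.button = vanilla.Button((10, 10, -10, 20), "Press", callback=self.press)
        self.w.open()

    def press(self, sender):
        print("pressed")

executeVanillaTest(Demo)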
Python
|
mit
|
typemytype/vanilla,typesupply/vanilla,moyogo/vanilla
|
from AppKit import *
from PyObjCTools import AppHelper
class _VanillaMiniAppDelegate(NSObject):
def applicationShouldTerminateAfterLastWindowClosed_(self, notification):
return True
def executeVanillaTest(cls, **kwargs):
"""
Execute a Vanilla UI class in a mini application.
"""
app = NSApplication.sharedApplication()
delegate = _VanillaMiniAppDelegate.alloc().init()
app.setDelegate_(delegate)
cls(**kwargs)
app.activateIgnoringOtherApps_(True)
AppHelper.runEventLoop()
Add a menu to the test runner.
|
from AppKit import *
from PyObjCTools import AppHelper
class _VanillaMiniAppDelegate(NSObject):
def applicationShouldTerminateAfterLastWindowClosed_(self, notification):
return True
def executeVanillaTest(cls, **kwargs):
"""
Execute a Vanilla UI class in a mini application.
"""
app = NSApplication.sharedApplication()
delegate = _VanillaMiniAppDelegate.alloc().init()
app.setDelegate_(delegate)
mainMenu = NSMenu.alloc().initWithTitle_("Vanilla Test")
fileMenuItem = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_("File", None, "")
fileMenu = NSMenu.alloc().initWithTitle_("File")
fileMenuItem.setSubmenu_(fileMenu)
mainMenu.addItem_(fileMenuItem)
editMenuItem = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_("Edit", None, "")
editMenu = NSMenu.alloc().initWithTitle_("Edit")
editMenuItem.setSubmenu_(editMenu)
mainMenu.addItem_(editMenuItem)
helpMenuItem = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_("Help", None, "")
helpMenu = NSMenu.alloc().initWithTitle_("Help")
helpMenuItem.setSubmenu_(helpMenu)
mainMenu.addItem_(helpMenuItem)
app.setMainMenu_(mainMenu)
cls(**kwargs)
app.activateIgnoringOtherApps_(True)
AppHelper.runEventLoop()
|
<commit_before>from AppKit import *
from PyObjCTools import AppHelper
class _VanillaMiniAppDelegate(NSObject):
def applicationShouldTerminateAfterLastWindowClosed_(self, notification):
return True
def executeVanillaTest(cls, **kwargs):
"""
Execute a Vanilla UI class in a mini application.
"""
app = NSApplication.sharedApplication()
delegate = _VanillaMiniAppDelegate.alloc().init()
app.setDelegate_(delegate)
cls(**kwargs)
app.activateIgnoringOtherApps_(True)
AppHelper.runEventLoop()
<commit_msg>Add a menu to the test runner.<commit_after>
|
from AppKit import *
from PyObjCTools import AppHelper
class _VanillaMiniAppDelegate(NSObject):
def applicationShouldTerminateAfterLastWindowClosed_(self, notification):
return True
def executeVanillaTest(cls, **kwargs):
"""
Execute a Vanilla UI class in a mini application.
"""
app = NSApplication.sharedApplication()
delegate = _VanillaMiniAppDelegate.alloc().init()
app.setDelegate_(delegate)
mainMenu = NSMenu.alloc().initWithTitle_("Vanilla Test")
fileMenuItem = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_("File", None, "")
fileMenu = NSMenu.alloc().initWithTitle_("File")
fileMenuItem.setSubmenu_(fileMenu)
mainMenu.addItem_(fileMenuItem)
editMenuItem = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_("Edit", None, "")
editMenu = NSMenu.alloc().initWithTitle_("Edit")
editMenuItem.setSubmenu_(editMenu)
mainMenu.addItem_(editMenuItem)
helpMenuItem = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_("Help", None, "")
helpMenu = NSMenu.alloc().initWithTitle_("Help")
helpMenuItem.setSubmenu_(helpMenu)
mainMenu.addItem_(helpMenuItem)
app.setMainMenu_(mainMenu)
cls(**kwargs)
app.activateIgnoringOtherApps_(True)
AppHelper.runEventLoop()
|
from AppKit import *
from PyObjCTools import AppHelper
class _VanillaMiniAppDelegate(NSObject):
def applicationShouldTerminateAfterLastWindowClosed_(self, notification):
return True
def executeVanillaTest(cls, **kwargs):
"""
Execute a Vanilla UI class in a mini application.
"""
app = NSApplication.sharedApplication()
delegate = _VanillaMiniAppDelegate.alloc().init()
app.setDelegate_(delegate)
cls(**kwargs)
app.activateIgnoringOtherApps_(True)
AppHelper.runEventLoop()
Add a menu to the test runner.from AppKit import *
from PyObjCTools import AppHelper
class _VanillaMiniAppDelegate(NSObject):
def applicationShouldTerminateAfterLastWindowClosed_(self, notification):
return True
def executeVanillaTest(cls, **kwargs):
"""
Execute a Vanilla UI class in a mini application.
"""
app = NSApplication.sharedApplication()
delegate = _VanillaMiniAppDelegate.alloc().init()
app.setDelegate_(delegate)
mainMenu = NSMenu.alloc().initWithTitle_("Vanilla Test")
fileMenuItem = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_("File", None, "")
fileMenu = NSMenu.alloc().initWithTitle_("File")
fileMenuItem.setSubmenu_(fileMenu)
mainMenu.addItem_(fileMenuItem)
editMenuItem = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_("Edit", None, "")
editMenu = NSMenu.alloc().initWithTitle_("Edit")
editMenuItem.setSubmenu_(editMenu)
mainMenu.addItem_(editMenuItem)
helpMenuItem = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_("Help", None, "")
helpMenu = NSMenu.alloc().initWithTitle_("Help")
helpMenuItem.setSubmenu_(helpMenu)
mainMenu.addItem_(helpMenuItem)
app.setMainMenu_(mainMenu)
cls(**kwargs)
app.activateIgnoringOtherApps_(True)
AppHelper.runEventLoop()
|
<commit_before>from AppKit import *
from PyObjCTools import AppHelper
class _VanillaMiniAppDelegate(NSObject):
def applicationShouldTerminateAfterLastWindowClosed_(self, notification):
return True
def executeVanillaTest(cls, **kwargs):
"""
Execute a Vanilla UI class in a mini application.
"""
app = NSApplication.sharedApplication()
delegate = _VanillaMiniAppDelegate.alloc().init()
app.setDelegate_(delegate)
cls(**kwargs)
app.activateIgnoringOtherApps_(True)
AppHelper.runEventLoop()
<commit_msg>Add a menu to the test runner.<commit_after>from AppKit import *
from PyObjCTools import AppHelper
class _VanillaMiniAppDelegate(NSObject):
def applicationShouldTerminateAfterLastWindowClosed_(self, notification):
return True
def executeVanillaTest(cls, **kwargs):
"""
Execute a Vanilla UI class in a mini application.
"""
app = NSApplication.sharedApplication()
delegate = _VanillaMiniAppDelegate.alloc().init()
app.setDelegate_(delegate)
mainMenu = NSMenu.alloc().initWithTitle_("Vanilla Test")
fileMenuItem = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_("File", None, "")
fileMenu = NSMenu.alloc().initWithTitle_("File")
fileMenuItem.setSubmenu_(fileMenu)
mainMenu.addItem_(fileMenuItem)
editMenuItem = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_("Edit", None, "")
editMenu = NSMenu.alloc().initWithTitle_("Edit")
editMenuItem.setSubmenu_(editMenu)
mainMenu.addItem_(editMenuItem)
helpMenuItem = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_("Help", None, "")
helpMenu = NSMenu.alloc().initWithTitle_("Help")
helpMenuItem.setSubmenu_(helpMenu)
mainMenu.addItem_(helpMenuItem)
app.setMainMenu_(mainMenu)
cls(**kwargs)
app.activateIgnoringOtherApps_(True)
AppHelper.runEventLoop()
|
0dc45239dde56ec7f1406646de4749a5cf43303e
|
proj_name/app_name/models.py
|
proj_name/app_name/models.py
|
from django.db import models
class Poll(models.Model):
question = models.CharField(max_length=200)
pub_date = models.DateTimeField('date_published')
class Admin:
pass
def __unicode__(self):
return "<Poll '%s'>" % self.question
class Tag(models.Model):
name = models.CharField(max_length=200)
class Choice(models.Model):
poll = models.ForeignKey(Poll)
tags = models.ManyToManyField(Tag)
choice = models.CharField(max_length=200)
votes = models.IntegerField()
class Admin:
pass
def __unicode__(self):
return "<Choice '%s'>" % self.choice
class SelfRef(models.Model):
parent = models.ForeignKey('self',null=True)
name = models.CharField(max_length=50)
class MultiSelfRef(models.Model):
name = models.CharField(max_length=50)
ref = models.ManyToManyField('self')
|
from django.db import models
class Poll(models.Model):
question = models.CharField(max_length=200)
pub_date = models.DateTimeField('date_published')
class Admin:
pass
def __unicode__(self):
return "<Poll '%s'>" % self.question
class Tag(models.Model):
name = models.CharField(max_length=200)
class Choice(models.Model):
poll = models.ForeignKey(Poll)
tags = models.ManyToManyField(Tag)
choice = models.CharField(max_length=200)
votes = models.IntegerField()
class Admin:
pass
def __unicode__(self):
return "<Choice '%s'>" % self.choice
class SelfRef(models.Model):
parent = models.ForeignKey('self',null=True)
name = models.CharField(max_length=50)
class MultiSelfRef(models.Model):
name = models.CharField(max_length=50)
ref = models.ManyToManyField('self')
class PositionedTag(Tag):
position = models.IntegerField()
|
Add a test for django model inheritance of the foreign key kind.
|
Add a test for django model inheritance of the foreign key kind.
|
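For reference, PositionedTag(Tag) exercises Django's multi-table inheritance: each PositionedTag row is backed by an implicit one-to-one link to a Tag row, so it can be used wherever a Tag is expected. A short illustrative session; the choice object is a placeholder instance, not defined in the record.

# creating the child also creates the underlying parent Tag row
pt = PositionedTag.objects.create(name='featured', position=1)
Tag.objects.get(name='featured')     # the same row is visible as a plain Tag
choice.tags.add(pt)                  # accepted anywhere a Tag is expected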
Python
|
bsd-3-clause
|
g2p/tranquil
|
from django.db import models
class Poll(models.Model):
question = models.CharField(max_length=200)
pub_date = models.DateTimeField('date_published')
class Admin:
pass
def __unicode__(self):
return "<Poll '%s'>" % self.question
class Tag(models.Model):
name = models.CharField(max_length=200)
class Choice(models.Model):
poll = models.ForeignKey(Poll)
tags = models.ManyToManyField(Tag)
choice = models.CharField(max_length=200)
votes = models.IntegerField()
class Admin:
pass
def __unicode__(self):
return "<Choice '%s'>" % self.choice
class SelfRef(models.Model):
parent = models.ForeignKey('self',null=True)
name = models.CharField(max_length=50)
class MultiSelfRef(models.Model):
name = models.CharField(max_length=50)
ref = models.ManyToManyField('self')
Add a test for django model inheritance of the foreign key kind.
|
from django.db import models
class Poll(models.Model):
question = models.CharField(max_length=200)
pub_date = models.DateTimeField('date_published')
class Admin:
pass
def __unicode__(self):
return "<Poll '%s'>" % self.question
class Tag(models.Model):
name = models.CharField(max_length=200)
class Choice(models.Model):
poll = models.ForeignKey(Poll)
tags = models.ManyToManyField(Tag)
choice = models.CharField(max_length=200)
votes = models.IntegerField()
class Admin:
pass
def __unicode__(self):
return "<Choice '%s'>" % self.choice
class SelfRef(models.Model):
parent = models.ForeignKey('self',null=True)
name = models.CharField(max_length=50)
class MultiSelfRef(models.Model):
name = models.CharField(max_length=50)
ref = models.ManyToManyField('self')
class PositionedTag(Tag):
position = models.IntegerField()
|
<commit_before>from django.db import models
class Poll(models.Model):
question = models.CharField(max_length=200)
pub_date = models.DateTimeField('date_published')
class Admin:
pass
def __unicode__(self):
return "<Poll '%s'>" % self.question
class Tag(models.Model):
name = models.CharField(max_length=200)
class Choice(models.Model):
poll = models.ForeignKey(Poll)
tags = models.ManyToManyField(Tag)
choice = models.CharField(max_length=200)
votes = models.IntegerField()
class Admin:
pass
def __unicode__(self):
return "<Choice '%s'>" % self.choice
class SelfRef(models.Model):
parent = models.ForeignKey('self',null=True)
name = models.CharField(max_length=50)
class MultiSelfRef(models.Model):
name = models.CharField(max_length=50)
ref = models.ManyToManyField('self')
<commit_msg>Add a test for django model inheritance of the foreign key kind.<commit_after>
|
from django.db import models
class Poll(models.Model):
question = models.CharField(max_length=200)
pub_date = models.DateTimeField('date_published')
class Admin:
pass
def __unicode__(self):
return "<Poll '%s'>" % self.question
class Tag(models.Model):
name = models.CharField(max_length=200)
class Choice(models.Model):
poll = models.ForeignKey(Poll)
tags = models.ManyToManyField(Tag)
choice = models.CharField(max_length=200)
votes = models.IntegerField()
class Admin:
pass
def __unicode__(self):
return "<Choice '%s'>" % self.choice
class SelfRef(models.Model):
parent = models.ForeignKey('self',null=True)
name = models.CharField(max_length=50)
class MultiSelfRef(models.Model):
name = models.CharField(max_length=50)
ref = models.ManyToManyField('self')
class PositionedTag(Tag):
position = models.IntegerField()
|
from django.db import models
class Poll(models.Model):
question = models.CharField(max_length=200)
pub_date = models.DateTimeField('date_published')
class Admin:
pass
def __unicode__(self):
return "<Poll '%s'>" % self.question
class Tag(models.Model):
name = models.CharField(max_length=200)
class Choice(models.Model):
poll = models.ForeignKey(Poll)
tags = models.ManyToManyField(Tag)
choice = models.CharField(max_length=200)
votes = models.IntegerField()
class Admin:
pass
def __unicode__(self):
return "<Choice '%s'>" % self.choice
class SelfRef(models.Model):
parent = models.ForeignKey('self',null=True)
name = models.CharField(max_length=50)
class MultiSelfRef(models.Model):
name = models.CharField(max_length=50)
ref = models.ManyToManyField('self')
Add a test for django model inheritance of the foreign key kind.from django.db import models
class Poll(models.Model):
question = models.CharField(max_length=200)
pub_date = models.DateTimeField('date_published')
class Admin:
pass
def __unicode__(self):
return "<Poll '%s'>" % self.question
class Tag(models.Model):
name = models.CharField(max_length=200)
class Choice(models.Model):
poll = models.ForeignKey(Poll)
tags = models.ManyToManyField(Tag)
choice = models.CharField(max_length=200)
votes = models.IntegerField()
class Admin:
pass
def __unicode__(self):
return "<Choice '%s'>" % self.choice
class SelfRef(models.Model):
parent = models.ForeignKey('self',null=True)
name = models.CharField(max_length=50)
class MultiSelfRef(models.Model):
name = models.CharField(max_length=50)
ref = models.ManyToManyField('self')
class PositionedTag(Tag):
position = models.IntegerField()
|
<commit_before>from django.db import models
class Poll(models.Model):
question = models.CharField(max_length=200)
pub_date = models.DateTimeField('date_published')
class Admin:
pass
def __unicode__(self):
return "<Poll '%s'>" % self.question
class Tag(models.Model):
name = models.CharField(max_length=200)
class Choice(models.Model):
poll = models.ForeignKey(Poll)
tags = models.ManyToManyField(Tag)
choice = models.CharField(max_length=200)
votes = models.IntegerField()
class Admin:
pass
def __unicode__(self):
return "<Choice '%s'>" % self.choice
class SelfRef(models.Model):
parent = models.ForeignKey('self',null=True)
name = models.CharField(max_length=50)
class MultiSelfRef(models.Model):
name = models.CharField(max_length=50)
ref = models.ManyToManyField('self')
<commit_msg>Add a test for django model inheritance of the foreign key kind.<commit_after>from django.db import models
class Poll(models.Model):
question = models.CharField(max_length=200)
pub_date = models.DateTimeField('date_published')
class Admin:
pass
def __unicode__(self):
return "<Poll '%s'>" % self.question
class Tag(models.Model):
name = models.CharField(max_length=200)
class Choice(models.Model):
poll = models.ForeignKey(Poll)
tags = models.ManyToManyField(Tag)
choice = models.CharField(max_length=200)
votes = models.IntegerField()
class Admin:
pass
def __unicode__(self):
return "<Choice '%s'>" % self.choice
class SelfRef(models.Model):
parent = models.ForeignKey('self',null=True)
name = models.CharField(max_length=50)
class MultiSelfRef(models.Model):
name = models.CharField(max_length=50)
ref = models.ManyToManyField('self')
class PositionedTag(Tag):
position = models.IntegerField()
|
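Editor's sketch for the record above: the new PositionedTag(Tag) model exercises Django multi-table inheritance, which links the child table to its parent through an implicit OneToOneField, i.e. a foreign-key kind of relation. The snippet below is a minimal, hypothetical illustration, not part of the original test suite; the settings.configure() boilerplate and the "demo" app label exist only so it can run standalone, and it assumes a reasonably recent Django where the parent link is exposed as tag_ptr with a remote_field attribute.

import django
from django.conf import settings

settings.configure()   # empty settings are enough; no database is touched
django.setup()

from django.db import models

class Tag(models.Model):
    name = models.CharField(max_length=200)

    class Meta:
        app_label = "demo"   # explicit label so the model can live outside INSTALLED_APPS

class PositionedTag(Tag):
    # Multi-table inheritance: Django adds an implicit
    # tag_ptr = OneToOneField(Tag, parent_link=True) column to this model.
    position = models.IntegerField()

    class Meta:
        app_label = "demo"

parent_link = PositionedTag._meta.get_field("tag_ptr")
print(parent_link.remote_field.model is Tag)   # True: the child points back at Tag
print(parent_link.one_to_one)                  # True: it is a one-to-one foreign-key link

Running it prints True twice, which is the behaviour the added test presumably wants to pin down: the child model reaches its parent through an ordinary relational field.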
16dda42316176f0ad9c747731764855792fe88d6
|
lymph/utils/observables.py
|
lymph/utils/observables.py
|
# Taken from https://github.com/delivero/lymph-storage/blob/master/lymph/storage/observables.py
class Observable(object):
def __init__(self):
self.observers = {}
def notify_observers(self, action, *args, **kwargs):
for callback in self.observers.get(action, ()):
callback(*args, **kwargs)
def observe(self, action, callback):
self.observers.setdefault(action, []).append(callback)
|
class Observable(object):
def __init__(self):
self.observers = {}
def notify_observers(self, action, *args, **kwargs):
kwargs.setdefault('action', action)
for callback in self.observers.get(action, ()):
callback(*args, **kwargs)
def observe(self, actions, callback):
if not isinstance(actions, (tuple, list)):
actions = (actions,)
for action in actions:
self.observers.setdefault(action, []).append(callback)
|
Allow observing more than one action at once
|
Allow observing more than one action at once
|
Python
|
apache-2.0
|
lyudmildrx/lymph,mouadino/lymph,Drahflow/lymph,itakouna/lymph,vpikulik/lymph,deliveryhero/lymph,kstrempel/lymph,alazaro/lymph,lyudmildrx/lymph,itakouna/lymph,mamachanko/lymph,torte/lymph,mamachanko/lymph,lyudmildrx/lymph,alazaro/lymph,mouadino/lymph,mamachanko/lymph,mouadino/lymph,alazaro/lymph,itakouna/lymph,dushyant88/lymph
|
# Taken from https://github.com/delivero/lymph-storage/blob/master/lymph/storage/observables.py
class Observable(object):
def __init__(self):
self.observers = {}
def notify_observers(self, action, *args, **kwargs):
for callback in self.observers.get(action, ()):
callback(*args, **kwargs)
def observe(self, action, callback):
self.observers.setdefault(action, []).append(callback)
Allow observing more than one action at once
|
class Observable(object):
def __init__(self):
self.observers = {}
def notify_observers(self, action, *args, **kwargs):
kwargs.setdefault('action', action)
for callback in self.observers.get(action, ()):
callback(*args, **kwargs)
def observe(self, actions, callback):
if not isinstance(actions, (tuple, list)):
actions = (actions,)
for action in actions:
self.observers.setdefault(action, []).append(callback)
|
<commit_before># Taken from https://github.com/delivero/lymph-storage/blob/master/lymph/storage/observables.py
class Observable(object):
def __init__(self):
self.observers = {}
def notify_observers(self, action, *args, **kwargs):
for callback in self.observers.get(action, ()):
callback(*args, **kwargs)
def observe(self, action, callback):
self.observers.setdefault(action, []).append(callback)
<commit_msg>Allow observing more than one action at once<commit_after>
|
class Observable(object):
def __init__(self):
self.observers = {}
def notify_observers(self, action, *args, **kwargs):
kwargs.setdefault('action', action)
for callback in self.observers.get(action, ()):
callback(*args, **kwargs)
def observe(self, actions, callback):
if not isinstance(actions, (tuple, list)):
actions = (actions,)
for action in actions:
self.observers.setdefault(action, []).append(callback)
|
# Taken from https://github.com/delivero/lymph-storage/blob/master/lymph/storage/observables.py
class Observable(object):
def __init__(self):
self.observers = {}
def notify_observers(self, action, *args, **kwargs):
for callback in self.observers.get(action, ()):
callback(*args, **kwargs)
def observe(self, action, callback):
self.observers.setdefault(action, []).append(callback)
Allow observing more than one action at once
class Observable(object):
def __init__(self):
self.observers = {}
def notify_observers(self, action, *args, **kwargs):
kwargs.setdefault('action', action)
for callback in self.observers.get(action, ()):
callback(*args, **kwargs)
def observe(self, actions, callback):
if not isinstance(actions, (tuple, list)):
actions = (actions,)
for action in actions:
self.observers.setdefault(action, []).append(callback)
|
<commit_before># Taken from https://github.com/delivero/lymph-storage/blob/master/lymph/storage/observables.py
class Observable(object):
def __init__(self):
self.observers = {}
def notify_observers(self, action, *args, **kwargs):
for callback in self.observers.get(action, ()):
callback(*args, **kwargs)
def observe(self, action, callback):
self.observers.setdefault(action, []).append(callback)
<commit_msg>Allow observing more than one action at once<commit_after>
class Observable(object):
def __init__(self):
self.observers = {}
def notify_observers(self, action, *args, **kwargs):
kwargs.setdefault('action', action)
for callback in self.observers.get(action, ()):
callback(*args, **kwargs)
def observe(self, actions, callback):
if not isinstance(actions, (tuple, list)):
actions = (actions,)
for action in actions:
self.observers.setdefault(action, []).append(callback)
|
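Editor's sketch for the record above: with the extended observe(), one callback can be registered for several actions at once, and notify_observers() now injects the triggering action as a keyword argument. The class is copied from the new contents; the store/on_change names are illustrative only.

class Observable(object):
    def __init__(self):
        self.observers = {}

    def notify_observers(self, action, *args, **kwargs):
        kwargs.setdefault('action', action)
        for callback in self.observers.get(action, ()):
            callback(*args, **kwargs)

    def observe(self, actions, callback):
        if not isinstance(actions, (tuple, list)):
            actions = (actions,)
        for action in actions:
            self.observers.setdefault(action, []).append(callback)


def on_change(key, action=None):
    # The injected 'action' keyword tells a shared callback which event fired.
    print("%s -> %s" % (action, key))

store = Observable()
store.observe(('set', 'delete'), on_change)   # one callback, two actions
store.notify_observers('set', 'greeting')     # prints "set -> greeting"
store.notify_observers('delete', 'greeting')  # prints "delete -> greeting"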
8061c40275fb803952a9d0eec2f58788f07673c7
|
src/presence_analyzer/main.py
|
src/presence_analyzer/main.py
|
# -*- coding: utf-8 -*-
"""
Flask app initialization.
"""
import os.path
from flask import Flask
MAIN_DATA_CSV = os.path.join(
os.path.dirname(__file__), '..', '..', 'runtime', 'data', 'sample_data.csv'
)
app = Flask(__name__) # pylint: disable=invalid-name
app.config.update(
DEBUG = True,
DATA_CSV = MAIN_DATA_CSV
)
|
# -*- coding: utf-8 -*-
"""
Flask app initialization.
"""
import os.path
from flask import Flask
MAIN_DATA_CSV = os.path.join(
os.path.dirname(__file__), '..', '..', 'runtime', 'data', 'sample_data.csv'
)
app = Flask(__name__) # pylint: disable=invalid-name
app.config.update(
DEBUG=True,
DATA_CSV=MAIN_DATA_CSV
)
|
Remove spaces in function named arguments
|
Remove spaces in function named arguments
|
Python
|
mit
|
stxnext-kindergarten/presence-analyzer-pburniak,stxnext-kindergarten/presence-analyzer-pburniak
|
# -*- coding: utf-8 -*-
"""
Flask app initialization.
"""
import os.path
from flask import Flask
MAIN_DATA_CSV = os.path.join(
os.path.dirname(__file__), '..', '..', 'runtime', 'data', 'sample_data.csv'
)
app = Flask(__name__) # pylint: disable=invalid-name
app.config.update(
DEBUG = True,
DATA_CSV = MAIN_DATA_CSV
)
Remove spaces in function named arguments
|
# -*- coding: utf-8 -*-
"""
Flask app initialization.
"""
import os.path
from flask import Flask
MAIN_DATA_CSV = os.path.join(
os.path.dirname(__file__), '..', '..', 'runtime', 'data', 'sample_data.csv'
)
app = Flask(__name__) # pylint: disable=invalid-name
app.config.update(
DEBUG=True,
DATA_CSV=MAIN_DATA_CSV
)
|
<commit_before># -*- coding: utf-8 -*-
"""
Flask app initialization.
"""
import os.path
from flask import Flask
MAIN_DATA_CSV = os.path.join(
os.path.dirname(__file__), '..', '..', 'runtime', 'data', 'sample_data.csv'
)
app = Flask(__name__) # pylint: disable=invalid-name
app.config.update(
DEBUG = True,
DATA_CSV = MAIN_DATA_CSV
)
<commit_msg>Remove spaces in function named arguments<commit_after>
|
# -*- coding: utf-8 -*-
"""
Flask app initialization.
"""
import os.path
from flask import Flask
MAIN_DATA_CSV = os.path.join(
os.path.dirname(__file__), '..', '..', 'runtime', 'data', 'sample_data.csv'
)
app = Flask(__name__) # pylint: disable=invalid-name
app.config.update(
DEBUG=True,
DATA_CSV=MAIN_DATA_CSV
)
|
# -*- coding: utf-8 -*-
"""
Flask app initialization.
"""
import os.path
from flask import Flask
MAIN_DATA_CSV = os.path.join(
os.path.dirname(__file__), '..', '..', 'runtime', 'data', 'sample_data.csv'
)
app = Flask(__name__) # pylint: disable=invalid-name
app.config.update(
DEBUG = True,
DATA_CSV = MAIN_DATA_CSV
)
Remove spaces in function named arguments# -*- coding: utf-8 -*-
"""
Flask app initialization.
"""
import os.path
from flask import Flask
MAIN_DATA_CSV = os.path.join(
os.path.dirname(__file__), '..', '..', 'runtime', 'data', 'sample_data.csv'
)
app = Flask(__name__) # pylint: disable=invalid-name
app.config.update(
DEBUG=True,
DATA_CSV=MAIN_DATA_CSV
)
|
<commit_before># -*- coding: utf-8 -*-
"""
Flask app initialization.
"""
import os.path
from flask import Flask
MAIN_DATA_CSV = os.path.join(
os.path.dirname(__file__), '..', '..', 'runtime', 'data', 'sample_data.csv'
)
app = Flask(__name__) # pylint: disable=invalid-name
app.config.update(
DEBUG = True,
DATA_CSV = MAIN_DATA_CSV
)
<commit_msg>Remove spaces in function named arguments<commit_after># -*- coding: utf-8 -*-
"""
Flask app initialization.
"""
import os.path
from flask import Flask
MAIN_DATA_CSV = os.path.join(
os.path.dirname(__file__), '..', '..', 'runtime', 'data', 'sample_data.csv'
)
app = Flask(__name__) # pylint: disable=invalid-name
app.config.update(
DEBUG=True,
DATA_CSV=MAIN_DATA_CSV
)
|
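Editor's sketch for the record above: the commit itself is only a PEP 8 whitespace fix around keyword arguments, but the app.config.update() call it touches is how the rest of the app reads DEBUG and DATA_CSV. The route below is hypothetical and not part of presence_analyzer; it just shows that the configured keys behave like a plain dictionary at request time.

import os.path
from flask import Flask

app = Flask(__name__)
app.config.update(
    DEBUG=True,
    DATA_CSV=os.path.join('runtime', 'data', 'sample_data.csv'),
)

@app.route('/data-file')
def data_file():
    # Config keys set at startup are read back through app.config.
    return app.config['DATA_CSV']

if __name__ == '__main__':
    with app.test_client() as client:
        print(client.get('/data-file').data)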
383480592964f5b1f9fd0bb31672464d94ee46e5
|
test/integration/022_bigquery_test/test_bigquery_adapter_specific.py
|
test/integration/022_bigquery_test/test_bigquery_adapter_specific.py
|
""""Test adapter specific config options."""
from test.integration.base import DBTIntegrationTest, use_profile
import textwrap
import yaml
class TestBigqueryAdapterSpecific(DBTIntegrationTest):
@property
def schema(self):
return "bigquery_test_022"
@property
def models(self):
return "adapter-specific-models"
@property
def profile_config(self):
return self.bigquery_profile()
@property
def project_config(self):
return yaml.safe_load(textwrap.dedent('''\
config-version: 2
models:
test:
materialized: table
expiring_table:
time_to_expiration: 4
'''))
@use_profile('bigquery')
def test_bigquery_time_to_expiration(self):
_, stdout = self.run_dbt_and_capture()
self.assertIn(
'expiration_timestamp: TIMESTAMP_ADD(CURRENT_TIMESTAMP(), INTERVAL '
'4 hour)', stdout)
|
""""Test adapter specific config options."""
from test.integration.base import DBTIntegrationTest, use_profile
import textwrap
import yaml
class TestBigqueryAdapterSpecific(DBTIntegrationTest):
@property
def schema(self):
return "bigquery_test_022"
@property
def models(self):
return "adapter-specific-models"
@property
def profile_config(self):
return self.bigquery_profile()
@property
def project_config(self):
return yaml.safe_load(textwrap.dedent('''\
config-version: 2
models:
test:
materialized: table
expiring_table:
time_to_expiration: 4
'''))
@use_profile('bigquery')
def test_bigquery_time_to_expiration(self):
results = self.run_dbt()
self.assertIn(
'expiration_timestamp: TIMESTAMP_ADD(CURRENT_TIMESTAMP(), INTERVAL '
'4 hour)', results[0].node.injected_sql)
|
Use injected sql from results
|
Use injected sql from results
|
Python
|
apache-2.0
|
analyst-collective/dbt,analyst-collective/dbt
|
""""Test adapter specific config options."""
from test.integration.base import DBTIntegrationTest, use_profile
import textwrap
import yaml
class TestBigqueryAdapterSpecific(DBTIntegrationTest):
@property
def schema(self):
return "bigquery_test_022"
@property
def models(self):
return "adapter-specific-models"
@property
def profile_config(self):
return self.bigquery_profile()
@property
def project_config(self):
return yaml.safe_load(textwrap.dedent('''\
config-version: 2
models:
test:
materialized: table
expiring_table:
time_to_expiration: 4
'''))
@use_profile('bigquery')
def test_bigquery_time_to_expiration(self):
_, stdout = self.run_dbt_and_capture()
self.assertIn(
'expiration_timestamp: TIMESTAMP_ADD(CURRENT_TIMESTAMP(), INTERVAL '
'4 hour)', stdout)
Use injected sql from results
|
""""Test adapter specific config options."""
from test.integration.base import DBTIntegrationTest, use_profile
import textwrap
import yaml
class TestBigqueryAdapterSpecific(DBTIntegrationTest):
@property
def schema(self):
return "bigquery_test_022"
@property
def models(self):
return "adapter-specific-models"
@property
def profile_config(self):
return self.bigquery_profile()
@property
def project_config(self):
return yaml.safe_load(textwrap.dedent('''\
config-version: 2
models:
test:
materialized: table
expiring_table:
time_to_expiration: 4
'''))
@use_profile('bigquery')
def test_bigquery_time_to_expiration(self):
results = self.run_dbt()
self.assertIn(
'expiration_timestamp: TIMESTAMP_ADD(CURRENT_TIMESTAMP(), INTERVAL '
'4 hour)', results[0].node.injected_sql)
|
<commit_before>""""Test adapter specific config options."""
from test.integration.base import DBTIntegrationTest, use_profile
import textwrap
import yaml
class TestBigqueryAdapterSpecific(DBTIntegrationTest):
@property
def schema(self):
return "bigquery_test_022"
@property
def models(self):
return "adapter-specific-models"
@property
def profile_config(self):
return self.bigquery_profile()
@property
def project_config(self):
return yaml.safe_load(textwrap.dedent('''\
config-version: 2
models:
test:
materialized: table
expiring_table:
time_to_expiration: 4
'''))
@use_profile('bigquery')
def test_bigquery_time_to_expiration(self):
_, stdout = self.run_dbt_and_capture()
self.assertIn(
'expiration_timestamp: TIMESTAMP_ADD(CURRENT_TIMESTAMP(), INTERVAL '
'4 hour)', stdout)
<commit_msg>Use injected sql from results<commit_after>
|
""""Test adapter specific config options."""
from test.integration.base import DBTIntegrationTest, use_profile
import textwrap
import yaml
class TestBigqueryAdapterSpecific(DBTIntegrationTest):
@property
def schema(self):
return "bigquery_test_022"
@property
def models(self):
return "adapter-specific-models"
@property
def profile_config(self):
return self.bigquery_profile()
@property
def project_config(self):
return yaml.safe_load(textwrap.dedent('''\
config-version: 2
models:
test:
materialized: table
expiring_table:
time_to_expiration: 4
'''))
@use_profile('bigquery')
def test_bigquery_time_to_expiration(self):
results = self.run_dbt()
self.assertIn(
'expiration_timestamp: TIMESTAMP_ADD(CURRENT_TIMESTAMP(), INTERVAL '
'4 hour)', results[0].node.injected_sql)
|
""""Test adapter specific config options."""
from test.integration.base import DBTIntegrationTest, use_profile
import textwrap
import yaml
class TestBigqueryAdapterSpecific(DBTIntegrationTest):
@property
def schema(self):
return "bigquery_test_022"
@property
def models(self):
return "adapter-specific-models"
@property
def profile_config(self):
return self.bigquery_profile()
@property
def project_config(self):
return yaml.safe_load(textwrap.dedent('''\
config-version: 2
models:
test:
materialized: table
expiring_table:
time_to_expiration: 4
'''))
@use_profile('bigquery')
def test_bigquery_time_to_expiration(self):
_, stdout = self.run_dbt_and_capture()
self.assertIn(
'expiration_timestamp: TIMESTAMP_ADD(CURRENT_TIMESTAMP(), INTERVAL '
'4 hour)', stdout)
Use injected sql from results""""Test adapter specific config options."""
from test.integration.base import DBTIntegrationTest, use_profile
import textwrap
import yaml
class TestBigqueryAdapterSpecific(DBTIntegrationTest):
@property
def schema(self):
return "bigquery_test_022"
@property
def models(self):
return "adapter-specific-models"
@property
def profile_config(self):
return self.bigquery_profile()
@property
def project_config(self):
return yaml.safe_load(textwrap.dedent('''\
config-version: 2
models:
test:
materialized: table
expiring_table:
time_to_expiration: 4
'''))
@use_profile('bigquery')
def test_bigquery_time_to_expiration(self):
results = self.run_dbt()
self.assertIn(
'expiration_timestamp: TIMESTAMP_ADD(CURRENT_TIMESTAMP(), INTERVAL '
'4 hour)', results[0].node.injected_sql)
|
<commit_before>""""Test adapter specific config options."""
from test.integration.base import DBTIntegrationTest, use_profile
import textwrap
import yaml
class TestBigqueryAdapterSpecific(DBTIntegrationTest):
@property
def schema(self):
return "bigquery_test_022"
@property
def models(self):
return "adapter-specific-models"
@property
def profile_config(self):
return self.bigquery_profile()
@property
def project_config(self):
return yaml.safe_load(textwrap.dedent('''\
config-version: 2
models:
test:
materialized: table
expiring_table:
time_to_expiration: 4
'''))
@use_profile('bigquery')
def test_bigquery_time_to_expiration(self):
_, stdout = self.run_dbt_and_capture()
self.assertIn(
'expiration_timestamp: TIMESTAMP_ADD(CURRENT_TIMESTAMP(), INTERVAL '
'4 hour)', stdout)
<commit_msg>Use injected sql from results<commit_after>""""Test adapter specific config options."""
from test.integration.base import DBTIntegrationTest, use_profile
import textwrap
import yaml
class TestBigqueryAdapterSpecific(DBTIntegrationTest):
@property
def schema(self):
return "bigquery_test_022"
@property
def models(self):
return "adapter-specific-models"
@property
def profile_config(self):
return self.bigquery_profile()
@property
def project_config(self):
return yaml.safe_load(textwrap.dedent('''\
config-version: 2
models:
test:
materialized: table
expiring_table:
time_to_expiration: 4
'''))
@use_profile('bigquery')
def test_bigquery_time_to_expiration(self):
results = self.run_dbt()
self.assertIn(
'expiration_timestamp: TIMESTAMP_ADD(CURRENT_TIMESTAMP(), INTERVAL '
'4 hour)', results[0].node.injected_sql)
|
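Editor's sketch for the record above: the change replaces a grep over captured stdout with an assertion on the structured run results (results[0].node.injected_sql). Reproducing that inside dbt's integration harness is not practical here, so the snippet below mimics the pattern with plain unittest objects; run_dbt_stub, RunResult-like namespaces and the SQL string are stand-ins, not dbt's real API.

import unittest
from types import SimpleNamespace

def run_dbt_stub():
    """Pretend runner returning dbt-like results with compiled SQL attached."""
    node = SimpleNamespace(injected_sql=(
        "create table ... options(expiration_timestamp: "
        "TIMESTAMP_ADD(CURRENT_TIMESTAMP(), INTERVAL 4 hour))"
    ))
    return [SimpleNamespace(node=node)]

class TimeToExpirationPattern(unittest.TestCase):
    def test_asserts_on_results_not_stdout(self):
        # Asserting on structured results is less brittle than scraping logs.
        results = run_dbt_stub()
        self.assertIn(
            'expiration_timestamp: TIMESTAMP_ADD(CURRENT_TIMESTAMP(), INTERVAL '
            '4 hour)', results[0].node.injected_sql)

if __name__ == '__main__':
    unittest.main()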
8f97399b10b99ea132c9928ff8a852f0b06d2064
|
api/api/setup.py
|
api/api/setup.py
|
"""
Setup for the API
"""
import api
log = api.logger.use(__name__)
def index_mongo():
"""
Ensure the mongo collections are indexed.
"""
db = api.common.get_conn()
log.debug("Ensuring mongo is indexed.")
db.users.ensure_index("uid", unique=True, name="unique uid")
db.groups.ensure_index("gid", unique=True, name="unique gid")
db.problems.ensure_index("pid", unique=True, name="unique pid")
db.submissions.ensure_index("tid", name="submission tids")
db.cache.ensure_index("expireAt", expireAfterSeconds=0)
|
"""
Setup for the API
"""
import api
log = api.logger.use(__name__)
def index_mongo():
"""
Ensure the mongo collections are indexed.
"""
db = api.common.get_conn()
log.debug("Ensuring mongo is indexed.")
db.users.ensure_index("uid", unique=True, name="unique uid")
db.groups.ensure_index("gid", unique=True, name="unique gid")
db.problems.ensure_index("pid", unique=True, name="unique pid")
db.submissions.ensure_index("tid", name="submission tids")
db.cache.ensure_index("expireAt", expireAfterSeconds=0)
db.cache.ensure_index("function", name="function")
|
Index on the cache collection. More work could be done here
|
Index on the cache collection. More work could be done here
|
Python
|
mit
|
IceCTF/ctf-platform,IceCTF/ctf-platform,IceCTF/ctf-platform
|
"""
Setup for the API
"""
import api
log = api.logger.use(__name__)
def index_mongo():
"""
Ensure the mongo collections are indexed.
"""
db = api.common.get_conn()
log.debug("Ensuring mongo is indexed.")
db.users.ensure_index("uid", unique=True, name="unique uid")
db.groups.ensure_index("gid", unique=True, name="unique gid")
db.problems.ensure_index("pid", unique=True, name="unique pid")
db.submissions.ensure_index("tid", name="submission tids")
db.cache.ensure_index("expireAt", expireAfterSeconds=0)
Index on the cache collection. More work could be done here
|
"""
Setup for the API
"""
import api
log = api.logger.use(__name__)
def index_mongo():
"""
Ensure the mongo collections are indexed.
"""
db = api.common.get_conn()
log.debug("Ensuring mongo is indexed.")
db.users.ensure_index("uid", unique=True, name="unique uid")
db.groups.ensure_index("gid", unique=True, name="unique gid")
db.problems.ensure_index("pid", unique=True, name="unique pid")
db.submissions.ensure_index("tid", name="submission tids")
db.cache.ensure_index("expireAt", expireAfterSeconds=0)
db.cache.ensure_index("function", name="function")
|
<commit_before>"""
Setup for the API
"""
import api
log = api.logger.use(__name__)
def index_mongo():
"""
Ensure the mongo collections are indexed.
"""
db = api.common.get_conn()
log.debug("Ensuring mongo is indexed.")
db.users.ensure_index("uid", unique=True, name="unique uid")
db.groups.ensure_index("gid", unique=True, name="unique gid")
db.problems.ensure_index("pid", unique=True, name="unique pid")
db.submissions.ensure_index("tid", name="submission tids")
db.cache.ensure_index("expireAt", expireAfterSeconds=0)
<commit_msg>Index on the cache collection. More work could be done here<commit_after>
|
"""
Setup for the API
"""
import api
log = api.logger.use(__name__)
def index_mongo():
"""
Ensure the mongo collections are indexed.
"""
db = api.common.get_conn()
log.debug("Ensuring mongo is indexed.")
db.users.ensure_index("uid", unique=True, name="unique uid")
db.groups.ensure_index("gid", unique=True, name="unique gid")
db.problems.ensure_index("pid", unique=True, name="unique pid")
db.submissions.ensure_index("tid", name="submission tids")
db.cache.ensure_index("expireAt", expireAfterSeconds=0)
db.cache.ensure_index("function", name="function")
|
"""
Setup for the API
"""
import api
log = api.logger.use(__name__)
def index_mongo():
"""
Ensure the mongo collections are indexed.
"""
db = api.common.get_conn()
log.debug("Ensuring mongo is indexed.")
db.users.ensure_index("uid", unique=True, name="unique uid")
db.groups.ensure_index("gid", unique=True, name="unique gid")
db.problems.ensure_index("pid", unique=True, name="unique pid")
db.submissions.ensure_index("tid", name="submission tids")
db.cache.ensure_index("expireAt", expireAfterSeconds=0)
Index on the cache collection. More work could be done here"""
Setup for the API
"""
import api
log = api.logger.use(__name__)
def index_mongo():
"""
Ensure the mongo collections are indexed.
"""
db = api.common.get_conn()
log.debug("Ensuring mongo is indexed.")
db.users.ensure_index("uid", unique=True, name="unique uid")
db.groups.ensure_index("gid", unique=True, name="unique gid")
db.problems.ensure_index("pid", unique=True, name="unique pid")
db.submissions.ensure_index("tid", name="submission tids")
db.cache.ensure_index("expireAt", expireAfterSeconds=0)
db.cache.ensure_index("function", name="function")
|
<commit_before>"""
Setup for the API
"""
import api
log = api.logger.use(__name__)
def index_mongo():
"""
Ensure the mongo collections are indexed.
"""
db = api.common.get_conn()
log.debug("Ensuring mongo is indexed.")
db.users.ensure_index("uid", unique=True, name="unique uid")
db.groups.ensure_index("gid", unique=True, name="unique gid")
db.problems.ensure_index("pid", unique=True, name="unique pid")
db.submissions.ensure_index("tid", name="submission tids")
db.cache.ensure_index("expireAt", expireAfterSeconds=0)
<commit_msg>Index on the cache collection. More work could be done here<commit_after>"""
Setup for the API
"""
import api
log = api.logger.use(__name__)
def index_mongo():
"""
Ensure the mongo collections are indexed.
"""
db = api.common.get_conn()
log.debug("Ensuring mongo is indexed.")
db.users.ensure_index("uid", unique=True, name="unique uid")
db.groups.ensure_index("gid", unique=True, name="unique gid")
db.problems.ensure_index("pid", unique=True, name="unique pid")
db.submissions.ensure_index("tid", name="submission tids")
db.cache.ensure_index("expireAt", expireAfterSeconds=0)
db.cache.ensure_index("function", name="function")
|
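Editor's sketch for the record above: a rough pymongo equivalent of the new cache index. It assumes a MongoDB server listening on localhost and uses create_index(), the modern replacement for the ensure_index() call shown in the record; the "api_demo" database name is made up.

from pymongo import MongoClient

db = MongoClient("mongodb://localhost:27017")["api_demo"]

# TTL index: documents expire once their expireAt timestamp passes.
db.cache.create_index("expireAt", expireAfterSeconds=0)
# Plain single-field index so lookups by cached function name avoid a collection scan.
db.cache.create_index("function", name="function")

print(list(db.cache.index_information()))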
ab63a427361ef72f0e573654cf6100754b268616
|
l10n_es_facturae/components/edi_output_l10n_es_facturae.py
|
l10n_es_facturae/components/edi_output_l10n_es_facturae.py
|
# Copyright 2020 Creu Blanca
# @author: Enric Tobella
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo.addons.component.core import Component
class EdiOutputL10nEsFacturae(Component):
_name = "edi.output.generate.l10n_es_facturae"
_inherit = "edi.component.output.mixin"
_usage = "edi.output.generate.l10n_es_facturae"
def generate(self):
return self.exchange_record.record.get_facturae(True)[0]
|
# Copyright 2020 Creu Blanca
# @author: Enric Tobella
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo.addons.component.core import Component
class EdiOutputL10nEsFacturae(Component):
_name = "edi.output.generate.l10n_es_facturae"
_inherit = "edi.component.output.mixin"
_usage = "output.generate"
_backend_type = "l10n_es_facturae"
def generate(self):
return self.exchange_record.record.get_facturae(True)[0]
|
Fix components names using the new logic
|
[FIX] l10n_es_facturae: Fix components names using the new logic
|
Python
|
agpl-3.0
|
OCA/l10n-spain,OCA/l10n-spain,OCA/l10n-spain
|
# Copyright 2020 Creu Blanca
# @author: Enric Tobella
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo.addons.component.core import Component
class EdiOutputL10nEsFacturae(Component):
_name = "edi.output.generate.l10n_es_facturae"
_inherit = "edi.component.output.mixin"
_usage = "edi.output.generate.l10n_es_facturae"
def generate(self):
return self.exchange_record.record.get_facturae(True)[0]
[FIX] l10n_es_facturae: Fix components names using the new logic
|
# Copyright 2020 Creu Blanca
# @author: Enric Tobella
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo.addons.component.core import Component
class EdiOutputL10nEsFacturae(Component):
_name = "edi.output.generate.l10n_es_facturae"
_inherit = "edi.component.output.mixin"
_usage = "output.generate"
_backend_type = "l10n_es_facturae"
def generate(self):
return self.exchange_record.record.get_facturae(True)[0]
|
<commit_before># Copyright 2020 Creu Blanca
# @author: Enric Tobella
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo.addons.component.core import Component
class EdiOutputL10nEsFacturae(Component):
_name = "edi.output.generate.l10n_es_facturae"
_inherit = "edi.component.output.mixin"
_usage = "edi.output.generate.l10n_es_facturae"
def generate(self):
return self.exchange_record.record.get_facturae(True)[0]
<commit_msg>[FIX] l10n_es_facturae: Fix components names using the new logic<commit_after>
|
# Copyright 2020 Creu Blanca
# @author: Enric Tobella
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo.addons.component.core import Component
class EdiOutputL10nEsFacturae(Component):
_name = "edi.output.generate.l10n_es_facturae"
_inherit = "edi.component.output.mixin"
_usage = "output.generate"
_backend_type = "l10n_es_facturae"
def generate(self):
return self.exchange_record.record.get_facturae(True)[0]
|
# Copyright 2020 Creu Blanca
# @author: Enric Tobella
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo.addons.component.core import Component
class EdiOutputL10nEsFacturae(Component):
_name = "edi.output.generate.l10n_es_facturae"
_inherit = "edi.component.output.mixin"
_usage = "edi.output.generate.l10n_es_facturae"
def generate(self):
return self.exchange_record.record.get_facturae(True)[0]
[FIX] l10n_es_facturae: Fix components names using the new logic# Copyright 2020 Creu Blanca
# @author: Enric Tobella
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo.addons.component.core import Component
class EdiOutputL10nEsFacturae(Component):
_name = "edi.output.generate.l10n_es_facturae"
_inherit = "edi.component.output.mixin"
_usage = "output.generate"
_backend_type = "l10n_es_facturae"
def generate(self):
return self.exchange_record.record.get_facturae(True)[0]
|
<commit_before># Copyright 2020 Creu Blanca
# @author: Enric Tobella
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo.addons.component.core import Component
class EdiOutputL10nEsFacturae(Component):
_name = "edi.output.generate.l10n_es_facturae"
_inherit = "edi.component.output.mixin"
_usage = "edi.output.generate.l10n_es_facturae"
def generate(self):
return self.exchange_record.record.get_facturae(True)[0]
<commit_msg>[FIX] l10n_es_facturae: Fix components names using the new logic<commit_after># Copyright 2020 Creu Blanca
# @author: Enric Tobella
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo.addons.component.core import Component
class EdiOutputL10nEsFacturae(Component):
_name = "edi.output.generate.l10n_es_facturae"
_inherit = "edi.component.output.mixin"
_usage = "output.generate"
_backend_type = "l10n_es_facturae"
def generate(self):
return self.exchange_record.record.get_facturae(True)[0]
|
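Editor's sketch for the record above: the fix moves the backend flavour out of _usage ("edi.output.generate.l10n_es_facturae") and into _backend_type, so callers can look components up by the generic usage "output.generate". The toy registry below is not Odoo's component machinery; it only illustrates the lookup-by-(usage, backend_type) idea with invented names.

_registry = {}

def register(usage, backend_type):
    def wrap(cls):
        _registry[(usage, backend_type)] = cls
        return cls
    return wrap

@register("output.generate", "l10n_es_facturae")
class FacturaeOutput:
    def generate(self, record):
        return "facturae xml for %s" % record

def get_component(usage, backend_type):
    # Generic usage + backend type select the concrete implementation.
    return _registry[(usage, backend_type)]()

print(get_component("output.generate", "l10n_es_facturae").generate("INV/001"))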
277065e53aa58bac3bad3c2511ffc867984b010b
|
scoring_engine/config_loader.py
|
scoring_engine/config_loader.py
|
import configparser
import os
class ConfigLoader(object):
def __init__(self, location=None):
if location is None:
location = "../engine.conf"
config_location = os.path.join(os.path.dirname(os.path.abspath(__file__)), location)
self.parser = configparser.ConfigParser()
self.parser.read(config_location)
self.web_debug = self.parser['WEB']['debug'].lower() == 'true'
self.checks_location = self.parser['GENERAL']['checks_location']
self.check_timeout = int(self.parser['GENERAL']['check_timeout'])
self.round_time_sleep = int(self.parser['GENERAL']['round_time_sleep'])
self.worker_refresh_time = int(self.parser['GENERAL']['worker_refresh_time'])
self.timezone = self.parser['GENERAL']['timezone']
self.db_uri = self.parser['DB']['uri']
self.redis_host = self.parser['REDIS']['host']
self.redis_port = int(self.parser['REDIS']['port'])
self.redis_password = self.parser['REDIS']['password']
|
import configparser
import os
class ConfigLoader(object):
def __init__(self, location="../engine.conf"):
config_location = os.path.join(os.path.dirname(os.path.abspath(__file__)), location)
self.parser = configparser.ConfigParser()
self.parser.read(config_location)
self.web_debug = self.parser['WEB']['debug'].lower() == 'true'
self.checks_location = self.parser['GENERAL']['checks_location']
self.check_timeout = int(self.parser['GENERAL']['check_timeout'])
self.round_time_sleep = int(self.parser['GENERAL']['round_time_sleep'])
self.worker_refresh_time = int(self.parser['GENERAL']['worker_refresh_time'])
self.timezone = self.parser['GENERAL']['timezone']
self.db_uri = self.parser['DB']['uri']
self.redis_host = self.parser['REDIS']['host']
self.redis_port = int(self.parser['REDIS']['port'])
self.redis_password = self.parser['REDIS']['password']
|
Improve config loader init function
|
Improve config loader init function
|
Python
|
mit
|
pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine
|
import configparser
import os
class ConfigLoader(object):
def __init__(self, location=None):
if location is None:
location = "../engine.conf"
config_location = os.path.join(os.path.dirname(os.path.abspath(__file__)), location)
self.parser = configparser.ConfigParser()
self.parser.read(config_location)
self.web_debug = self.parser['WEB']['debug'].lower() == 'true'
self.checks_location = self.parser['GENERAL']['checks_location']
self.check_timeout = int(self.parser['GENERAL']['check_timeout'])
self.round_time_sleep = int(self.parser['GENERAL']['round_time_sleep'])
self.worker_refresh_time = int(self.parser['GENERAL']['worker_refresh_time'])
self.timezone = self.parser['GENERAL']['timezone']
self.db_uri = self.parser['DB']['uri']
self.redis_host = self.parser['REDIS']['host']
self.redis_port = int(self.parser['REDIS']['port'])
self.redis_password = self.parser['REDIS']['password']
Improve config loader init function
|
import configparser
import os
class ConfigLoader(object):
def __init__(self, location="../engine.conf"):
config_location = os.path.join(os.path.dirname(os.path.abspath(__file__)), location)
self.parser = configparser.ConfigParser()
self.parser.read(config_location)
self.web_debug = self.parser['WEB']['debug'].lower() == 'true'
self.checks_location = self.parser['GENERAL']['checks_location']
self.check_timeout = int(self.parser['GENERAL']['check_timeout'])
self.round_time_sleep = int(self.parser['GENERAL']['round_time_sleep'])
self.worker_refresh_time = int(self.parser['GENERAL']['worker_refresh_time'])
self.timezone = self.parser['GENERAL']['timezone']
self.db_uri = self.parser['DB']['uri']
self.redis_host = self.parser['REDIS']['host']
self.redis_port = int(self.parser['REDIS']['port'])
self.redis_password = self.parser['REDIS']['password']
|
<commit_before>import configparser
import os
class ConfigLoader(object):
def __init__(self, location=None):
if location is None:
location = "../engine.conf"
config_location = os.path.join(os.path.dirname(os.path.abspath(__file__)), location)
self.parser = configparser.ConfigParser()
self.parser.read(config_location)
self.web_debug = self.parser['WEB']['debug'].lower() == 'true'
self.checks_location = self.parser['GENERAL']['checks_location']
self.check_timeout = int(self.parser['GENERAL']['check_timeout'])
self.round_time_sleep = int(self.parser['GENERAL']['round_time_sleep'])
self.worker_refresh_time = int(self.parser['GENERAL']['worker_refresh_time'])
self.timezone = self.parser['GENERAL']['timezone']
self.db_uri = self.parser['DB']['uri']
self.redis_host = self.parser['REDIS']['host']
self.redis_port = int(self.parser['REDIS']['port'])
self.redis_password = self.parser['REDIS']['password']
<commit_msg>Improve config loader init function<commit_after>
|
import configparser
import os
class ConfigLoader(object):
def __init__(self, location="../engine.conf"):
config_location = os.path.join(os.path.dirname(os.path.abspath(__file__)), location)
self.parser = configparser.ConfigParser()
self.parser.read(config_location)
self.web_debug = self.parser['WEB']['debug'].lower() == 'true'
self.checks_location = self.parser['GENERAL']['checks_location']
self.check_timeout = int(self.parser['GENERAL']['check_timeout'])
self.round_time_sleep = int(self.parser['GENERAL']['round_time_sleep'])
self.worker_refresh_time = int(self.parser['GENERAL']['worker_refresh_time'])
self.timezone = self.parser['GENERAL']['timezone']
self.db_uri = self.parser['DB']['uri']
self.redis_host = self.parser['REDIS']['host']
self.redis_port = int(self.parser['REDIS']['port'])
self.redis_password = self.parser['REDIS']['password']
|
import configparser
import os
class ConfigLoader(object):
def __init__(self, location=None):
if location is None:
location = "../engine.conf"
config_location = os.path.join(os.path.dirname(os.path.abspath(__file__)), location)
self.parser = configparser.ConfigParser()
self.parser.read(config_location)
self.web_debug = self.parser['WEB']['debug'].lower() == 'true'
self.checks_location = self.parser['GENERAL']['checks_location']
self.check_timeout = int(self.parser['GENERAL']['check_timeout'])
self.round_time_sleep = int(self.parser['GENERAL']['round_time_sleep'])
self.worker_refresh_time = int(self.parser['GENERAL']['worker_refresh_time'])
self.timezone = self.parser['GENERAL']['timezone']
self.db_uri = self.parser['DB']['uri']
self.redis_host = self.parser['REDIS']['host']
self.redis_port = int(self.parser['REDIS']['port'])
self.redis_password = self.parser['REDIS']['password']
Improve config loader init functionimport configparser
import os
class ConfigLoader(object):
def __init__(self, location="../engine.conf"):
config_location = os.path.join(os.path.dirname(os.path.abspath(__file__)), location)
self.parser = configparser.ConfigParser()
self.parser.read(config_location)
self.web_debug = self.parser['WEB']['debug'].lower() == 'true'
self.checks_location = self.parser['GENERAL']['checks_location']
self.check_timeout = int(self.parser['GENERAL']['check_timeout'])
self.round_time_sleep = int(self.parser['GENERAL']['round_time_sleep'])
self.worker_refresh_time = int(self.parser['GENERAL']['worker_refresh_time'])
self.timezone = self.parser['GENERAL']['timezone']
self.db_uri = self.parser['DB']['uri']
self.redis_host = self.parser['REDIS']['host']
self.redis_port = int(self.parser['REDIS']['port'])
self.redis_password = self.parser['REDIS']['password']
|
<commit_before>import configparser
import os
class ConfigLoader(object):
def __init__(self, location=None):
if location is None:
location = "../engine.conf"
config_location = os.path.join(os.path.dirname(os.path.abspath(__file__)), location)
self.parser = configparser.ConfigParser()
self.parser.read(config_location)
self.web_debug = self.parser['WEB']['debug'].lower() == 'true'
self.checks_location = self.parser['GENERAL']['checks_location']
self.check_timeout = int(self.parser['GENERAL']['check_timeout'])
self.round_time_sleep = int(self.parser['GENERAL']['round_time_sleep'])
self.worker_refresh_time = int(self.parser['GENERAL']['worker_refresh_time'])
self.timezone = self.parser['GENERAL']['timezone']
self.db_uri = self.parser['DB']['uri']
self.redis_host = self.parser['REDIS']['host']
self.redis_port = int(self.parser['REDIS']['port'])
self.redis_password = self.parser['REDIS']['password']
<commit_msg>Improve config loader init function<commit_after>import configparser
import os
class ConfigLoader(object):
def __init__(self, location="../engine.conf"):
config_location = os.path.join(os.path.dirname(os.path.abspath(__file__)), location)
self.parser = configparser.ConfigParser()
self.parser.read(config_location)
self.web_debug = self.parser['WEB']['debug'].lower() == 'true'
self.checks_location = self.parser['GENERAL']['checks_location']
self.check_timeout = int(self.parser['GENERAL']['check_timeout'])
self.round_time_sleep = int(self.parser['GENERAL']['round_time_sleep'])
self.worker_refresh_time = int(self.parser['GENERAL']['worker_refresh_time'])
self.timezone = self.parser['GENERAL']['timezone']
self.db_uri = self.parser['DB']['uri']
self.redis_host = self.parser['REDIS']['host']
self.redis_port = int(self.parser['REDIS']['port'])
self.redis_password = self.parser['REDIS']['password']
|
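Editor's sketch for the record above: a hypothetical round trip for the simplified ConfigLoader, writing a minimal engine.conf-style file and reading it back with the same configparser calls. Section and option names mirror the class; the values are invented.

import configparser
import tempfile
import textwrap

conf = textwrap.dedent("""\
    [GENERAL]
    checks_location = ./checks
    check_timeout = 30
    round_time_sleep = 60
    worker_refresh_time = 15
    timezone = US/Pacific
    [WEB]
    debug = True
    [DB]
    uri = sqlite:////tmp/engine.db
    [REDIS]
    host = 127.0.0.1
    port = 6379
    password =
""")

with tempfile.NamedTemporaryFile("w", suffix=".conf", delete=False) as fh:
    fh.write(conf)
    path = fh.name

parser = configparser.ConfigParser()
parser.read(path)
print(parser['WEB']['debug'].lower() == 'true')   # True, same check as the class
print(int(parser['GENERAL']['check_timeout']))    # 30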
d09e2831d95a2bc045da75496c70337246e77d5f
|
BoxAndWhisker.py
|
BoxAndWhisker.py
|
from matplotlib import pyplot
from PlotInfo import *
class BoxAndWhisker(PlotInfo):
"""
Box and whisker plots
"""
def __init__(self):
super(BoxAndWhisker,self).__init__("boxplot")
self.width=None
self.color="black"
self.label = None
self.xSequence = []
def draw(self, fig, axis, transform=None):
# To be compatible with PlotInfo assumptions
self.xValues = range(1,len(self.xSequence)+1)
self.yValues = [0 for x in self.xValues]
super(BoxAndWhisker,self).draw(fig, axis)
kwdict = {}
plotHandles = axis.boxplot(self.xSequence, **kwdict)
# Picking which part of the plot to use in the legend may
# require more thought as there are multiple lines in the
# boxplot, as well as the possibility for outliers.
# Options are ['medians', 'fliers', 'whiskers', 'boxes', 'caps']
return [plotHandles['medians'], [self.label]]
|
from matplotlib import pyplot
from PlotInfo import *
from Marker import Marker
class BoxAndWhisker(PlotInfo):
"""
Box and whisker plots
"""
def __init__(self):
super(BoxAndWhisker,self).__init__("boxplot")
self.width=None
self.color="black"
self.label = None
self.xSequence = []
self.flierMarker = Marker()
self.flierMarker.marker = '+'
self.flierMarker.color = 'b'
def draw(self, fig, axis, transform=None):
# To be compatible with PlotInfo assumptions
self.xValues = range(1,len(self.xSequence)+1)
self.yValues = [0 for x in self.xValues]
super(BoxAndWhisker,self).draw(fig, axis)
kwdict = {}
if self.flierMarker.marker is not None:
kwdict["sym"] = self.flierMarker.marker
else:
kwdict["sym"] = ''
plotHandles = axis.boxplot(self.xSequence, **kwdict)
# Picking which part of the plot to use in the legend may
# require more thought as there are multiple lines in the
# boxplot, as well as the possibility for outliers.
# Options are ['medians', 'fliers', 'whiskers', 'boxes', 'caps']
return [plotHandles['medians'], [self.label]]
|
Allow flier markers in box-and-whisker plots to be modified.
|
Allow flier markers in box-and-whisker plots to be modified.
|
Python
|
bsd-3-clause
|
alexras/boomslang
|
from matplotlib import pyplot
from PlotInfo import *
class BoxAndWhisker(PlotInfo):
"""
Box and whisker plots
"""
def __init__(self):
super(BoxAndWhisker,self).__init__("boxplot")
self.width=None
self.color="black"
self.label = None
self.xSequence = []
def draw(self, fig, axis, transform=None):
# To be compatible with PlotInfo assumptions
self.xValues = range(1,len(self.xSequence)+1)
self.yValues = [0 for x in self.xValues]
super(BoxAndWhisker,self).draw(fig, axis)
kwdict = {}
plotHandles = axis.boxplot(self.xSequence, **kwdict)
# Picking which part of the plot to use in the legend may
# require more thought as there are multiple lines in the
# boxplot, as well as the possibility for outliers.
# Options are ['medians', 'fliers', 'whiskers', 'boxes', 'caps']
return [plotHandles['medians'], [self.label]]
Allow flier markers in box-and-whisker plots to be modified.
|
from matplotlib import pyplot
from PlotInfo import *
from Marker import Marker
class BoxAndWhisker(PlotInfo):
"""
Box and whisker plots
"""
def __init__(self):
super(BoxAndWhisker,self).__init__("boxplot")
self.width=None
self.color="black"
self.label = None
self.xSequence = []
self.flierMarker = Marker()
self.flierMarker.marker = '+'
self.flierMarker.color = 'b'
def draw(self, fig, axis, transform=None):
# To be compatible with PlotInfo assumptions
self.xValues = range(1,len(self.xSequence)+1)
self.yValues = [0 for x in self.xValues]
super(BoxAndWhisker,self).draw(fig, axis)
kwdict = {}
if self.flierMarker.marker is not None:
kwdict["sym"] = self.flierMarker.marker
else:
kwdict["sym"] = ''
plotHandles = axis.boxplot(self.xSequence, **kwdict)
# Picking which part of the plot to use in the legend may
# require more thought as there are multiple lines in the
# boxplot, as well as the possibility for outliers.
# Options are ['medians', 'fliers', 'whiskers', 'boxes', 'caps']
return [plotHandles['medians'], [self.label]]
|
<commit_before>from matplotlib import pyplot
from PlotInfo import *
class BoxAndWhisker(PlotInfo):
"""
Box and whisker plots
"""
def __init__(self):
super(BoxAndWhisker,self).__init__("boxplot")
self.width=None
self.color="black"
self.label = None
self.xSequence = []
def draw(self, fig, axis, transform=None):
# To be compatible with PlotInfo assumptions
self.xValues = range(1,len(self.xSequence)+1)
self.yValues = [0 for x in self.xValues]
super(BoxAndWhisker,self).draw(fig, axis)
kwdict = {}
plotHandles = axis.boxplot(self.xSequence, **kwdict)
# Picking which part of the plot to use in the legend may
# require more thought as there are multiple lines in the
# boxplot, as well as the possibility for outliers.
# Options are ['medians', 'fliers', 'whiskers', 'boxes', 'caps']
return [plotHandles['medians'], [self.label]]
<commit_msg>Allow flier markers in box-and-whisker plots to be modified.<commit_after>
|
from matplotlib import pyplot
from PlotInfo import *
from Marker import Marker
class BoxAndWhisker(PlotInfo):
"""
Box and whisker plots
"""
def __init__(self):
super(BoxAndWhisker,self).__init__("boxplot")
self.width=None
self.color="black"
self.label = None
self.xSequence = []
self.flierMarker = Marker()
self.flierMarker.marker = '+'
self.flierMarker.color = 'b'
def draw(self, fig, axis, transform=None):
# To be compatible with PlotInfo assumptions
self.xValues = range(1,len(self.xSequence)+1)
self.yValues = [0 for x in self.xValues]
super(BoxAndWhisker,self).draw(fig, axis)
kwdict = {}
if self.flierMarker.marker is not None:
kwdict["sym"] = self.flierMarker.marker
else:
kwdict["sym"] = ''
plotHandles = axis.boxplot(self.xSequence, **kwdict)
# Picking which part of the plot to use in the legend may
# require more thought as there are multiple lines in the
# boxplot, as well as the possibility for outliers.
# Options are ['medians', 'fliers', 'whiskers', 'boxes', 'caps']
return [plotHandles['medians'], [self.label]]
|
from matplotlib import pyplot
from PlotInfo import *
class BoxAndWhisker(PlotInfo):
"""
Box and whisker plots
"""
def __init__(self):
super(BoxAndWhisker,self).__init__("boxplot")
self.width=None
self.color="black"
self.label = None
self.xSequence = []
def draw(self, fig, axis, transform=None):
# To be compatible with PlotInfo assumptions
self.xValues = range(1,len(self.xSequence)+1)
self.yValues = [0 for x in self.xValues]
super(BoxAndWhisker,self).draw(fig, axis)
kwdict = {}
plotHandles = axis.boxplot(self.xSequence, **kwdict)
# Picking which part of the plot to use in the legend may
# require more thought as there are multiple lines in the
# boxplot, as well as the possibility for outliers.
# Options are ['medians', 'fliers', 'whiskers', 'boxes', 'caps']
return [plotHandles['medians'], [self.label]]
Allow flier markers in box-and-whisker plots to be modified.from matplotlib import pyplot
from PlotInfo import *
from Marker import Marker
class BoxAndWhisker(PlotInfo):
"""
Box and whisker plots
"""
def __init__(self):
super(BoxAndWhisker,self).__init__("boxplot")
self.width=None
self.color="black"
self.label = None
self.xSequence = []
self.flierMarker = Marker()
self.flierMarker.marker = '+'
self.flierMarker.color = 'b'
def draw(self, fig, axis, transform=None):
# To be compatible with PlotInfo assumptions
self.xValues = range(1,len(self.xSequence)+1)
self.yValues = [0 for x in self.xValues]
super(BoxAndWhisker,self).draw(fig, axis)
kwdict = {}
if self.flierMarker.marker is not None:
kwdict["sym"] = self.flierMarker.marker
else:
kwdict["sym"] = ''
plotHandles = axis.boxplot(self.xSequence, **kwdict)
# Picking which part of the plot to use in the legend may
# require more thought as there are multiple lines in the
# boxplot, as well as the possibility for outliers.
# Options are ['medians', 'fliers', 'whiskers', 'boxes', 'caps']
return [plotHandles['medians'], [self.label]]
|
<commit_before>from matplotlib import pyplot
from PlotInfo import *
class BoxAndWhisker(PlotInfo):
"""
Box and whisker plots
"""
def __init__(self):
super(BoxAndWhisker,self).__init__("boxplot")
self.width=None
self.color="black"
self.label = None
self.xSequence = []
def draw(self, fig, axis, transform=None):
# To be compatible with PlotInfo assumptions
self.xValues = range(1,len(self.xSequence)+1)
self.yValues = [0 for x in self.xValues]
super(BoxAndWhisker,self).draw(fig, axis)
kwdict = {}
plotHandles = axis.boxplot(self.xSequence, **kwdict)
# Picking which part of the plot to use in the legend may
# require more thought as there are multiple lines in the
# boxplot, as well as the possibility for outliers.
# Options are ['medians', 'fliers', 'whiskers', 'boxes', 'caps']
return [plotHandles['medians'], [self.label]]
<commit_msg>Allow flier markers in box-and-whisker plots to be modified.<commit_after>from matplotlib import pyplot
from PlotInfo import *
from Marker import Marker
class BoxAndWhisker(PlotInfo):
"""
Box and whisker plots
"""
def __init__(self):
super(BoxAndWhisker,self).__init__("boxplot")
self.width=None
self.color="black"
self.label = None
self.xSequence = []
self.flierMarker = Marker()
self.flierMarker.marker = '+'
self.flierMarker.color = 'b'
def draw(self, fig, axis, transform=None):
# To be compatible with PlotInfo assumptions
self.xValues = range(1,len(self.xSequence)+1)
self.yValues = [0 for x in self.xValues]
super(BoxAndWhisker,self).draw(fig, axis)
kwdict = {}
if self.flierMarker.marker is not None:
kwdict["sym"] = self.flierMarker.marker
else:
kwdict["sym"] = ''
plotHandles = axis.boxplot(self.xSequence, **kwdict)
# Picking which part of the plot to use in the legend may
# require more thought as there are multiple lines in the
# boxplot, as well as the possibility for outliers.
# Options are ['medians', 'fliers', 'whiskers', 'boxes', 'caps']
return [plotHandles['medians'], [self.label]]
|
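Editor's sketch for the record above: the new flierMarker option ultimately feeds matplotlib's boxplot() "sym" argument, which controls the outlier (flier) marker. The snippet bypasses boomslang's wrapper entirely and only shows that underlying call; the data and file name are made up.

import matplotlib
matplotlib.use("Agg")            # render off-screen so the script runs anywhere
import matplotlib.pyplot as plt

data = [1, 2, 2, 3, 3, 3, 4, 4, 9, 12]   # 9 and 12 appear as fliers

fig, (left, right) = plt.subplots(1, 2)
left.boxplot(data, sym='+')      # '+' fliers, matching the new default Marker
left.set_title("sym='+'")
right.boxplot(data, sym='')      # empty string hides fliers, the None branch above
right.set_title("sym=''")
fig.savefig("boxplot_fliers.png")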
04d5fef36b778d2a2d1f217e85ed919e50c75c9a
|
es_enas/controllers/regularized_evolution_controller.py
|
es_enas/controllers/regularized_evolution_controller.py
|
# coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Regularized Evolution controller from PyGlove. Similar to NEAT."""
import numpy as np
import pyglove as pg
from es_enas.controllers import base_controller
class RegularizedEvolutionController(base_controller.BaseController):
"""Regularized Evolution Controller."""
def __init__(self, dna_spec, batch_size,
**kwargs):
"""Initialization. See base class for more details."""
super().__init__(dna_spec, batch_size)
population_size = self._batch_size
tournament_size = int(np.sqrt(population_size))
self._controller = pg.generators.RegularizedEvolution(
population_size=population_size,
tournament_size=tournament_size,
mutator=pg.generators.evolution_mutators.Uniform()) # pytype: disable=wrong-arg-types # gen-stub-imports
self._controller.setup(self._dna_spec)
|
# coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Regularized Evolution controller from PyGlove. Similar to NEAT."""
import numpy as np
import pyglove as pg
from es_enas.controllers import base_controller
class RegularizedEvolutionController(base_controller.BaseController):
"""Regularized Evolution Controller."""
def __init__(self, dna_spec, batch_size,
**kwargs):
"""Initialization. See base class for more details."""
super().__init__(dna_spec, batch_size)
population_size = self._batch_size
tournament_size = int(np.sqrt(population_size))
self._controller = pg.evolution.regularized_evolution(
population_size=population_size,
tournament_size=tournament_size,
mutator=pg.evolution.mutators.Uniform()) # pytype: disable=wrong-arg-types # gen-stub-imports
self._controller.setup(self._dna_spec)
|
Replace deprecated PyGlove symbols to new ones.
|
Replace deprecated PyGlove symbols to new ones.
PiperOrigin-RevId: 408683720
|
Python
|
apache-2.0
|
google-research/google-research,google-research/google-research,google-research/google-research,google-research/google-research,google-research/google-research,google-research/google-research,google-research/google-research,google-research/google-research,google-research/google-research,google-research/google-research,google-research/google-research
|
# coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Regularized Evolution controller from PyGlove. Similar to NEAT."""
import numpy as np
import pyglove as pg
from es_enas.controllers import base_controller
class RegularizedEvolutionController(base_controller.BaseController):
"""Regularized Evolution Controller."""
def __init__(self, dna_spec, batch_size,
**kwargs):
"""Initialization. See base class for more details."""
super().__init__(dna_spec, batch_size)
population_size = self._batch_size
tournament_size = int(np.sqrt(population_size))
self._controller = pg.generators.RegularizedEvolution(
population_size=population_size,
tournament_size=tournament_size,
mutator=pg.generators.evolution_mutators.Uniform()) # pytype: disable=wrong-arg-types # gen-stub-imports
self._controller.setup(self._dna_spec)
Replace deprecated PyGlove symbols to new ones.
PiperOrigin-RevId: 408683720
|
# coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Regularized Evolution controller from PyGlove. Similar to NEAT."""
import numpy as np
import pyglove as pg
from es_enas.controllers import base_controller
class RegularizedEvolutionController(base_controller.BaseController):
"""Regularized Evolution Controller."""
def __init__(self, dna_spec, batch_size,
**kwargs):
"""Initialization. See base class for more details."""
super().__init__(dna_spec, batch_size)
population_size = self._batch_size
tournament_size = int(np.sqrt(population_size))
self._controller = pg.evolution.regularized_evolution(
population_size=population_size,
tournament_size=tournament_size,
mutator=pg.evolution.mutators.Uniform()) # pytype: disable=wrong-arg-types # gen-stub-imports
self._controller.setup(self._dna_spec)
|
<commit_before># coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Regularized Evolution controller from PyGlove. Similar to NEAT."""
import numpy as np
import pyglove as pg
from es_enas.controllers import base_controller
class RegularizedEvolutionController(base_controller.BaseController):
"""Regularized Evolution Controller."""
def __init__(self, dna_spec, batch_size,
**kwargs):
"""Initialization. See base class for more details."""
super().__init__(dna_spec, batch_size)
population_size = self._batch_size
tournament_size = int(np.sqrt(population_size))
self._controller = pg.generators.RegularizedEvolution(
population_size=population_size,
tournament_size=tournament_size,
mutator=pg.generators.evolution_mutators.Uniform()) # pytype: disable=wrong-arg-types # gen-stub-imports
self._controller.setup(self._dna_spec)
<commit_msg>Replace deprecated PyGlove symbols to new ones.
PiperOrigin-RevId: 408683720<commit_after>
|
# coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Regularized Evolution controller from PyGlove. Similar to NEAT."""
import numpy as np
import pyglove as pg
from es_enas.controllers import base_controller
class RegularizedEvolutionController(base_controller.BaseController):
"""Regularized Evolution Controller."""
def __init__(self, dna_spec, batch_size,
**kwargs):
"""Initialization. See base class for more details."""
super().__init__(dna_spec, batch_size)
population_size = self._batch_size
tournament_size = int(np.sqrt(population_size))
self._controller = pg.evolution.regularized_evolution(
population_size=population_size,
tournament_size=tournament_size,
mutator=pg.evolution.mutators.Uniform()) # pytype: disable=wrong-arg-types # gen-stub-imports
self._controller.setup(self._dna_spec)
|
# coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Regularized Evolution controller from PyGlove. Similar to NEAT."""
import numpy as np
import pyglove as pg
from es_enas.controllers import base_controller
class RegularizedEvolutionController(base_controller.BaseController):
"""Regularized Evolution Controller."""
def __init__(self, dna_spec, batch_size,
**kwargs):
"""Initialization. See base class for more details."""
super().__init__(dna_spec, batch_size)
population_size = self._batch_size
tournament_size = int(np.sqrt(population_size))
self._controller = pg.generators.RegularizedEvolution(
population_size=population_size,
tournament_size=tournament_size,
mutator=pg.generators.evolution_mutators.Uniform()) # pytype: disable=wrong-arg-types # gen-stub-imports
self._controller.setup(self._dna_spec)
Replace deprecated PyGlove symbols to new ones.
PiperOrigin-RevId: 408683720# coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Regularized Evolution controller from PyGlove. Similar to NEAT."""
import numpy as np
import pyglove as pg
from es_enas.controllers import base_controller
class RegularizedEvolutionController(base_controller.BaseController):
"""Regularized Evolution Controller."""
def __init__(self, dna_spec, batch_size,
**kwargs):
"""Initialization. See base class for more details."""
super().__init__(dna_spec, batch_size)
population_size = self._batch_size
tournament_size = int(np.sqrt(population_size))
self._controller = pg.evolution.regularized_evolution(
population_size=population_size,
tournament_size=tournament_size,
mutator=pg.evolution.mutators.Uniform()) # pytype: disable=wrong-arg-types # gen-stub-imports
self._controller.setup(self._dna_spec)
|
<commit_before># coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Regularized Evolution controller from PyGlove. Similar to NEAT."""
import numpy as np
import pyglove as pg
from es_enas.controllers import base_controller
class RegularizedEvolutionController(base_controller.BaseController):
"""Regularized Evolution Controller."""
def __init__(self, dna_spec, batch_size,
**kwargs):
"""Initialization. See base class for more details."""
super().__init__(dna_spec, batch_size)
population_size = self._batch_size
tournament_size = int(np.sqrt(population_size))
self._controller = pg.generators.RegularizedEvolution(
population_size=population_size,
tournament_size=tournament_size,
mutator=pg.generators.evolution_mutators.Uniform()) # pytype: disable=wrong-arg-types # gen-stub-imports
self._controller.setup(self._dna_spec)
<commit_msg>Replace deprecated PyGlove symbols to new ones.
PiperOrigin-RevId: 408683720<commit_after># coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Regularized Evolution controller from PyGlove. Similar to NEAT."""
import numpy as np
import pyglove as pg
from es_enas.controllers import base_controller
class RegularizedEvolutionController(base_controller.BaseController):
"""Regularized Evolution Controller."""
def __init__(self, dna_spec, batch_size,
**kwargs):
"""Initialization. See base class for more details."""
super().__init__(dna_spec, batch_size)
population_size = self._batch_size
tournament_size = int(np.sqrt(population_size))
self._controller = pg.evolution.regularized_evolution(
population_size=population_size,
tournament_size=tournament_size,
mutator=pg.evolution.mutators.Uniform()) # pytype: disable=wrong-arg-types # gen-stub-imports
self._controller.setup(self._dna_spec)
|
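A minimal sketch of the symbol migration captured in the record above, assuming the new pg.evolution names shown in its new_contents are what the installed pyglove exposes; the deprecated spellings appear only as comments for comparison.
import numpy as np
import pyglove as pg # assumed to provide pg.evolution as in the diff above

def build_regularized_evolution(dna_spec, batch_size):
    # population sized from the batch, tournament size ~ sqrt(population)
    population_size = batch_size
    tournament_size = int(np.sqrt(population_size))
    # deprecated: pg.generators.RegularizedEvolution(...) with
    # pg.generators.evolution_mutators.Uniform()
    controller = pg.evolution.regularized_evolution(
        population_size=population_size,
        tournament_size=tournament_size,
        mutator=pg.evolution.mutators.Uniform())
    controller.setup(dna_spec)
    return controller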
abbae774903165f19d144905ad77553ec913c78d
|
saves.py
|
saves.py
|
import os
import json
from serializable import Serializable
class Saves: #Only partially implemented- only works for PlayerCharacters
#Because we want the user to be able to play on whatever world they want with whatever character
#they want, characters have to be stored independently of everything else
#We need to implement an aspect of this for things like the map, item locations, npc locations,
#And everything that has to do with the world state in a different location.
@staticmethod
def store(obj): #Currently only works for player characters
serialization = obj.serialize()
if obj.__class__.__name__ == "PlayerCharacter":
with open(os.path.join("characters", obj.name), 'w') as f:
json.dump(serialization, f)
else:
pass #Needs to be implemented for saving the map and world state
@staticmethod
def unstore(name, path): #Currently only works for player characters
if os.path.isfile(os.path.join(path, name)):
with open(os.path.join(path, name), 'r') as f:
saved = json.load(f)
return Serializable.deserialize(saved)
return None
|
import os
import json
from serializable import Serializable
class Saves: #Only partially implemented- only works for PlayerCharacters
#Because we want the user to be able to play on whatever world they want with whatever character
#they want, characters have to be stored independently of everything else
#We need to implement an aspect of this for things like the map, item locations, npc locations,
#And everything that has to do with the world state in a different location.
@staticmethod
def store(obj): #Currently only works for player characters
serialization = obj.serialize()
if obj.__class__.__name__ == "PlayerCharacter":
if not os.path.isdir("characters"):
os.makedirs("characters")
with open(os.path.join("characters", obj.name), 'w+') as f:
json.dump(serialization, f)
else:
pass #Needs to be implemented for saving the map and world state
@staticmethod
def unstore(name, path): #Currently only works for player characters
if os.path.isfile(os.path.join(path, name)):
with open(os.path.join(path, name), 'r') as f:
saved = json.load(f)
return Serializable.deserialize(saved)
return None
|
Fix for non-existent character folder bug
|
Fix for non-existent character folder bug
|
Python
|
mit
|
benjamincongdon/adept
|
import os
import json
from serializable import Serializable
class Saves: #Only partially implemented- only works for PlayerCharacters
#Because we want the user to be able to play on whatever world they want with whatever character
#they want, characters have to be stored independently of everything else
#We need to implement an aspect of this for things like the map, item locations, npc locations,
#And everything that has to do with the world state in a different location.
@staticmethod
def store(obj): #Currently only works for player characters
serialization = obj.serialize()
if obj.__class__.__name__ == "PlayerCharacter":
with open(os.path.join("characters", obj.name), 'w') as f:
json.dump(serialization, f)
else:
pass #Needs to be implemented for saving the map and world state
@staticmethod
def unstore(name, path): #Currently only works for player characters
if os.path.isfile(os.path.join(path, name)):
with open(os.path.join(path, name), 'r') as f:
saved = json.load(f)
return Serializable.deserialize(saved)
return NoneFix for non-existent character folder bug
|
import os
import json
from serializable import Serializable
class Saves: #Only partially implemented- only works for PlayerCharacters
#Because we want the user to be able to play on whatever world they want with whatever character
#they want, characters have to be stored independently of everything else
#We need to implement an aspect of this for things like the map, item locations, npc locations,
#And everything that has to do with the world state in a different location.
@staticmethod
def store(obj): #Currently only works for player characters
serialization = obj.serialize()
if obj.__class__.__name__ == "PlayerCharacter":
if not os.path.isdir("characters"):
os.makedirs("characters")
with open(os.path.join("characters", obj.name), 'w+') as f:
json.dump(serialization, f)
else:
pass #Needs to be implemented for saving the map and world state
@staticmethod
def unstore(name, path): #Currently only works for player characters
if os.path.isfile(os.path.join(path, name)):
with open(os.path.join(path, name), 'r') as f:
saved = json.load(f)
return Serializable.deserialize(saved)
return None
|
<commit_before>import os
import json
from serializable import Serializable
class Saves: #Only partially implemented- only works for PlayerCharacters
#Because we want the user to be able to play on whatever world they want with whatever character
#they want, characters have to be stored independently of everything else
#We need to implement an aspect of this for things like the map, item locations, npc locations,
#And everything that has to do with the world state in a different location.
@staticmethod
def store(obj): #Currently only works for player characters
serialization = obj.serialize()
if obj.__class__.__name__ == "PlayerCharacter":
with open(os.path.join("characters", obj.name), 'w') as f:
json.dump(serialization, f)
else:
pass #Needs to be implemented for saving the map and world state
@staticmethod
def unstore(name, path): #Currently only works for player characters
if os.path.isfile(os.path.join(path, name)):
with open(os.path.join(path, name), 'r') as f:
saved = json.load(f)
return Serializable.deserialize(saved)
return None<commit_msg>Fix for non-existent character folder bug<commit_after>
|
import os
import json
from serializable import Serializable
class Saves: #Only partially implemented- only works for PlayerCharacters
#Because we want the user to be able to play on whatever world they want with whatever character
#they want, characters have to be stored independently of everything else
#We need to implement an aspect of this for things like the map, item locations, npc locations,
#And everything that has to do with the world state in a different location.
@staticmethod
def store(obj): #Currently only works for player characters
serialization = obj.serialize()
if obj.__class__.__name__ == "PlayerCharacter":
if not os.path.isdir("characters"):
os.makedirs("characters")
with open(os.path.join("characters", obj.name), 'w+') as f:
json.dump(serialization, f)
else:
pass #Needs to be implemented for saving the map and world state
@staticmethod
def unstore(name, path): #Currently only works for player characters
if os.path.isfile(os.path.join(path, name)):
with open(os.path.join(path, name), 'r') as f:
saved = json.load(f)
return Serializable.deserialize(saved)
return None
|
import os
import json
from serializable import Serializable
class Saves: #Only partially implemented- only works for PlayerCharacters
#Because we want the user to be able to play on whatever world they want with whatever character
#they want, characters have to be stored independently of everything else
#We need to implement an aspect of this for things like the map, item locations, npc locations,
#And everything that has to do with the world state in a different location.
@staticmethod
def store(obj): #Currently only works for player characters
serialization = obj.serialize()
if obj.__class__.__name__ == "PlayerCharacter":
with open(os.path.join("characters", obj.name), 'w') as f:
json.dump(serialization, f)
else:
pass #Needs to be implemented for saving the map and world state
@staticmethod
def unstore(name, path): #Currently only works for player characters
if os.path.isfile(os.path.join(path, name)):
with open(os.path.join(path, name), 'r') as f:
saved = json.load(f)
return Serializable.deserialize(saved)
return NoneFix for non-existent character folder bugimport os
import json
from serializable import Serializable
class Saves: #Only partially implemented- only works for PlayerCharacters
#Because we want the user to be able to play on whatever world they want with whatever character
#they want, characters have to be stored independently of everything else
#We need to implement an aspect of this for things like the map, item locations, npc locations,
#And everything that has to do with the world state in a different location.
@staticmethod
def store(obj): #Currently only works for player characters
serialization = obj.serialize()
if obj.__class__.__name__ == "PlayerCharacter":
if not os.path.isdir("characters"):
os.makedirs("characters")
with open(os.path.join("characters", obj.name), 'w+') as f:
json.dump(serialization, f)
else:
pass #Needs to be implemented for saving the map and world state
@staticmethod
def unstore(name, path): #Currently only works for player characters
if os.path.isfile(os.path.join(path, name)):
with open(os.path.join(path, name), 'r') as f:
saved = json.load(f)
return Serializable.deserialize(saved)
return None
|
<commit_before>import os
import json
from serializable import Serializable
class Saves: #Only partially implemented- only works for PlayerCharacters
#Because we want the user to be able to play on whatever world they want with whatever character
#they want, characters have to be stored independently of everything else
#We need to implement an aspect of this for things like the map, item locations, npc locations,
#And everything that has to do with the world state in a different location.
@staticmethod
def store(obj): #Currently only works for player characters
serialization = obj.serialize()
if obj.__class__.__name__ == "PlayerCharacter":
with open(os.path.join("characters", obj.name), 'w') as f:
json.dump(serialization, f)
else:
pass #Needs to be implemented for saving the map and world state
@staticmethod
def unstore(name, path): #Currently only works for player characters
if os.path.isfile(os.path.join(path, name)):
with open(os.path.join(path, name), 'r') as f:
saved = json.load(f)
return Serializable.deserialize(saved)
return None<commit_msg>Fix for non-existent character folder bug<commit_after>import os
import json
from serializable import Serializable
class Saves: #Only partially implemented- only works for PlayerCharacters
#Because we want the user to be able to play on whatever world they want with whatever character
#they want, characters have to be stored independently of everything else
#We need to implement an aspect of this for things like the map, item locations, npc locations,
#And everything that has to do with the world state in a different location.
@staticmethod
def store(obj): #Currently only works for player characters
serialization = obj.serialize()
if obj.__class__.__name__ == "PlayerCharacter":
if not os.path.isdir("characters"):
os.makedirs("characters")
with open(os.path.join("characters", obj.name), 'w+') as f:
json.dump(serialization, f)
else:
pass #Needs to be implemented for saving the map and world state
@staticmethod
def unstore(name, path): #Currently only works for player characters
if os.path.isfile(os.path.join(path, name)):
with open(os.path.join(path, name), 'r') as f:
saved = json.load(f)
return Serializable.deserialize(saved)
return None
|
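A hedged alternative to the explicit isdir() check added in the commit above, assuming Python 3: os.makedirs with exist_ok=True creates the save folder only when it is missing and is safe to call unconditionally. The function name and base_dir default below are illustrative, not the repo's code.
import json
import os

def store_character(name, serialization, base_dir="characters"):
    # create the character folder on first use; no error if it already exists
    os.makedirs(base_dir, exist_ok=True)
    with open(os.path.join(base_dir, name), "w") as f:
        json.dump(serialization, f)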
c4f7530da8d0a94d8fc7d51a5f0d6ad653d16196
|
stack-builder/hiera_config.py
|
stack-builder/hiera_config.py
|
#!/usr/bin/env python
"""
stack-builder.hiera_config
~~~~~~~~~~~~~~~~~~~~~~~~~~
This module will read metadata set during instance
launch and override any yaml under the /etc/puppet/data
directory (except data_mappings) that has a key matching
the metadata
"""
import yaml
import os
hiera_dir = '/etc/puppet/data'
metadata_path = '/root/config.yaml'
#debug
#metadata_path = './sample.json'
#hiera_dir = './openstack-installer/data/'
# Child processes cannot set environment variables, so
# create a bash file to set some exports for facter
def facter_config():
with open(metadata_path, 'r') as metadata:
meta = yaml.load(metadata.read())
print meta
with open('/root/fact_exports', 'w') as facts:
for key,value in meta.items():
# Things with spaces can't be exported
if ' ' not in str(value):
facts.write('FACTER_' + str(key) + '="' + str(value) + '"\n')
#TODO
def hostname_config():
with open(metadata_path, 'r') as metadata:
meta = yaml.load(metadata.read())
with open('/root/openstack-installer/manifests/setup.pp', 'a') as facts:
for key,value in meta.items():
pass
facter_config()
|
#!/usr/bin/env python
"""
stack-builder.hiera_config
~~~~~~~~~~~~~~~~~~~~~~~~~~
This module will read metadata set during instance
launch and override any yaml under the /etc/puppet/data
directory (except data_mappings) that has a key matching
the metadata
"""
import yaml
import os
hiera_dir = '/etc/puppet/data'
metadata_path = '/root/config.yaml'
#debug
#metadata_path = './sample.json'
#hiera_dir = './openstack-installer/data/'
# Child processes cannot set environment variables, so
# create a bash file to set some exports for facter
def facter_config():
with open(metadata_path, 'r') as metadata:
meta = yaml.load(metadata.read())
print meta
with open('/root/fact_exports', 'w') as facts:
for key,value in meta.items():
# Things with spaces can't be exported
if ' ' not in str(value):
facts.write('FACTER_' + str(key) + '=' + str(value) + '\n')
#TODO
def hostname_config():
with open(metadata_path, 'r') as metadata:
meta = yaml.load(metadata.read())
with open('/root/openstack-installer/manifests/setup.pp', 'a') as facts:
for key,value in meta.items():
pass
facter_config()
|
Revert "Wrap fact exports in quotes"
|
Revert "Wrap fact exports in quotes"
This reverts commit bf5b568b05066b3d31b3c7c1f56ef86d4c5c3dca.
Conflicts:
stack-builder/hiera_config.py
|
Python
|
apache-2.0
|
CiscoSystems/puppet_openstack_builder--to-be-deleted,michaeltchapman/vagrant-consul,CiscoSystems/puppet_openstack_builder--to-be-deleted,michaeltchapman/puppet_openstack_builder,CiscoSystems/puppet_openstack_builder--to-be-deleted,michaeltchapman/vagrant-consul,CiscoSystems/openstack-installer--to-be-replaced-by-puppet_openstack_builder,CiscoSystems/puppet_openstack_builder,CiscoSystems/puppet_openstack_builder,phchoic/puppet_openstack_builder,phchoic/puppet_openstack_builder,phchoic/puppet_openstack_builder,CiscoSystems/openstack-installer--to-be-replaced-by-puppet_openstack_builder,michaeltchapman/puppet_openstack_builder,michaeltchapman/puppet_openstack_builder,CiscoSystems/openstack-installer--to-be-replaced-by-puppet_openstack_builder
|
#!/usr/bin/env python
"""
stack-builder.hiera_config
~~~~~~~~~~~~~~~~~~~~~~~~~~
This module will read metadata set during instance
launch and override any yaml under the /etc/puppet/data
directory (except data_mappings) that has a key matching
the metadata
"""
import yaml
import os
hiera_dir = '/etc/puppet/data'
metadata_path = '/root/config.yaml'
#debug
#metadata_path = './sample.json'
#hiera_dir = './openstack-installer/data/'
# Child processes cannot set environment variables, so
# create a bash file to set some exports for facter
def facter_config():
with open(metadata_path, 'r') as metadata:
meta = yaml.load(metadata.read())
print meta
with open('/root/fact_exports', 'w') as facts:
for key,value in meta.items():
# Things with spaces can't be exported
if ' ' not in str(value):
facts.write('FACTER_' + str(key) + '="' + str(value) + '"\n')
#TODO
def hostname_config():
with open(metadata_path, 'r') as metadata:
meta = yaml.load(metadata.read())
with open('/root/openstack-installer/manifests/setup.pp', 'a') as facts:
for key,value in meta.items():
pass
facter_config()
Revert "Wrap fact exports in quotes"
This reverts commit bf5b568b05066b3d31b3c7c1f56ef86d4c5c3dca.
Conflicts:
stack-builder/hiera_config.py
|
#!/usr/bin/env python
"""
stack-builder.hiera_config
~~~~~~~~~~~~~~~~~~~~~~~~~~
This module will read metadata set during instance
launch and override any yaml under the /etc/puppet/data
directory (except data_mappings) that has a key matching
the metadata
"""
import yaml
import os
hiera_dir = '/etc/puppet/data'
metadata_path = '/root/config.yaml'
#debug
#metadata_path = './sample.json'
#hiera_dir = './openstack-installer/data/'
# Child processes cannot set environment variables, so
# create a bash file to set some exports for facter
def facter_config():
with open(metadata_path, 'r') as metadata:
meta = yaml.load(metadata.read())
print meta
with open('/root/fact_exports', 'w') as facts:
for key,value in meta.items():
# Things with spaces can't be exported
if ' ' not in str(value):
facts.write('FACTER_' + str(key) + '=' + str(value) + '\n')
#TODO
def hostname_config():
with open(metadata_path, 'r') as metadata:
meta = yaml.load(metadata.read())
with open('/root/openstack-installer/manifests/setup.pp', 'a') as facts:
for key,value in meta.items():
pass
facter_config()
|
<commit_before>#!/usr/bin/env python
"""
stack-builder.hiera_config
~~~~~~~~~~~~~~~~~~~~~~~~~~
This module will read metadata set during instance
launch and override any yaml under the /etc/puppet/data
directory (except data_mappings) that has a key matching
the metadata
"""
import yaml
import os
hiera_dir = '/etc/puppet/data'
metadata_path = '/root/config.yaml'
#debug
#metadata_path = './sample.json'
#hiera_dir = './openstack-installer/data/'
# Child processes cannot set environment variables, so
# create a bash file to set some exports for facter
def facter_config():
with open(metadata_path, 'r') as metadata:
meta = yaml.load(metadata.read())
print meta
with open('/root/fact_exports', 'w') as facts:
for key,value in meta.items():
# Things with spaces can't be exported
if ' ' not in str(value):
facts.write('FACTER_' + str(key) + '="' + str(value) + '"\n')
#TODO
def hostname_config():
with open(metadata_path, 'r') as metadata:
meta = yaml.load(metadata.read())
with open('/root/openstack-installer/manifests/setup.pp', 'a') as facts:
for key,value in meta.items():
pass
facter_config()
<commit_msg>Revert "Wrap fact exports in quotes"
This reverts commit bf5b568b05066b3d31b3c7c1f56ef86d4c5c3dca.
Conflicts:
stack-builder/hiera_config.py<commit_after>
|
#!/usr/bin/env python
"""
stack-builder.hiera_config
~~~~~~~~~~~~~~~~~~~~~~~~~~
This module will read metadata set during instance
launch and override any yaml under the /etc/puppet/data
directory (except data_mappings) that has a key matching
the metadata
"""
import yaml
import os
hiera_dir = '/etc/puppet/data'
metadata_path = '/root/config.yaml'
#debug
#metadata_path = './sample.json'
#hiera_dir = './openstack-installer/data/'
# Child processes cannot set environment variables, so
# create a bash file to set some exports for facter
def facter_config():
with open(metadata_path, 'r') as metadata:
meta = yaml.load(metadata.read())
print meta
with open('/root/fact_exports', 'w') as facts:
for key,value in meta.items():
# Things with spaces can't be exported
if ' ' not in str(value):
facts.write('FACTER_' + str(key) + '=' + str(value) + '\n')
#TODO
def hostname_config():
with open(metadata_path, 'r') as metadata:
meta = yaml.load(metadata.read())
with open('/root/openstack-installer/manifests/setup.pp', 'a') as facts:
for key,value in meta.items():
pass
facter_config()
|
#!/usr/bin/env python
"""
stack-builder.hiera_config
~~~~~~~~~~~~~~~~~~~~~~~~~~
This module will read metadata set during instance
launch and override any yaml under the /etc/puppet/data
directory (except data_mappings) that has a key matching
the metadata
"""
import yaml
import os
hiera_dir = '/etc/puppet/data'
metadata_path = '/root/config.yaml'
#debug
#metadata_path = './sample.json'
#hiera_dir = './openstack-installer/data/'
# Child processes cannot set environment variables, so
# create a bash file to set some exports for facter
def facter_config():
with open(metadata_path, 'r') as metadata:
meta = yaml.load(metadata.read())
print meta
with open('/root/fact_exports', 'w') as facts:
for key,value in meta.items():
# Things with spaces can't be exported
if ' ' not in str(value):
facts.write('FACTER_' + str(key) + '="' + str(value) + '"\n')
#TODO
def hostname_config():
with open(metadata_path, 'r') as metadata:
meta = yaml.load(metadata.read())
with open('/root/openstack-installer/manifests/setup.pp', 'a') as facts:
for key,value in meta.items():
pass
facter_config()
Revert "Wrap fact exports in quotes"
This reverts commit bf5b568b05066b3d31b3c7c1f56ef86d4c5c3dca.
Conflicts:
stack-builder/hiera_config.py#!/usr/bin/env python
"""
stack-builder.hiera_config
~~~~~~~~~~~~~~~~~~~~~~~~~~
This module will read metadata set during instance
launch and override any yaml under the /etc/puppet/data
directory (except data_mappings) that has a key matching
the metadata
"""
import yaml
import os
hiera_dir = '/etc/puppet/data'
metadata_path = '/root/config.yaml'
#debug
#metadata_path = './sample.json'
#hiera_dir = './openstack-installer/data/'
# Child processes cannot set environment variables, so
# create a bash file to set some exports for facter
def facter_config():
with open(metadata_path, 'r') as metadata:
meta = yaml.load(metadata.read())
print meta
with open('/root/fact_exports', 'w') as facts:
for key,value in meta.items():
# Things with spaces can't be exported
if ' ' not in str(value):
facts.write('FACTER_' + str(key) + '=' + str(value) + '\n')
#TODO
def hostname_config():
with open(metadata_path, 'r') as metadata:
meta = yaml.load(metadata.read())
with open('/root/openstack-installer/manifests/setup.pp', 'a') as facts:
for key,value in meta.items():
pass
facter_config()
|
<commit_before>#!/usr/bin/env python
"""
stack-builder.hiera_config
~~~~~~~~~~~~~~~~~~~~~~~~~~
This module will read metadata set during instance
launch and override any yaml under the /etc/puppet/data
directory (except data_mappings) that has a key matching
the metadata
"""
import yaml
import os
hiera_dir = '/etc/puppet/data'
metadata_path = '/root/config.yaml'
#debug
#metadata_path = './sample.json'
#hiera_dir = './openstack-installer/data/'
# Child processes cannot set environment variables, so
# create a bash file to set some exports for facter
def facter_config():
with open(metadata_path, 'r') as metadata:
meta = yaml.load(metadata.read())
print meta
with open('/root/fact_exports', 'w') as facts:
for key,value in meta.items():
# Things with spaces can't be exported
if ' ' not in str(value):
facts.write('FACTER_' + str(key) + '="' + str(value) + '"\n')
#TODO
def hostname_config():
with open(metadata_path, 'r') as metadata:
meta = yaml.load(metadata.read())
with open('/root/openstack-installer/manifests/setup.pp', 'a') as facts:
for key,value in meta.items():
pass
facter_config()
<commit_msg>Revert "Wrap fact exports in quotes"
This reverts commit bf5b568b05066b3d31b3c7c1f56ef86d4c5c3dca.
Conflicts:
stack-builder/hiera_config.py<commit_after>#!/usr/bin/env python
"""
stack-builder.hiera_config
~~~~~~~~~~~~~~~~~~~~~~~~~~
This module will read metadata set during instance
launch and override any yaml under the /etc/puppet/data
directory (except data_mappings) that has a key matching
the metadata
"""
import yaml
import os
hiera_dir = '/etc/puppet/data'
metadata_path = '/root/config.yaml'
#debug
#metadata_path = './sample.json'
#hiera_dir = './openstack-installer/data/'
# Child processes cannot set environment variables, so
# create a bash file to set some exports for facter
def facter_config():
with open(metadata_path, 'r') as metadata:
meta = yaml.load(metadata.read())
print meta
with open('/root/fact_exports', 'w') as facts:
for key,value in meta.items():
# Things with spaces can't be exported
if ' ' not in str(value):
facts.write('FACTER_' + str(key) + '=' + str(value) + '\n')
#TODO
def hostname_config():
with open(metadata_path, 'r') as metadata:
meta = yaml.load(metadata.read())
with open('/root/openstack-installer/manifests/setup.pp', 'a') as facts:
for key,value in meta.items():
pass
facter_config()
|
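A minimal sketch of the line format this revert returns to: unquoted FACTER_<key>=<value> pairs, with space-containing values skipped rather than escaped. If escaping were ever wanted instead of skipping, Python 3's shlex.quote would be one option, but that goes beyond what the commit itself does; the example key/value in the comment is hypothetical.
def fact_lines(meta):
    # yields e.g. 'FACTER_build_server_ip=192.168.2.10\n' (hypothetical pair)
    for key, value in meta.items():
        if ' ' not in str(value):
            yield 'FACTER_' + str(key) + '=' + str(value) + '\n'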
474eda82f332a645193c1806dbaf840b8d506a65
|
sigma_core/serializers/cluster.py
|
sigma_core/serializers/cluster.py
|
from rest_framework import serializers
from sigma_core.models.cluster import Cluster
from sigma_core.serializers.group import GroupSerializer
class BasicClusterSerializer(serializers.ModelSerializer):
"""
Serialize Cluster model without memberships.
"""
class Meta:
model = Cluster
exclude = ('resp_group',
'req_rank_invite',
'req_rank_kick',
'req_rank_accept_join_requests',
'req_rank_promote',
'req_rank_demote',
'req_rank_modify_group_infos',
'default_member_rank',
'protected',
'private')
from sigma_core.serializers.user import UserWithPermsSerializer
class ClusterSerializer(BasicClusterSerializer):
"""
Serialize Cluster model with memberships.
"""
class Meta(BasicClusterSerializer.Meta):
pass
users = UserWithPermsSerializer(read_only=True, many=True)
|
from rest_framework import serializers
from sigma_core.models.cluster import Cluster
from sigma_core.serializers.group import GroupSerializer
class BasicClusterSerializer(serializers.ModelSerializer):
"""
Serialize Cluster model without memberships.
"""
class Meta:
model = Cluster
exclude = ('resp_group',
'req_rank_invite',
'req_rank_kick',
'req_rank_accept_join_requests',
'req_rank_promote',
'req_rank_demote',
'req_rank_modify_group_infos',
'default_member_rank',
'protected',
'private')
class ClusterSerializer(BasicClusterSerializer):
"""
Serialize Cluster model with memberships.
"""
class Meta(BasicClusterSerializer.Meta):
pass
users_ids = serializers.PrimaryKeyRelatedField(read_only=True, many=True, source='users')
|
Use only foreign keys in Cluster serialisation and add _id suffixes
|
Use only foreign keys in Cluster serialisation and add _id suffixes
|
Python
|
agpl-3.0
|
ProjetSigma/backend,ProjetSigma/backend
|
from rest_framework import serializers
from sigma_core.models.cluster import Cluster
from sigma_core.serializers.group import GroupSerializer
class BasicClusterSerializer(serializers.ModelSerializer):
"""
Serialize Cluster model without memberships.
"""
class Meta:
model = Cluster
exclude = ('resp_group',
'req_rank_invite',
'req_rank_kick',
'req_rank_accept_join_requests',
'req_rank_promote',
'req_rank_demote',
'req_rank_modify_group_infos',
'default_member_rank',
'protected',
'private')
from sigma_core.serializers.user import UserWithPermsSerializer
class ClusterSerializer(BasicClusterSerializer):
"""
Serialize Cluster model with memberships.
"""
class Meta(BasicClusterSerializer.Meta):
pass
users = UserWithPermsSerializer(read_only=True, many=True)
Use only foreign keys in Cluster serialisation and add _id suffixes
|
from rest_framework import serializers
from sigma_core.models.cluster import Cluster
from sigma_core.serializers.group import GroupSerializer
class BasicClusterSerializer(serializers.ModelSerializer):
"""
Serialize Cluster model without memberships.
"""
class Meta:
model = Cluster
exclude = ('resp_group',
'req_rank_invite',
'req_rank_kick',
'req_rank_accept_join_requests',
'req_rank_promote',
'req_rank_demote',
'req_rank_modify_group_infos',
'default_member_rank',
'protected',
'private')
class ClusterSerializer(BasicClusterSerializer):
"""
Serialize Cluster model with memberships.
"""
class Meta(BasicClusterSerializer.Meta):
pass
users_ids = serializers.PrimaryKeyRelatedField(read_only=True, many=True, source='users')
|
<commit_before>from rest_framework import serializers
from sigma_core.models.cluster import Cluster
from sigma_core.serializers.group import GroupSerializer
class BasicClusterSerializer(serializers.ModelSerializer):
"""
Serialize Cluster model without memberships.
"""
class Meta:
model = Cluster
exclude = ('resp_group',
'req_rank_invite',
'req_rank_kick',
'req_rank_accept_join_requests',
'req_rank_promote',
'req_rank_demote',
'req_rank_modify_group_infos',
'default_member_rank',
'protected',
'private')
from sigma_core.serializers.user import UserWithPermsSerializer
class ClusterSerializer(BasicClusterSerializer):
"""
Serialize Cluster model with memberships.
"""
class Meta(BasicClusterSerializer.Meta):
pass
users = UserWithPermsSerializer(read_only=True, many=True)
<commit_msg>Use only foreign keys in Cluster serialisation and add _id suffixes<commit_after>
|
from rest_framework import serializers
from sigma_core.models.cluster import Cluster
from sigma_core.serializers.group import GroupSerializer
class BasicClusterSerializer(serializers.ModelSerializer):
"""
Serialize Cluster model without memberships.
"""
class Meta:
model = Cluster
exclude = ('resp_group',
'req_rank_invite',
'req_rank_kick',
'req_rank_accept_join_requests',
'req_rank_promote',
'req_rank_demote',
'req_rank_modify_group_infos',
'default_member_rank',
'protected',
'private')
class ClusterSerializer(BasicClusterSerializer):
"""
Serialize Cluster model with memberships.
"""
class Meta(BasicClusterSerializer.Meta):
pass
users_ids = serializers.PrimaryKeyRelatedField(read_only=True, many=True, source='users')
|
from rest_framework import serializers
from sigma_core.models.cluster import Cluster
from sigma_core.serializers.group import GroupSerializer
class BasicClusterSerializer(serializers.ModelSerializer):
"""
Serialize Cluster model without memberships.
"""
class Meta:
model = Cluster
exclude = ('resp_group',
'req_rank_invite',
'req_rank_kick',
'req_rank_accept_join_requests',
'req_rank_promote',
'req_rank_demote',
'req_rank_modify_group_infos',
'default_member_rank',
'protected',
'private')
from sigma_core.serializers.user import UserWithPermsSerializer
class ClusterSerializer(BasicClusterSerializer):
"""
Serialize Cluster model with memberships.
"""
class Meta(BasicClusterSerializer.Meta):
pass
users = UserWithPermsSerializer(read_only=True, many=True)
Use only foreign keys in Cluster serialisation and add _id suffixesfrom rest_framework import serializers
from sigma_core.models.cluster import Cluster
from sigma_core.serializers.group import GroupSerializer
class BasicClusterSerializer(serializers.ModelSerializer):
"""
Serialize Cluster model without memberships.
"""
class Meta:
model = Cluster
exclude = ('resp_group',
'req_rank_invite',
'req_rank_kick',
'req_rank_accept_join_requests',
'req_rank_promote',
'req_rank_demote',
'req_rank_modify_group_infos',
'default_member_rank',
'protected',
'private')
class ClusterSerializer(BasicClusterSerializer):
"""
Serialize Cluster model with memberships.
"""
class Meta(BasicClusterSerializer.Meta):
pass
users_ids = serializers.PrimaryKeyRelatedField(read_only=True, many=True, source='users')
|
<commit_before>from rest_framework import serializers
from sigma_core.models.cluster import Cluster
from sigma_core.serializers.group import GroupSerializer
class BasicClusterSerializer(serializers.ModelSerializer):
"""
Serialize Cluster model without memberships.
"""
class Meta:
model = Cluster
exclude = ('resp_group',
'req_rank_invite',
'req_rank_kick',
'req_rank_accept_join_requests',
'req_rank_promote',
'req_rank_demote',
'req_rank_modify_group_infos',
'default_member_rank',
'protected',
'private')
from sigma_core.serializers.user import UserWithPermsSerializer
class ClusterSerializer(BasicClusterSerializer):
"""
Serialize Cluster model with memberships.
"""
class Meta(BasicClusterSerializer.Meta):
pass
users = UserWithPermsSerializer(read_only=True, many=True)
<commit_msg>Use only foreign keys in Cluster serialisation and add _id suffixes<commit_after>from rest_framework import serializers
from sigma_core.models.cluster import Cluster
from sigma_core.serializers.group import GroupSerializer
class BasicClusterSerializer(serializers.ModelSerializer):
"""
Serialize Cluster model without memberships.
"""
class Meta:
model = Cluster
exclude = ('resp_group',
'req_rank_invite',
'req_rank_kick',
'req_rank_accept_join_requests',
'req_rank_promote',
'req_rank_demote',
'req_rank_modify_group_infos',
'default_member_rank',
'protected',
'private')
class ClusterSerializer(BasicClusterSerializer):
"""
Serialize Cluster model with memberships.
"""
class Meta(BasicClusterSerializer.Meta):
pass
users_ids = serializers.PrimaryKeyRelatedField(read_only=True, many=True, source='users')
|
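A hedged illustration (not the project's file) of the pattern adopted in the record above: exposing a related queryset as a flat list of primary keys under an *_ids field name, so the serialized payload carries foreign keys instead of nested user objects.
from rest_framework import serializers

class ExampleSerializer(serializers.Serializer):
    # renders e.g. {"users_ids": [1, 2, 3]} rather than embedded user dicts
    users_ids = serializers.PrimaryKeyRelatedField(
        read_only=True, many=True, source='users')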
11c3a7a9cfc2f86bc6df06d16caf0950cbedf6d6
|
setup.py
|
setup.py
|
from setuptools import find_packages
from setuptools import setup
setup(
name='Sukimu',
version='0.0.4',
url='https://github.com/xethorn/sukimu',
author='Michael Ortali',
author_email='github@xethorn.net',
description=(
'Standardized way to perform CRUD operations with Field validation'),
license='MIT',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: Alpha',
'Programming Language :: Python :: 3.4',
],)
|
from setuptools import find_packages
from setuptools import setup
setup(
name='Sukimu',
version='0.0.5',
url='https://github.com/xethorn/sukimu',
author='Michael Ortali',
author_email='github@xethorn.net',
description=(
'Standardized way to perform CRUD operations with Field validation'),
license='MIT',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: Alpha',
'Programming Language :: Python :: 3.4',
],)
|
Update the version to 0.0.5
|
Update the version to 0.0.5
|
Python
|
mit
|
xethorn/sukimu
|
from setuptools import find_packages
from setuptools import setup
setup(
name='Sukimu',
version='0.0.4',
url='https://github.com/xethorn/sukimu',
author='Michael Ortali',
author_email='github@xethorn.net',
description=(
'Standardized way to perform CRUD operations with Field validation'),
license='MIT',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: Alpha',
'Programming Language :: Python :: 3.4',
],)
Update the version to 0.0.5
|
from setuptools import find_packages
from setuptools import setup
setup(
name='Sukimu',
version='0.0.5',
url='https://github.com/xethorn/sukimu',
author='Michael Ortali',
author_email='github@xethorn.net',
description=(
'Standardized way to perform CRUD operations with Field validation'),
license='MIT',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: Alpha',
'Programming Language :: Python :: 3.4',
],)
|
<commit_before>from setuptools import find_packages
from setuptools import setup
setup(
name='Sukimu',
version='0.0.4',
url='https://github.com/xethorn/sukimu',
author='Michael Ortali',
author_email='github@xethorn.net',
description=(
'Standardized way to perform CRUD operations with Field validation'),
license='MIT',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: Alpha',
'Programming Language :: Python :: 3.4',
],)
<commit_msg>Update the version to 0.0.5<commit_after>
|
from setuptools import find_packages
from setuptools import setup
setup(
name='Sukimu',
version='0.0.5',
url='https://github.com/xethorn/sukimu',
author='Michael Ortali',
author_email='github@xethorn.net',
description=(
'Standardized way to perform CRUD operations with Field validation'),
license='MIT',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: Alpha',
'Programming Language :: Python :: 3.4',
],)
|
from setuptools import find_packages
from setuptools import setup
setup(
name='Sukimu',
version='0.0.4',
url='https://github.com/xethorn/sukimu',
author='Michael Ortali',
author_email='github@xethorn.net',
description=(
'Standardized way to perform CRUD operations with Field validation'),
license='MIT',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: Alpha',
'Programming Language :: Python :: 3.4',
],)
Update the version to 0.0.5from setuptools import find_packages
from setuptools import setup
setup(
name='Sukimu',
version='0.0.5',
url='https://github.com/xethorn/sukimu',
author='Michael Ortali',
author_email='github@xethorn.net',
description=(
'Standardized way to perform CRUD operations with Field validation'),
license='MIT',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: Alpha',
'Programming Language :: Python :: 3.4',
],)
|
<commit_before>from setuptools import find_packages
from setuptools import setup
setup(
name='Sukimu',
version='0.0.4',
url='https://github.com/xethorn/sukimu',
author='Michael Ortali',
author_email='github@xethorn.net',
description=(
'Standardized way to perform CRUD operations with Field validation'),
license='MIT',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: Alpha',
'Programming Language :: Python :: 3.4',
],)
<commit_msg>Update the version to 0.0.5<commit_after>from setuptools import find_packages
from setuptools import setup
setup(
name='Sukimu',
version='0.0.5',
url='https://github.com/xethorn/sukimu',
author='Michael Ortali',
author_email='github@xethorn.net',
description=(
'Standardized way to perform CRUD operations with Field validation'),
license='MIT',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
classifiers=[
'Development Status :: Alpha',
'Programming Language :: Python :: 3.4',
],)
|
ec25dc5bbca6a652ca39616ffc780e200cd29257
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='cosmo',
version='0.0.1',
description='Monitors for HST/COS',
keywords=['astronomy'],
classifiers=[
'Programming Language :: Python :: 3',
'License :: BSD-3 :: Association of Universities for Research in Astronomy',
'Operating System :: Linux'
],
python_requires='~=3.7', # 3.7 and higher, but not 4
packages=find_packages(),
install_requires=[
'setuptools',
'numpy>=1.11.1',
'astropy>=1.0.1',
'plotly',
'scipy',
'pyfastcopy',
'dask',
'pandas',
'pytest',
'pyyaml',
'peewee',
'monitorframe @ git+https://github.com/spacetelescope/monitor-framework#egg=monitorframe'
]
)
|
from setuptools import setup, find_packages
setup(
name='cosmo',
version='0.0.1',
description='Monitors for HST/COS',
keywords=['astronomy'],
classifiers=[
'Programming Language :: Python :: 3',
'License :: BSD-3 :: Association of Universities for Research in Astronomy',
'Operating System :: Linux'
],
python_requires='~=3.7', # 3.7 and higher, but not 4
packages=find_packages(),
install_requires=[
'setuptools',
'numpy>=1.11.1',
'astropy>=1.0.1',
'plotly>=4.0.0',
'scipy',
'pyfastcopy',
'dask',
'pandas',
'pytest',
'pyyaml',
'peewee',
'monitorframe @ git+https://github.com/spacetelescope/monitor-framework#egg=monitorframe'
]
)
|
Update plotly requirement to >=4.0.0
|
Update plotly requirement to >=4.0.0
|
Python
|
bsd-3-clause
|
justincely/cos_monitoring
|
from setuptools import setup, find_packages
setup(
name='cosmo',
version='0.0.1',
description='Monitors for HST/COS',
keywords=['astronomy'],
classifiers=[
'Programming Language :: Python :: 3',
'License :: BSD-3 :: Association of Universities for Research in Astronomy',
'Operating System :: Linux'
],
python_requires='~=3.7', # 3.7 and higher, but not 4
packages=find_packages(),
install_requires=[
'setuptools',
'numpy>=1.11.1',
'astropy>=1.0.1',
'plotly',
'scipy',
'pyfastcopy',
'dask',
'pandas',
'pytest',
'pyyaml',
'peewee',
'monitorframe @ git+https://github.com/spacetelescope/monitor-framework#egg=monitorframe'
]
)
Update plotly requirement to >=4.0.0
|
from setuptools import setup, find_packages
setup(
name='cosmo',
version='0.0.1',
description='Monitors for HST/COS',
keywords=['astronomy'],
classifiers=[
'Programming Language :: Python :: 3',
'License :: BSD-3 :: Association of Universities for Research in Astronomy',
'Operating System :: Linux'
],
python_requires='~=3.7', # 3.7 and higher, but not 4
packages=find_packages(),
install_requires=[
'setuptools',
'numpy>=1.11.1',
'astropy>=1.0.1',
'plotly>=4.0.0',
'scipy',
'pyfastcopy',
'dask',
'pandas',
'pytest',
'pyyaml',
'peewee',
'monitorframe @ git+https://github.com/spacetelescope/monitor-framework#egg=monitorframe'
]
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='cosmo',
version='0.0.1',
description='Monitors for HST/COS',
keywords=['astronomy'],
classifiers=[
'Programming Language :: Python :: 3',
'License :: BSD-3 :: Association of Universities for Research in Astronomy',
'Operating System :: Linux'
],
python_requires='~=3.7', # 3.7 and higher, but not 4
packages=find_packages(),
install_requires=[
'setuptools',
'numpy>=1.11.1',
'astropy>=1.0.1',
'plotly',
'scipy',
'pyfastcopy',
'dask',
'pandas',
'pytest',
'pyyaml',
'peewee',
'monitorframe @ git+https://github.com/spacetelescope/monitor-framework#egg=monitorframe'
]
)
<commit_msg>Update plotly requirement to >=4.0.0<commit_after>
|
from setuptools import setup, find_packages
setup(
name='cosmo',
version='0.0.1',
description='Monitors for HST/COS',
keywords=['astronomy'],
classifiers=[
'Programming Language :: Python :: 3',
'License :: BSD-3 :: Association of Universities for Research in Astronomy',
'Operating System :: Linux'
],
python_requires='~=3.7', # 3.7 and higher, but not 4
packages=find_packages(),
install_requires=[
'setuptools',
'numpy>=1.11.1',
'astropy>=1.0.1',
'plotly>=4.0.0',
'scipy',
'pyfastcopy',
'dask',
'pandas',
'pytest',
'pyyaml',
'peewee',
'monitorframe @ git+https://github.com/spacetelescope/monitor-framework#egg=monitorframe'
]
)
|
from setuptools import setup, find_packages
setup(
name='cosmo',
version='0.0.1',
description='Monitors for HST/COS',
keywords=['astronomy'],
classifiers=[
'Programming Language :: Python :: 3',
'License :: BSD-3 :: Association of Universities for Research in Astronomy',
'Operating System :: Linux'
],
python_requires='~=3.7', # 3.7 and higher, but not 4
packages=find_packages(),
install_requires=[
'setuptools',
'numpy>=1.11.1',
'astropy>=1.0.1',
'plotly',
'scipy',
'pyfastcopy',
'dask',
'pandas',
'pytest',
'pyyaml',
'peewee',
'monitorframe @ git+https://github.com/spacetelescope/monitor-framework#egg=monitorframe'
]
)
Update plotly requirement to >=4.0.0from setuptools import setup, find_packages
setup(
name='cosmo',
version='0.0.1',
description='Monitors for HST/COS',
keywords=['astronomy'],
classifiers=[
'Programming Language :: Python :: 3',
'License :: BSD-3 :: Association of Universities for Research in Astronomy',
'Operating System :: Linux'
],
python_requires='~=3.7', # 3.7 and higher, but not 4
packages=find_packages(),
install_requires=[
'setuptools',
'numpy>=1.11.1',
'astropy>=1.0.1',
'plotly>=4.0.0',
'scipy',
'pyfastcopy',
'dask',
'pandas',
'pytest',
'pyyaml',
'peewee',
'monitorframe @ git+https://github.com/spacetelescope/monitor-framework#egg=monitorframe'
]
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='cosmo',
version='0.0.1',
description='Monitors for HST/COS',
keywords=['astronomy'],
classifiers=[
'Programming Language :: Python :: 3',
'License :: BSD-3 :: Association of Universities for Research in Astronomy',
'Operating System :: Linux'
],
python_requires='~=3.7', # 3.7 and higher, but not 4
packages=find_packages(),
install_requires=[
'setuptools',
'numpy>=1.11.1',
'astropy>=1.0.1',
'plotly',
'scipy',
'pyfastcopy',
'dask',
'pandas',
'pytest',
'pyyaml',
'peewee',
'monitorframe @ git+https://github.com/spacetelescope/monitor-framework#egg=monitorframe'
]
)
<commit_msg>Update plotly requirement to >=4.0.0<commit_after>from setuptools import setup, find_packages
setup(
name='cosmo',
version='0.0.1',
description='Monitors for HST/COS',
keywords=['astronomy'],
classifiers=[
'Programming Language :: Python :: 3',
'License :: BSD-3 :: Association of Universities for Research in Astronomy',
'Operating System :: Linux'
],
python_requires='~=3.7', # 3.7 and higher, but not 4
packages=find_packages(),
install_requires=[
'setuptools',
'numpy>=1.11.1',
'astropy>=1.0.1',
'plotly>=4.0.0',
'scipy',
'pyfastcopy',
'dask',
'pandas',
'pytest',
'pyyaml',
'peewee',
'monitorframe @ git+https://github.com/spacetelescope/monitor-framework#egg=monitorframe'
]
)
|
4f8379370b67d4ac25fd9538571cdc541091e97d
|
reviewboard/accounts/urls.py
|
reviewboard/accounts/urls.py
|
from __future__ import unicode_literals
from django.conf.urls import patterns, url
urlpatterns = patterns(
"reviewboard.accounts.views",
url(r'^register/$', 'account_register',
{'next_url': 'dashboard'}, name="register"),
url(r'^preferences/$', 'user_preferences', name="user-preferences"),
)
urlpatterns += patterns(
"django.contrib.auth.views",
url(r'^login/$', 'login',
{'template_name': 'accounts/login.html'},
name='login'),
url(r'^logout/$', 'logout_then_login', name='logout'),
url(r'^recover/$',
'password_reset',
{
'template_name': 'accounts/password_reset.html',
'email_template_name': 'accounts/password_reset_email.txt'
},
name='recover'),
url(r'^recover/done/$',
'password_reset_done',
{'template_name': 'accounts/password_reset_done.html'},
name='password_reset_done'),
url(r'^reset/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$',
'password_reset_confirm',
{'template_name': 'accounts/password_reset_confirm.html'},
name='password-reset-confirm'),
url(r'^reset/done/$',
'password_reset_complete',
{'template_name': 'accounts/password_reset_complete.html'}),
)
|
from __future__ import unicode_literals
from django.conf.urls import patterns, url
urlpatterns = patterns(
"reviewboard.accounts.views",
url(r'^register/$', 'account_register',
{'next_url': 'dashboard'}, name="register"),
url(r'^preferences/$', 'user_preferences', name="user-preferences"),
)
urlpatterns += patterns(
"django.contrib.auth.views",
url(r'^login/$', 'login',
{'template_name': 'accounts/login.html'},
name='login'),
url(r'^logout/$', 'logout_then_login', name='logout'),
url(r'^recover/$',
'password_reset',
{
'template_name': 'accounts/password_reset.html',
'email_template_name': 'accounts/password_reset_email.txt'
},
name='recover'),
url(r'^recover/done/$',
'password_reset_done',
{'template_name': 'accounts/password_reset_done.html'},
name='password_reset_done'),
url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)-(?P<token>.+)/$',
'password_reset_confirm',
{'template_name': 'accounts/password_reset_confirm.html'},
name='password-reset-confirm'),
url(r'^reset/done/$',
'password_reset_complete',
{'template_name': 'accounts/password_reset_complete.html'}),
)
|
Fix password_reset_confirm URL for django change.
|
Fix password_reset_confirm URL for django change.
Django changed the uid parameter to the password_reset_confirm view to be
base64-encoded instead of base36. This means our URL had to change a bit.
Trivial change.
|
Python
|
mit
|
custode/reviewboard,bkochendorfer/reviewboard,davidt/reviewboard,reviewboard/reviewboard,1tush/reviewboard,KnowNo/reviewboard,custode/reviewboard,reviewboard/reviewboard,1tush/reviewboard,KnowNo/reviewboard,reviewboard/reviewboard,brennie/reviewboard,bkochendorfer/reviewboard,beol/reviewboard,sgallagher/reviewboard,reviewboard/reviewboard,KnowNo/reviewboard,davidt/reviewboard,sgallagher/reviewboard,1tush/reviewboard,chipx86/reviewboard,brennie/reviewboard,1tush/reviewboard,brennie/reviewboard,beol/reviewboard,beol/reviewboard,KnowNo/reviewboard,beol/reviewboard,1tush/reviewboard,sgallagher/reviewboard,chipx86/reviewboard,1tush/reviewboard,custode/reviewboard,custode/reviewboard,1tush/reviewboard,sgallagher/reviewboard,chipx86/reviewboard,1tush/reviewboard,bkochendorfer/reviewboard,1tush/reviewboard,bkochendorfer/reviewboard,davidt/reviewboard,davidt/reviewboard,brennie/reviewboard,chipx86/reviewboard
|
from __future__ import unicode_literals
from django.conf.urls import patterns, url
urlpatterns = patterns(
"reviewboard.accounts.views",
url(r'^register/$', 'account_register',
{'next_url': 'dashboard'}, name="register"),
url(r'^preferences/$', 'user_preferences', name="user-preferences"),
)
urlpatterns += patterns(
"django.contrib.auth.views",
url(r'^login/$', 'login',
{'template_name': 'accounts/login.html'},
name='login'),
url(r'^logout/$', 'logout_then_login', name='logout'),
url(r'^recover/$',
'password_reset',
{
'template_name': 'accounts/password_reset.html',
'email_template_name': 'accounts/password_reset_email.txt'
},
name='recover'),
url(r'^recover/done/$',
'password_reset_done',
{'template_name': 'accounts/password_reset_done.html'},
name='password_reset_done'),
url(r'^reset/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$',
'password_reset_confirm',
{'template_name': 'accounts/password_reset_confirm.html'},
name='password-reset-confirm'),
url(r'^reset/done/$',
'password_reset_complete',
{'template_name': 'accounts/password_reset_complete.html'}),
)
Fix password_reset_confirm URL for django change.
Django changed the uid parameter to the password_reset_confirm view to be
base64-encoded instead of base36. This means our URL had to change a bit.
Trivial change.
|
from __future__ import unicode_literals
from django.conf.urls import patterns, url
urlpatterns = patterns(
"reviewboard.accounts.views",
url(r'^register/$', 'account_register',
{'next_url': 'dashboard'}, name="register"),
url(r'^preferences/$', 'user_preferences', name="user-preferences"),
)
urlpatterns += patterns(
"django.contrib.auth.views",
url(r'^login/$', 'login',
{'template_name': 'accounts/login.html'},
name='login'),
url(r'^logout/$', 'logout_then_login', name='logout'),
url(r'^recover/$',
'password_reset',
{
'template_name': 'accounts/password_reset.html',
'email_template_name': 'accounts/password_reset_email.txt'
},
name='recover'),
url(r'^recover/done/$',
'password_reset_done',
{'template_name': 'accounts/password_reset_done.html'},
name='password_reset_done'),
url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)-(?P<token>.+)/$',
'password_reset_confirm',
{'template_name': 'accounts/password_reset_confirm.html'},
name='password-reset-confirm'),
url(r'^reset/done/$',
'password_reset_complete',
{'template_name': 'accounts/password_reset_complete.html'}),
)
|
<commit_before>from __future__ import unicode_literals
from django.conf.urls import patterns, url
urlpatterns = patterns(
"reviewboard.accounts.views",
url(r'^register/$', 'account_register',
{'next_url': 'dashboard'}, name="register"),
url(r'^preferences/$', 'user_preferences', name="user-preferences"),
)
urlpatterns += patterns(
"django.contrib.auth.views",
url(r'^login/$', 'login',
{'template_name': 'accounts/login.html'},
name='login'),
url(r'^logout/$', 'logout_then_login', name='logout'),
url(r'^recover/$',
'password_reset',
{
'template_name': 'accounts/password_reset.html',
'email_template_name': 'accounts/password_reset_email.txt'
},
name='recover'),
url(r'^recover/done/$',
'password_reset_done',
{'template_name': 'accounts/password_reset_done.html'},
name='password_reset_done'),
url(r'^reset/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$',
'password_reset_confirm',
{'template_name': 'accounts/password_reset_confirm.html'},
name='password-reset-confirm'),
url(r'^reset/done/$',
'password_reset_complete',
{'template_name': 'accounts/password_reset_complete.html'}),
)
<commit_msg>Fix password_reset_confirm URL for django change.
Django changed the uid parameter to the password_reset_confirm view to be
base64-encoded instead of base36. This means our URL had to change a bit.
Trivial change.<commit_after>
|
from __future__ import unicode_literals
from django.conf.urls import patterns, url
urlpatterns = patterns(
"reviewboard.accounts.views",
url(r'^register/$', 'account_register',
{'next_url': 'dashboard'}, name="register"),
url(r'^preferences/$', 'user_preferences', name="user-preferences"),
)
urlpatterns += patterns(
"django.contrib.auth.views",
url(r'^login/$', 'login',
{'template_name': 'accounts/login.html'},
name='login'),
url(r'^logout/$', 'logout_then_login', name='logout'),
url(r'^recover/$',
'password_reset',
{
'template_name': 'accounts/password_reset.html',
'email_template_name': 'accounts/password_reset_email.txt'
},
name='recover'),
url(r'^recover/done/$',
'password_reset_done',
{'template_name': 'accounts/password_reset_done.html'},
name='password_reset_done'),
url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)-(?P<token>.+)/$',
'password_reset_confirm',
{'template_name': 'accounts/password_reset_confirm.html'},
name='password-reset-confirm'),
url(r'^reset/done/$',
'password_reset_complete',
{'template_name': 'accounts/password_reset_complete.html'}),
)
|
from __future__ import unicode_literals
from django.conf.urls import patterns, url
urlpatterns = patterns(
"reviewboard.accounts.views",
url(r'^register/$', 'account_register',
{'next_url': 'dashboard'}, name="register"),
url(r'^preferences/$', 'user_preferences', name="user-preferences"),
)
urlpatterns += patterns(
"django.contrib.auth.views",
url(r'^login/$', 'login',
{'template_name': 'accounts/login.html'},
name='login'),
url(r'^logout/$', 'logout_then_login', name='logout'),
url(r'^recover/$',
'password_reset',
{
'template_name': 'accounts/password_reset.html',
'email_template_name': 'accounts/password_reset_email.txt'
},
name='recover'),
url(r'^recover/done/$',
'password_reset_done',
{'template_name': 'accounts/password_reset_done.html'},
name='password_reset_done'),
url(r'^reset/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$',
'password_reset_confirm',
{'template_name': 'accounts/password_reset_confirm.html'},
name='password-reset-confirm'),
url(r'^reset/done/$',
'password_reset_complete',
{'template_name': 'accounts/password_reset_complete.html'}),
)
Fix password_reset_confirm URL for django change.
Django changed the uid parameter to the password_reset_confirm view to be
base64-encoded instead of base36. This means our URL had to change a bit.
Trivial change.from __future__ import unicode_literals
from django.conf.urls import patterns, url
urlpatterns = patterns(
"reviewboard.accounts.views",
url(r'^register/$', 'account_register',
{'next_url': 'dashboard'}, name="register"),
url(r'^preferences/$', 'user_preferences', name="user-preferences"),
)
urlpatterns += patterns(
"django.contrib.auth.views",
url(r'^login/$', 'login',
{'template_name': 'accounts/login.html'},
name='login'),
url(r'^logout/$', 'logout_then_login', name='logout'),
url(r'^recover/$',
'password_reset',
{
'template_name': 'accounts/password_reset.html',
'email_template_name': 'accounts/password_reset_email.txt'
},
name='recover'),
url(r'^recover/done/$',
'password_reset_done',
{'template_name': 'accounts/password_reset_done.html'},
name='password_reset_done'),
url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)-(?P<token>.+)/$',
'password_reset_confirm',
{'template_name': 'accounts/password_reset_confirm.html'},
name='password-reset-confirm'),
url(r'^reset/done/$',
'password_reset_complete',
{'template_name': 'accounts/password_reset_complete.html'}),
)
|
<commit_before>from __future__ import unicode_literals
from django.conf.urls import patterns, url
urlpatterns = patterns(
"reviewboard.accounts.views",
url(r'^register/$', 'account_register',
{'next_url': 'dashboard'}, name="register"),
url(r'^preferences/$', 'user_preferences', name="user-preferences"),
)
urlpatterns += patterns(
"django.contrib.auth.views",
url(r'^login/$', 'login',
{'template_name': 'accounts/login.html'},
name='login'),
url(r'^logout/$', 'logout_then_login', name='logout'),
url(r'^recover/$',
'password_reset',
{
'template_name': 'accounts/password_reset.html',
'email_template_name': 'accounts/password_reset_email.txt'
},
name='recover'),
url(r'^recover/done/$',
'password_reset_done',
{'template_name': 'accounts/password_reset_done.html'},
name='password_reset_done'),
url(r'^reset/(?P<uidb36>[0-9A-Za-z]+)-(?P<token>.+)/$',
'password_reset_confirm',
{'template_name': 'accounts/password_reset_confirm.html'},
name='password-reset-confirm'),
url(r'^reset/done/$',
'password_reset_complete',
{'template_name': 'accounts/password_reset_complete.html'}),
)
<commit_msg>Fix password_reset_confirm URL for django change.
Django changed the uid parameter to the password_reset_confirm view to be
base64-encoded instead of base36. This means our URL had to change a bit.
Trivial change.<commit_after>from __future__ import unicode_literals
from django.conf.urls import patterns, url
urlpatterns = patterns(
"reviewboard.accounts.views",
url(r'^register/$', 'account_register',
{'next_url': 'dashboard'}, name="register"),
url(r'^preferences/$', 'user_preferences', name="user-preferences"),
)
urlpatterns += patterns(
"django.contrib.auth.views",
url(r'^login/$', 'login',
{'template_name': 'accounts/login.html'},
name='login'),
url(r'^logout/$', 'logout_then_login', name='logout'),
url(r'^recover/$',
'password_reset',
{
'template_name': 'accounts/password_reset.html',
'email_template_name': 'accounts/password_reset_email.txt'
},
name='recover'),
url(r'^recover/done/$',
'password_reset_done',
{'template_name': 'accounts/password_reset_done.html'},
name='password_reset_done'),
url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)-(?P<token>.+)/$',
'password_reset_confirm',
{'template_name': 'accounts/password_reset_confirm.html'},
name='password-reset-confirm'),
url(r'^reset/done/$',
'password_reset_complete',
{'template_name': 'accounts/password_reset_complete.html'}),
)
|
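A note on the record above, for readers who have not tracked the Django change: uidb64 is a URL-safe base64 encoding of the user's primary key, so the value can contain '-' and '_' as well as alphanumerics, which is why the capture group had to widen. The sketch below shows how the two URL parts are typically produced; the URL name and parameter names come from the record, while the helper itself and the import locations are assumptions about a Django 1.6-era project.

from django.contrib.auth.tokens import default_token_generator
from django.core.urlresolvers import reverse            # django.urls on newer Django
from django.utils.encoding import force_bytes
from django.utils.http import urlsafe_base64_encode

def build_password_reset_link(user):
    # URL-safe base64 of the primary key; on some Django versions this returns
    # bytes and may need .decode() before being placed in a URL.
    uid = urlsafe_base64_encode(force_bytes(user.pk))
    token = default_token_generator.make_token(user)
    return reverse('password-reset-confirm', kwargs={'uidb64': uid, 'token': token})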
9df2bae691e8613794be3713194db2420fc75385
|
gapipy/resources/dossier/transport_dossier.py
|
gapipy/resources/dossier/transport_dossier.py
|
from __future__ import unicode_literals
from ..base import Resource
from .details import DossierDetail, DossierDetailsMixin
from .dossier_features import DossierFeature
class TransportDossier(Resource, DossierDetailsMixin):
_resource_name = 'transport_dossiers'
_as_is_fields = [
'id', 'href', 'features', 'capacity', 'private', 'name',
'dossier_segment',
]
_model_collection_fields = [
('details', DossierDetail),
('features', DossierFeature),
]
_date_time_fields_local = ['date_created', 'date_last_modified']
|
from __future__ import unicode_literals
from ..base import Resource
from .details import DossierDetail, DossierDetailsMixin
from .dossier_features import DossierFeature
class TransportDossier(Resource, DossierDetailsMixin):
_resource_name = 'transport_dossiers'
_as_is_fields = [
'id', 'href', 'capacity', 'private', 'name',
'dossier_segment',
]
_model_collection_fields = [
('details', DossierDetail),
('features', DossierFeature),
]
_date_time_fields_local = ['date_created', 'date_last_modified']
|
Remove features from as-is fields on TransportDossier
|
Remove features from as-is fields on TransportDossier
Reflected as a model_collection_field
|
Python
|
mit
|
gadventures/gapipy
|
from __future__ import unicode_literals
from ..base import Resource
from .details import DossierDetail, DossierDetailsMixin
from .dossier_features import DossierFeature
class TransportDossier(Resource, DossierDetailsMixin):
_resource_name = 'transport_dossiers'
_as_is_fields = [
'id', 'href', 'features', 'capacity', 'private', 'name',
'dossier_segment',
]
_model_collection_fields = [
('details', DossierDetail),
('features', DossierFeature),
]
_date_time_fields_local = ['date_created', 'date_last_modified']
Remove features from as-is fields on TransportDossier
Reflected as a model_collection_field
|
from __future__ import unicode_literals
from ..base import Resource
from .details import DossierDetail, DossierDetailsMixin
from .dossier_features import DossierFeature
class TransportDossier(Resource, DossierDetailsMixin):
_resource_name = 'transport_dossiers'
_as_is_fields = [
'id', 'href', 'capacity', 'private', 'name',
'dossier_segment',
]
_model_collection_fields = [
('details', DossierDetail),
('features', DossierFeature),
]
_date_time_fields_local = ['date_created', 'date_last_modified']
|
<commit_before>from __future__ import unicode_literals
from ..base import Resource
from .details import DossierDetail, DossierDetailsMixin
from .dossier_features import DossierFeature
class TransportDossier(Resource, DossierDetailsMixin):
_resource_name = 'transport_dossiers'
_as_is_fields = [
'id', 'href', 'features', 'capacity', 'private', 'name',
'dossier_segment',
]
_model_collection_fields = [
('details', DossierDetail),
('features', DossierFeature),
]
_date_time_fields_local = ['date_created', 'date_last_modified']
<commit_msg>Remove features from as-is fields on TransportDossier
Reflected as a model_collection_field<commit_after>
|
from __future__ import unicode_literals
from ..base import Resource
from .details import DossierDetail, DossierDetailsMixin
from .dossier_features import DossierFeature
class TransportDossier(Resource, DossierDetailsMixin):
_resource_name = 'transport_dossiers'
_as_is_fields = [
'id', 'href', 'capacity', 'private', 'name',
'dossier_segment',
]
_model_collection_fields = [
('details', DossierDetail),
('features', DossierFeature),
]
_date_time_fields_local = ['date_created', 'date_last_modified']
|
from __future__ import unicode_literals
from ..base import Resource
from .details import DossierDetail, DossierDetailsMixin
from .dossier_features import DossierFeature
class TransportDossier(Resource, DossierDetailsMixin):
_resource_name = 'transport_dossiers'
_as_is_fields = [
'id', 'href', 'features', 'capacity', 'private', 'name',
'dossier_segment',
]
_model_collection_fields = [
('details', DossierDetail),
('features', DossierFeature),
]
_date_time_fields_local = ['date_created', 'date_last_modified']
Remove features from as-is fields on TransportDossier
Reflected as a model_collection_fieldfrom __future__ import unicode_literals
from ..base import Resource
from .details import DossierDetail, DossierDetailsMixin
from .dossier_features import DossierFeature
class TransportDossier(Resource, DossierDetailsMixin):
_resource_name = 'transport_dossiers'
_as_is_fields = [
'id', 'href', 'capacity', 'private', 'name',
'dossier_segment',
]
_model_collection_fields = [
('details', DossierDetail),
('features', DossierFeature),
]
_date_time_fields_local = ['date_created', 'date_last_modified']
|
<commit_before>from __future__ import unicode_literals
from ..base import Resource
from .details import DossierDetail, DossierDetailsMixin
from .dossier_features import DossierFeature
class TransportDossier(Resource, DossierDetailsMixin):
_resource_name = 'transport_dossiers'
_as_is_fields = [
'id', 'href', 'features', 'capacity', 'private', 'name',
'dossier_segment',
]
_model_collection_fields = [
('details', DossierDetail),
('features', DossierFeature),
]
_date_time_fields_local = ['date_created', 'date_last_modified']
<commit_msg>Remove features from as-is fields on TransportDossier
Reflected as a model_collection_field<commit_after>from __future__ import unicode_literals
from ..base import Resource
from .details import DossierDetail, DossierDetailsMixin
from .dossier_features import DossierFeature
class TransportDossier(Resource, DossierDetailsMixin):
_resource_name = 'transport_dossiers'
_as_is_fields = [
'id', 'href', 'capacity', 'private', 'name',
'dossier_segment',
]
_model_collection_fields = [
('details', DossierDetail),
('features', DossierFeature),
]
_date_time_fields_local = ['date_created', 'date_last_modified']
|
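What the change above means for callers of the gapipy client, sketched under assumptions (the application key and dossier id are placeholders; client construction follows gapipy's usual pattern): with 'features' listed only under _model_collection_fields, each entry is hydrated as a DossierFeature resource rather than passed through as the raw dict from the API payload.

from gapipy import Client

api = Client(application_key='live_xxxxxxxxxxxx')    # placeholder key
transport = api.transport_dossiers.get(12345)        # placeholder dossier id
for feature in transport.features:
    print(type(feature).__name__)                    # expected: DossierFeature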
94d7dc0a116ff089ac5943dd795c9f02bc040788
|
waterbutler/providers/dropbox/settings.py
|
waterbutler/providers/dropbox/settings.py
|
try:
from waterbutler import settings
except ImportError:
settings = {}
config = settings.get('DROPBOX_PROVIDER_CONFIG', {})
BASE_URL = config.get('BASE_URL', 'https://api.dropboxapi.com/1/')
BASE_CONTENT_URL = config.get('BASE_CONTENT_URL', 'https://content.dropboxapi.com/1/')
|
try:
from waterbutler import settings
except ImportError:
settings = {}
config = settings.get('DROPBOX_PROVIDER_CONFIG', {})
BASE_URL = config.get('BASE_URL', 'https://api.dropbox.com/1/')
BASE_CONTENT_URL = config.get('BASE_CONTENT_URL', 'https://api-content.dropbox.com/1/')
|
Revert "Update drobox api urls h/t @felliott"
|
Revert "Update drobox api urls h/t @felliott"
This reverts commit 6d1612698c2e42ab60d521915f31ff08832e3745.
|
Python
|
apache-2.0
|
RCOSDP/waterbutler,CenterForOpenScience/waterbutler,felliott/waterbutler,Johnetordoff/waterbutler,rdhyee/waterbutler,TomBaxter/waterbutler
|
try:
from waterbutler import settings
except ImportError:
settings = {}
config = settings.get('DROPBOX_PROVIDER_CONFIG', {})
BASE_URL = config.get('BASE_URL', 'https://api.dropboxapi.com/1/')
BASE_CONTENT_URL = config.get('BASE_CONTENT_URL', 'https://content.dropboxapi.com/1/')
Revert "Update drobox api urls h/t @felliott"
This reverts commit 6d1612698c2e42ab60d521915f31ff08832e3745.
|
try:
from waterbutler import settings
except ImportError:
settings = {}
config = settings.get('DROPBOX_PROVIDER_CONFIG', {})
BASE_URL = config.get('BASE_URL', 'https://api.dropbox.com/1/')
BASE_CONTENT_URL = config.get('BASE_CONTENT_URL', 'https://api-content.dropbox.com/1/')
|
<commit_before>try:
from waterbutler import settings
except ImportError:
settings = {}
config = settings.get('DROPBOX_PROVIDER_CONFIG', {})
BASE_URL = config.get('BASE_URL', 'https://api.dropboxapi.com/1/')
BASE_CONTENT_URL = config.get('BASE_CONTENT_URL', 'https://content.dropboxapi.com/1/')
<commit_msg>Revert "Update drobox api urls h/t @felliott"
This reverts commit 6d1612698c2e42ab60d521915f31ff08832e3745.<commit_after>
|
try:
from waterbutler import settings
except ImportError:
settings = {}
config = settings.get('DROPBOX_PROVIDER_CONFIG', {})
BASE_URL = config.get('BASE_URL', 'https://api.dropbox.com/1/')
BASE_CONTENT_URL = config.get('BASE_CONTENT_URL', 'https://api-content.dropbox.com/1/')
|
try:
from waterbutler import settings
except ImportError:
settings = {}
config = settings.get('DROPBOX_PROVIDER_CONFIG', {})
BASE_URL = config.get('BASE_URL', 'https://api.dropboxapi.com/1/')
BASE_CONTENT_URL = config.get('BASE_CONTENT_URL', 'https://content.dropboxapi.com/1/')
Revert "Update drobox api urls h/t @felliott"
This reverts commit 6d1612698c2e42ab60d521915f31ff08832e3745.try:
from waterbutler import settings
except ImportError:
settings = {}
config = settings.get('DROPBOX_PROVIDER_CONFIG', {})
BASE_URL = config.get('BASE_URL', 'https://api.dropbox.com/1/')
BASE_CONTENT_URL = config.get('BASE_CONTENT_URL', 'https://api-content.dropbox.com/1/')
|
<commit_before>try:
from waterbutler import settings
except ImportError:
settings = {}
config = settings.get('DROPBOX_PROVIDER_CONFIG', {})
BASE_URL = config.get('BASE_URL', 'https://api.dropboxapi.com/1/')
BASE_CONTENT_URL = config.get('BASE_CONTENT_URL', 'https://content.dropboxapi.com/1/')
<commit_msg>Revert "Update drobox api urls h/t @felliott"
This reverts commit 6d1612698c2e42ab60d521915f31ff08832e3745.<commit_after>try:
from waterbutler import settings
except ImportError:
settings = {}
config = settings.get('DROPBOX_PROVIDER_CONFIG', {})
BASE_URL = config.get('BASE_URL', 'https://api.dropbox.com/1/')
BASE_CONTENT_URL = config.get('BASE_CONTENT_URL', 'https://api-content.dropbox.com/1/')
|
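Because both URLs in the reverted module fall back through config.get(), a deployment that still needs the newer Dropbox hosts can restore them via configuration instead of code. The fragment below only illustrates the override mechanism; the keys mirror the module above and the values are the ones the revert removed, not a recommendation.

DROPBOX_PROVIDER_CONFIG = {
    'BASE_URL': 'https://api.dropboxapi.com/1/',
    'BASE_CONTENT_URL': 'https://content.dropboxapi.com/1/',
}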
b6db1edc503222d3e954168d12e2a17b9387fc5b
|
bddbot/dealer.py
|
bddbot/dealer.py
|
from os import mkdir, getcwd
from bank import split_bank
from errors import BotError
class Dealer(object):
def __init__(self):
self.__feature = ""
self.__scenarios = []
def assign(self):
try:
with open("features.bank", "rb") as bank_input:
(header, self.__feature, self.__scenarios) = split_bank(bank_input.read())
except IOError:
raise BotError("No features bank in {:s}".format(getcwd()))
if not self.__feature:
print("No more scenarios to deal")
mkdir("features")
with open("features/all.feature", "wb") as features:
features.write(header)
features.write(self.__feature)
if self.__scenarios:
features.write(self.__scenarios[0])
|
from os.path import join
from os import mkdir, getcwd
from bank import split_bank
from errors import BotError
FEATURE_BANK_FILENAME = "features.bank"
FEATURES_DIRECTORY = "features"
OUTPUT_FEATURES_FILENAME = join(FEATURES_DIRECTORY, "all.feature")
class Dealer(object):
def __init__(self):
self.__feature = ""
self.__scenarios = []
def assign(self):
try:
with open(FEATURE_BANK_FILENAME, "rb") as bank_input:
(header, self.__feature, self.__scenarios) = split_bank(bank_input.read())
except IOError:
raise BotError("No features bank in {:s}".format(getcwd()))
if not self.__feature:
print("No more scenarios to deal")
mkdir(FEATURES_DIRECTORY)
with open(OUTPUT_FEATURES_FILENAME, "wb") as features:
features.write(header)
features.write(self.__feature)
if self.__scenarios:
features.write(self.__scenarios[0])
|
Replace magic strings with constants
|
Replace magic strings with constants
|
Python
|
mit
|
nivbend/bdd_bot
|
from os import mkdir, getcwd
from bank import split_bank
from errors import BotError
class Dealer(object):
def __init__(self):
self.__feature = ""
self.__scenarios = []
def assign(self):
try:
with open("features.bank", "rb") as bank_input:
(header, self.__feature, self.__scenarios) = split_bank(bank_input.read())
except IOError:
raise BotError("No features bank in {:s}".format(getcwd()))
if not self.__feature:
print("No more scenarios to deal")
mkdir("features")
with open("features/all.feature", "wb") as features:
features.write(header)
features.write(self.__feature)
if self.__scenarios:
features.write(self.__scenarios[0])
Replace magic strings with constants
|
from os.path import join
from os import mkdir, getcwd
from bank import split_bank
from errors import BotError
FEATURE_BANK_FILENAME = "features.bank"
FEATURES_DIRECTORY = "features"
OUTPUT_FEATURES_FILENAME = join(FEATURES_DIRECTORY, "all.feature")
class Dealer(object):
def __init__(self):
self.__feature = ""
self.__scenarios = []
def assign(self):
try:
with open(FEATURE_BANK_FILENAME, "rb") as bank_input:
(header, self.__feature, self.__scenarios) = split_bank(bank_input.read())
except IOError:
raise BotError("No features bank in {:s}".format(getcwd()))
if not self.__feature:
print("No more scenarios to deal")
mkdir(FEATURES_DIRECTORY)
with open(OUTPUT_FEATURES_FILENAME, "wb") as features:
features.write(header)
features.write(self.__feature)
if self.__scenarios:
features.write(self.__scenarios[0])
|
<commit_before>from os import mkdir, getcwd
from bank import split_bank
from errors import BotError
class Dealer(object):
def __init__(self):
self.__feature = ""
self.__scenarios = []
def assign(self):
try:
with open("features.bank", "rb") as bank_input:
(header, self.__feature, self.__scenarios) = split_bank(bank_input.read())
except IOError:
raise BotError("No features bank in {:s}".format(getcwd()))
if not self.__feature:
print("No more scenarios to deal")
mkdir("features")
with open("features/all.feature", "wb") as features:
features.write(header)
features.write(self.__feature)
if self.__scenarios:
features.write(self.__scenarios[0])
<commit_msg>Replace magic strings with constants<commit_after>
|
from os.path import join
from os import mkdir, getcwd
from bank import split_bank
from errors import BotError
FEATURE_BANK_FILENAME = "features.bank"
FEATURES_DIRECTORY = "features"
OUTPUT_FEATURES_FILENAME = join(FEATURES_DIRECTORY, "all.feature")
class Dealer(object):
def __init__(self):
self.__feature = ""
self.__scenarios = []
def assign(self):
try:
with open(FEATURE_BANK_FILENAME, "rb") as bank_input:
(header, self.__feature, self.__scenarios) = split_bank(bank_input.read())
except IOError:
raise BotError("No features bank in {:s}".format(getcwd()))
if not self.__feature:
print("No more scenarios to deal")
mkdir(FEATURES_DIRECTORY)
with open(OUTPUT_FEATURES_FILENAME, "wb") as features:
features.write(header)
features.write(self.__feature)
if self.__scenarios:
features.write(self.__scenarios[0])
|
from os import mkdir, getcwd
from bank import split_bank
from errors import BotError
class Dealer(object):
def __init__(self):
self.__feature = ""
self.__scenarios = []
def assign(self):
try:
with open("features.bank", "rb") as bank_input:
(header, self.__feature, self.__scenarios) = split_bank(bank_input.read())
except IOError:
raise BotError("No features bank in {:s}".format(getcwd()))
if not self.__feature:
print("No more scenarios to deal")
mkdir("features")
with open("features/all.feature", "wb") as features:
features.write(header)
features.write(self.__feature)
if self.__scenarios:
features.write(self.__scenarios[0])
Replace magic strings with constantsfrom os.path import join
from os import mkdir, getcwd
from bank import split_bank
from errors import BotError
FEATURE_BANK_FILENAME = "features.bank"
FEATURES_DIRECTORY = "features"
OUTPUT_FEATURES_FILENAME = join(FEATURES_DIRECTORY, "all.feature")
class Dealer(object):
def __init__(self):
self.__feature = ""
self.__scenarios = []
def assign(self):
try:
with open(FEATURE_BANK_FILENAME, "rb") as bank_input:
(header, self.__feature, self.__scenarios) = split_bank(bank_input.read())
except IOError:
raise BotError("No features bank in {:s}".format(getcwd()))
if not self.__feature:
print("No more scenarios to deal")
mkdir(FEATURES_DIRECTORY)
with open(OUTPUT_FEATURES_FILENAME, "wb") as features:
features.write(header)
features.write(self.__feature)
if self.__scenarios:
features.write(self.__scenarios[0])
|
<commit_before>from os import mkdir, getcwd
from bank import split_bank
from errors import BotError
class Dealer(object):
def __init__(self):
self.__feature = ""
self.__scenarios = []
def assign(self):
try:
with open("features.bank", "rb") as bank_input:
(header, self.__feature, self.__scenarios) = split_bank(bank_input.read())
except IOError:
raise BotError("No features bank in {:s}".format(getcwd()))
if not self.__feature:
print("No more scenarios to deal")
mkdir("features")
with open("features/all.feature", "wb") as features:
features.write(header)
features.write(self.__feature)
if self.__scenarios:
features.write(self.__scenarios[0])
<commit_msg>Replace magic strings with constants<commit_after>from os.path import join
from os import mkdir, getcwd
from bank import split_bank
from errors import BotError
FEATURE_BANK_FILENAME = "features.bank"
FEATURES_DIRECTORY = "features"
OUTPUT_FEATURES_FILENAME = join(FEATURES_DIRECTORY, "all.feature")
class Dealer(object):
def __init__(self):
self.__feature = ""
self.__scenarios = []
def assign(self):
try:
with open(FEATURE_BANK_FILENAME, "rb") as bank_input:
(header, self.__feature, self.__scenarios) = split_bank(bank_input.read())
except IOError:
raise BotError("No features bank in {:s}".format(getcwd()))
if not self.__feature:
print("No more scenarios to deal")
mkdir(FEATURES_DIRECTORY)
with open(OUTPUT_FEATURES_FILENAME, "wb") as features:
features.write(header)
features.write(self.__feature)
if self.__scenarios:
features.write(self.__scenarios[0])
|
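One concrete payoff of the constants introduced above is testability: a test can point the dealer at a fixture bank by patching a single module attribute instead of chasing the old "features.bank" literal. The sketch assumes the package import path from the file name and a fixture path that may not exist; use the mock backport instead of unittest.mock on Python 2.

from unittest import mock
from bddbot import dealer

with mock.patch.object(dealer, 'FEATURE_BANK_FILENAME', 'tests/fixtures/features.bank'):
    dealer.Dealer().assign()   # reads the fixture bank instead of the real one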
77541e5b3956d9e6b130810211fcae10de29eb85
|
tests/integration/base.py
|
tests/integration/base.py
|
import righteous
from ConfigParser import SafeConfigParser
from ..compat import unittest
class RighteousIntegrationTestCase(unittest.TestCase):
def setUp(self):
config = SafeConfigParser()
config.read('righteous.config')
if not config.has_section('auth'):
raise Exception('Please create a righteous.config file with '
'appropriate credentials')
self.auth = dict(
(key, config.get('auth', key))
for key in config.options('auth'))
self.server = dict(
(key, config.get('server-defaults', key))
for key in config.options('server-defaults'))
righteous.init(
self.auth['username'], self.auth['password'],
self.auth['account_id'], **self.server)
self.config = config
self.username = self.auth['username']
def test_login(self):
self.assertTrue(righteous.login())
assert False
|
import righteous
from ConfigParser import SafeConfigParser
from ..compat import unittest
class RighteousIntegrationTestCase(unittest.TestCase):
def setUp(self):
config = SafeConfigParser()
config.read('righteous.config')
if not config.has_section('auth'):
raise Exception('Please create a righteous.config file with '
'appropriate credentials')
self.auth = dict(
(key, config.get('auth', key))
for key in config.options('auth'))
self.server = dict(
(key, config.get('server-defaults', key))
for key in config.options('server-defaults'))
righteous.init(
self.auth['username'], self.auth['password'],
self.auth['account_id'], **self.server)
self.config = config
self.username = self.auth['username']
def test_login(self):
self.assertTrue(righteous.login())
|
Make the integration test fail, so we can see the request / response
|
Make the integration test fail, so we can see the request / response
|
Python
|
unlicense
|
michaeljoseph/righteous,michaeljoseph/righteous
|
import righteous
from ConfigParser import SafeConfigParser
from ..compat import unittest
class RighteousIntegrationTestCase(unittest.TestCase):
def setUp(self):
config = SafeConfigParser()
config.read('righteous.config')
if not config.has_section('auth'):
raise Exception('Please create a righteous.config file with '
'appropriate credentials')
self.auth = dict(
(key, config.get('auth', key))
for key in config.options('auth'))
self.server = dict(
(key, config.get('server-defaults', key))
for key in config.options('server-defaults'))
righteous.init(
self.auth['username'], self.auth['password'],
self.auth['account_id'], **self.server)
self.config = config
self.username = self.auth['username']
def test_login(self):
self.assertTrue(righteous.login())
assert False
Make the integration test fail, so we can see the request / response
|
import righteous
from ConfigParser import SafeConfigParser
from ..compat import unittest
class RighteousIntegrationTestCase(unittest.TestCase):
def setUp(self):
config = SafeConfigParser()
config.read('righteous.config')
if not config.has_section('auth'):
raise Exception('Please create a righteous.config file with '
'appropriate credentials')
self.auth = dict(
(key, config.get('auth', key))
for key in config.options('auth'))
self.server = dict(
(key, config.get('server-defaults', key))
for key in config.options('server-defaults'))
righteous.init(
self.auth['username'], self.auth['password'],
self.auth['account_id'], **self.server)
self.config = config
self.username = self.auth['username']
def test_login(self):
self.assertTrue(righteous.login())
|
<commit_before>import righteous
from ConfigParser import SafeConfigParser
from ..compat import unittest
class RighteousIntegrationTestCase(unittest.TestCase):
def setUp(self):
config = SafeConfigParser()
config.read('righteous.config')
if not config.has_section('auth'):
raise Exception('Please create a righteous.config file with '
'appropriate credentials')
self.auth = dict(
(key, config.get('auth', key))
for key in config.options('auth'))
self.server = dict(
(key, config.get('server-defaults', key))
for key in config.options('server-defaults'))
righteous.init(
self.auth['username'], self.auth['password'],
self.auth['account_id'], **self.server)
self.config = config
self.username = self.auth['username']
def test_login(self):
self.assertTrue(righteous.login())
assert False
<commit_msg>Make the integration test fail, so we can see the request / response<commit_after>
|
import righteous
from ConfigParser import SafeConfigParser
from ..compat import unittest
class RighteousIntegrationTestCase(unittest.TestCase):
def setUp(self):
config = SafeConfigParser()
config.read('righteous.config')
if not config.has_section('auth'):
raise Exception('Please create a righteous.config file with '
'appropriate credentials')
self.auth = dict(
(key, config.get('auth', key))
for key in config.options('auth'))
self.server = dict(
(key, config.get('server-defaults', key))
for key in config.options('server-defaults'))
righteous.init(
self.auth['username'], self.auth['password'],
self.auth['account_id'], **self.server)
self.config = config
self.username = self.auth['username']
def test_login(self):
self.assertTrue(righteous.login())
|
import righteous
from ConfigParser import SafeConfigParser
from ..compat import unittest
class RighteousIntegrationTestCase(unittest.TestCase):
def setUp(self):
config = SafeConfigParser()
config.read('righteous.config')
if not config.has_section('auth'):
raise Exception('Please create a righteous.config file with '
'appropriate credentials')
self.auth = dict(
(key, config.get('auth', key))
for key in config.options('auth'))
self.server = dict(
(key, config.get('server-defaults', key))
for key in config.options('server-defaults'))
righteous.init(
self.auth['username'], self.auth['password'],
self.auth['account_id'], **self.server)
self.config = config
self.username = self.auth['username']
def test_login(self):
self.assertTrue(righteous.login())
assert False
Make the integration test fail, so we can see the request / responseimport righteous
from ConfigParser import SafeConfigParser
from ..compat import unittest
class RighteousIntegrationTestCase(unittest.TestCase):
def setUp(self):
config = SafeConfigParser()
config.read('righteous.config')
if not config.has_section('auth'):
raise Exception('Please create a righteous.config file with '
'appropriate credentials')
self.auth = dict(
(key, config.get('auth', key))
for key in config.options('auth'))
self.server = dict(
(key, config.get('server-defaults', key))
for key in config.options('server-defaults'))
righteous.init(
self.auth['username'], self.auth['password'],
self.auth['account_id'], **self.server)
self.config = config
self.username = self.auth['username']
def test_login(self):
self.assertTrue(righteous.login())
|
<commit_before>import righteous
from ConfigParser import SafeConfigParser
from ..compat import unittest
class RighteousIntegrationTestCase(unittest.TestCase):
def setUp(self):
config = SafeConfigParser()
config.read('righteous.config')
if not config.has_section('auth'):
raise Exception('Please create a righteous.config file with '
'appropriate credentials')
self.auth = dict(
(key, config.get('auth', key))
for key in config.options('auth'))
self.server = dict(
(key, config.get('server-defaults', key))
for key in config.options('server-defaults'))
righteous.init(
self.auth['username'], self.auth['password'],
self.auth['account_id'], **self.server)
self.config = config
self.username = self.auth['username']
def test_login(self):
self.assertTrue(righteous.login())
assert False
<commit_msg>Make the integration test fail, so we can see the request / response<commit_after>import righteous
from ConfigParser import SafeConfigParser
from ..compat import unittest
class RighteousIntegrationTestCase(unittest.TestCase):
def setUp(self):
config = SafeConfigParser()
config.read('righteous.config')
if not config.has_section('auth'):
raise Exception('Please create a righteous.config file with '
'appropriate credentials')
self.auth = dict(
(key, config.get('auth', key))
for key in config.options('auth'))
self.server = dict(
(key, config.get('server-defaults', key))
for key in config.options('server-defaults'))
righteous.init(
self.auth['username'], self.auth['password'],
self.auth['account_id'], **self.server)
self.config = config
self.username = self.auth['username']
def test_login(self):
self.assertTrue(righteous.login())
|
bc5a78c0ddd635e27fe1f1daf3907094e7ba71cc
|
ain7/organizations/filters.py
|
ain7/organizations/filters.py
|
# -*- coding: utf-8
"""
ain7/organizations/filters.py
"""
#
# Copyright © 2007-2016 AIn7 Devel Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
import django_filters
from ain7.organizations.models import Organization
class OrganizationFilter(django_filters.FilterSet):
class Meta:
model = Organization
fields = {
'name': ['icontains'],
'activity_field': ['icontains'],
}
|
# -*- coding: utf-8
"""
ain7/organizations/filters.py
"""
#
# Copyright © 2007-2016 AIn7 Devel Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
import django_filters
from ain7.organizations.models import Organization
class OrganizationFilter(django_filters.FilterSet):
class Meta:
model = Organization
fields = {
'name': ['icontains'],
'activity_field__field': ['icontains'],
}
|
Fix incorrect search on activity fields
|
Fix incorrect search on activity fields
|
Python
|
lgpl-2.1
|
ain7/www.ain7.org,ain7/www.ain7.org,ain7/www.ain7.org,ain7/www.ain7.org
|
# -*- coding: utf-8
"""
ain7/organizations/filters.py
"""
#
# Copyright © 2007-2016 AIn7 Devel Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
import django_filters
from ain7.organizations.models import Organization
class OrganizationFilter(django_filters.FilterSet):
class Meta:
model = Organization
fields = {
'name': ['icontains'],
'activity_field': ['icontains'],
}
Fix incorrect search on activity fields
|
# -*- coding: utf-8
"""
ain7/organizations/filters.py
"""
#
# Copyright © 2007-2016 AIn7 Devel Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
import django_filters
from ain7.organizations.models import Organization
class OrganizationFilter(django_filters.FilterSet):
class Meta:
model = Organization
fields = {
'name': ['icontains'],
'activity_field__field': ['icontains'],
}
|
<commit_before># -*- coding: utf-8
"""
ain7/organizations/filters.py
"""
#
# Copyright © 2007-2016 AIn7 Devel Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
import django_filters
from ain7.organizations.models import Organization
class OrganizationFilter(django_filters.FilterSet):
class Meta:
model = Organization
fields = {
'name': ['icontains'],
'activity_field': ['icontains'],
}
<commit_msg>Fix incorrect search on activity fields<commit_after>
|
# -*- coding: utf-8
"""
ain7/organizations/filters.py
"""
#
# Copyright © 2007-2016 AIn7 Devel Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
import django_filters
from ain7.organizations.models import Organization
class OrganizationFilter(django_filters.FilterSet):
class Meta:
model = Organization
fields = {
'name': ['icontains'],
'activity_field__field': ['icontains'],
}
|
# -*- coding: utf-8
"""
ain7/organizations/filters.py
"""
#
# Copyright © 2007-2016 AIn7 Devel Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
import django_filters
from ain7.organizations.models import Organization
class OrganizationFilter(django_filters.FilterSet):
class Meta:
model = Organization
fields = {
'name': ['icontains'],
'activity_field': ['icontains'],
}
Fix incorrect search on activity fields# -*- coding: utf-8
"""
ain7/organizations/filters.py
"""
#
# Copyright © 2007-2016 AIn7 Devel Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
import django_filters
from ain7.organizations.models import Organization
class OrganizationFilter(django_filters.FilterSet):
class Meta:
model = Organization
fields = {
'name': ['icontains'],
'activity_field__field': ['icontains'],
}
|
<commit_before># -*- coding: utf-8
"""
ain7/organizations/filters.py
"""
#
# Copyright © 2007-2016 AIn7 Devel Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
import django_filters
from ain7.organizations.models import Organization
class OrganizationFilter(django_filters.FilterSet):
class Meta:
model = Organization
fields = {
'name': ['icontains'],
'activity_field': ['icontains'],
}
<commit_msg>Fix incorrect search on activity fields<commit_after># -*- coding: utf-8
"""
ain7/organizations/filters.py
"""
#
# Copyright © 2007-2016 AIn7 Devel Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
import django_filters
from ain7.organizations.models import Organization
class OrganizationFilter(django_filters.FilterSet):
class Meta:
model = Organization
fields = {
'name': ['icontains'],
'activity_field__field': ['icontains'],
}
|
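For readers less used to django-filter's dict syntax: 'activity_field' on Organization is a relation, so an icontains lookup applied to it directly cannot match text (hence the incorrect search the commit fixes), while 'activity_field__field' traverses to a text column on the related model. The column name is inferred from the lookup, not checked against the ain7 models; the fixed filter boils down to roughly this queryset:

from ain7.organizations.models import Organization

# Rough equivalent of ?activity_field__field__icontains=aero through the fixed filter
Organization.objects.filter(activity_field__field__icontains='aero')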
643e04ec09612d6f36dcd98dba44e00011674353
|
fito/data_store/dict_ds.py
|
fito/data_store/dict_ds.py
|
from fito.data_store.base import BaseDataStore
class DictDataStore(BaseDataStore):
def __init__(self, *args, **kwargs):
super(DictDataStore, self).__init__(*args, **kwargs)
self.data = {}
def iteritems(self):
return self.data.iteritems()
def save(self, spec, object):
self.data[spec] = object
def _get(self, spec):
if spec not in self.data: raise KeyError("Spec not found: {}".format(spec))
return self.data.get(spec)
def iterkeys(self):
return self.data.iterkeys()
|
from fito.data_store.base import BaseDataStore
class DictDataStore(BaseDataStore):
def __init__(self, *args, **kwargs):
super(DictDataStore, self).__init__(*args, **kwargs)
self.data = {}
def iteritems(self):
return self.data.iteritems()
def save(self, spec, object):
self.data[spec] = object
def _get(self, spec):
if spec not in self.data: raise KeyError("Spec not found: {}".format(spec))
return self.data.get(spec)
def iterkeys(self):
return self.data.iterkeys()
def clean(self):
self.data = {}
|
Clean method for dict ds
|
Clean method for dict ds
|
Python
|
mit
|
elsonidoq/fito
|
from fito.data_store.base import BaseDataStore
class DictDataStore(BaseDataStore):
def __init__(self, *args, **kwargs):
super(DictDataStore, self).__init__(*args, **kwargs)
self.data = {}
def iteritems(self):
return self.data.iteritems()
def save(self, spec, object):
self.data[spec] = object
def _get(self, spec):
if spec not in self.data: raise KeyError("Spec not found: {}".format(spec))
return self.data.get(spec)
def iterkeys(self):
return self.data.iterkeys()
Clean method for dict ds
|
from fito.data_store.base import BaseDataStore
class DictDataStore(BaseDataStore):
def __init__(self, *args, **kwargs):
super(DictDataStore, self).__init__(*args, **kwargs)
self.data = {}
def iteritems(self):
return self.data.iteritems()
def save(self, spec, object):
self.data[spec] = object
def _get(self, spec):
if spec not in self.data: raise KeyError("Spec not found: {}".format(spec))
return self.data.get(spec)
def iterkeys(self):
return self.data.iterkeys()
def clean(self):
self.data = {}
|
<commit_before>from fito.data_store.base import BaseDataStore
class DictDataStore(BaseDataStore):
def __init__(self, *args, **kwargs):
super(DictDataStore, self).__init__(*args, **kwargs)
self.data = {}
def iteritems(self):
return self.data.iteritems()
def save(self, spec, object):
self.data[spec] = object
def _get(self, spec):
if spec not in self.data: raise KeyError("Spec not found: {}".format(spec))
return self.data.get(spec)
def iterkeys(self):
return self.data.iterkeys()
<commit_msg>Clean method for dict ds<commit_after>
|
from fito.data_store.base import BaseDataStore
class DictDataStore(BaseDataStore):
def __init__(self, *args, **kwargs):
super(DictDataStore, self).__init__(*args, **kwargs)
self.data = {}
def iteritems(self):
return self.data.iteritems()
def save(self, spec, object):
self.data[spec] = object
def _get(self, spec):
if spec not in self.data: raise KeyError("Spec not found: {}".format(spec))
return self.data.get(spec)
def iterkeys(self):
return self.data.iterkeys()
def clean(self):
self.data = {}
|
from fito.data_store.base import BaseDataStore
class DictDataStore(BaseDataStore):
def __init__(self, *args, **kwargs):
super(DictDataStore, self).__init__(*args, **kwargs)
self.data = {}
def iteritems(self):
return self.data.iteritems()
def save(self, spec, object):
self.data[spec] = object
def _get(self, spec):
if spec not in self.data: raise KeyError("Spec not found: {}".format(spec))
return self.data.get(spec)
def iterkeys(self):
return self.data.iterkeys()
Clean method for dict dsfrom fito.data_store.base import BaseDataStore
class DictDataStore(BaseDataStore):
def __init__(self, *args, **kwargs):
super(DictDataStore, self).__init__(*args, **kwargs)
self.data = {}
def iteritems(self):
return self.data.iteritems()
def save(self, spec, object):
self.data[spec] = object
def _get(self, spec):
if spec not in self.data: raise KeyError("Spec not found: {}".format(spec))
return self.data.get(spec)
def iterkeys(self):
return self.data.iterkeys()
def clean(self):
self.data = {}
|
<commit_before>from fito.data_store.base import BaseDataStore
class DictDataStore(BaseDataStore):
def __init__(self, *args, **kwargs):
super(DictDataStore, self).__init__(*args, **kwargs)
self.data = {}
def iteritems(self):
return self.data.iteritems()
def save(self, spec, object):
self.data[spec] = object
def _get(self, spec):
if spec not in self.data: raise KeyError("Spec not found: {}".format(spec))
return self.data.get(spec)
def iterkeys(self):
return self.data.iterkeys()
<commit_msg>Clean method for dict ds<commit_after>from fito.data_store.base import BaseDataStore
class DictDataStore(BaseDataStore):
def __init__(self, *args, **kwargs):
super(DictDataStore, self).__init__(*args, **kwargs)
self.data = {}
def iteritems(self):
return self.data.iteritems()
def save(self, spec, object):
self.data[spec] = object
def _get(self, spec):
if spec not in self.data: raise KeyError("Spec not found: {}".format(spec))
return self.data.get(spec)
def iterkeys(self):
return self.data.iterkeys()
def clean(self):
self.data = {}
|
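Minimal usage sketch for the new clean() method; constructor arguments, if BaseDataStore requires any, are omitted, and the spec below is a stand-in for a real fito Spec (any hashable works for the dict-backed store):

from fito.data_store.dict_ds import DictDataStore

spec = ('dummy-spec',)              # placeholder for a real Spec instance
store = DictDataStore()
store.save(spec, {'result': 42})
assert store._get(spec) == {'result': 42}
store.clean()                       # forgets every cached entry at once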
d12916352166f8c3fcff37a9a3cdd58b0ed3aa5c
|
setup.py
|
setup.py
|
import os
import re
import sys
from os.path import dirname, join as pjoin
from sys import version_info
from setuptools import setup, find_packages
with open(pjoin(dirname(__file__), 'furl', '__init__.py')) as fd:
VERSION = re.compile(
r".*__version__ = '(.*?)'", re.S).match(fd.read()).group(1)
if sys.argv[-1] == 'publish':
'''
Publish to PyPi.
'''
os.system('python setup.py sdist upload')
sys.exit()
long_description = (
'Information and documentation at https://github.com/gruns/furl.')
setup(name='furl',
version=VERSION,
author='Arthur Grunseid',
author_email='grunseid@gmail.com',
url='https://github.com/gruns/furl',
license='Unlicense',
description='URL manipulation made simple.',
long_description=long_description,
packages=find_packages(),
include_package_data=True,
platforms=['any'],
classifiers=['Topic :: Internet',
'Natural Language :: English',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: Freely Distributable',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
install_requires=['orderedmultidict >= 0.7.1'],
test_suite='tests',
tests_require=[] if version_info[0:2] >= [2, 7] else ['unittest2'],
)
|
import os
import re
import sys
from sys import version_info
from os.path import dirname, join as pjoin
from setuptools import setup, find_packages
with open(pjoin(dirname(__file__), 'furl', '__init__.py')) as fd:
VERSION = re.compile(
r".*__version__ = '(.*?)'", re.S).match(fd.read()).group(1)
if sys.argv[-1] == 'publish':
"""
Publish to PyPi.
"""
os.system('python setup.py sdist upload')
sys.exit()
long_description = (
'Information and documentation at https://github.com/gruns/furl.')
setup(name='furl',
version=VERSION,
author='Arthur Grunseid',
author_email='grunseid@gmail.com',
url='https://github.com/gruns/furl',
license='Unlicense',
description='URL manipulation made simple.',
long_description=long_description,
packages=find_packages(),
include_package_data=True,
platforms=['any'],
classifiers=['Topic :: Internet',
'Natural Language :: English',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: Freely Distributable',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
install_requires=['orderedmultidict >= 0.7.2'],
test_suite='tests',
tests_require=[] if version_info[0:2] >= [2, 7] else ['unittest2'],
)
|
Upgrade orderedmultidict dependency to v0.7.2.
|
Upgrade orderedmultidict dependency to v0.7.2.
|
Python
|
unlicense
|
Gerhut/furl,guiquanz/furl,lastfm/furl,penyatree/furl
|
import os
import re
import sys
from os.path import dirname, join as pjoin
from sys import version_info
from setuptools import setup, find_packages
with open(pjoin(dirname(__file__), 'furl', '__init__.py')) as fd:
VERSION = re.compile(
r".*__version__ = '(.*?)'", re.S).match(fd.read()).group(1)
if sys.argv[-1] == 'publish':
'''
Publish to PyPi.
'''
os.system('python setup.py sdist upload')
sys.exit()
long_description = (
'Information and documentation at https://github.com/gruns/furl.')
setup(name='furl',
version=VERSION,
author='Arthur Grunseid',
author_email='grunseid@gmail.com',
url='https://github.com/gruns/furl',
license='Unlicense',
description='URL manipulation made simple.',
long_description=long_description,
packages=find_packages(),
include_package_data=True,
platforms=['any'],
classifiers=['Topic :: Internet',
'Natural Language :: English',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: Freely Distributable',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
install_requires=['orderedmultidict >= 0.7.1'],
test_suite='tests',
tests_require=[] if version_info[0:2] >= [2, 7] else ['unittest2'],
)
Upgrade orderedmultidict dependency to v0.7.2.
|
import os
import re
import sys
from sys import version_info
from os.path import dirname, join as pjoin
from setuptools import setup, find_packages
with open(pjoin(dirname(__file__), 'furl', '__init__.py')) as fd:
VERSION = re.compile(
r".*__version__ = '(.*?)'", re.S).match(fd.read()).group(1)
if sys.argv[-1] == 'publish':
"""
Publish to PyPi.
"""
os.system('python setup.py sdist upload')
sys.exit()
long_description = (
'Information and documentation at https://github.com/gruns/furl.')
setup(name='furl',
version=VERSION,
author='Arthur Grunseid',
author_email='grunseid@gmail.com',
url='https://github.com/gruns/furl',
license='Unlicense',
description='URL manipulation made simple.',
long_description=long_description,
packages=find_packages(),
include_package_data=True,
platforms=['any'],
classifiers=['Topic :: Internet',
'Natural Language :: English',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: Freely Distributable',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
install_requires=['orderedmultidict >= 0.7.2'],
test_suite='tests',
tests_require=[] if version_info[0:2] >= [2, 7] else ['unittest2'],
)
|
<commit_before>import os
import re
import sys
from os.path import dirname, join as pjoin
from sys import version_info
from setuptools import setup, find_packages
with open(pjoin(dirname(__file__), 'furl', '__init__.py')) as fd:
VERSION = re.compile(
r".*__version__ = '(.*?)'", re.S).match(fd.read()).group(1)
if sys.argv[-1] == 'publish':
'''
Publish to PyPi.
'''
os.system('python setup.py sdist upload')
sys.exit()
long_description = (
'Information and documentation at https://github.com/gruns/furl.')
setup(name='furl',
version=VERSION,
author='Arthur Grunseid',
author_email='grunseid@gmail.com',
url='https://github.com/gruns/furl',
license='Unlicense',
description='URL manipulation made simple.',
long_description=long_description,
packages=find_packages(),
include_package_data=True,
platforms=['any'],
classifiers=['Topic :: Internet',
'Natural Language :: English',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: Freely Distributable',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
install_requires=['orderedmultidict >= 0.7.1'],
test_suite='tests',
tests_require=[] if version_info[0:2] >= [2, 7] else ['unittest2'],
)
<commit_msg>Upgrade orderedmultidict dependency to v0.7.2.<commit_after>
|
import os
import re
import sys
from sys import version_info
from os.path import dirname, join as pjoin
from setuptools import setup, find_packages
with open(pjoin(dirname(__file__), 'furl', '__init__.py')) as fd:
VERSION = re.compile(
r".*__version__ = '(.*?)'", re.S).match(fd.read()).group(1)
if sys.argv[-1] == 'publish':
"""
Publish to PyPi.
"""
os.system('python setup.py sdist upload')
sys.exit()
long_description = (
'Information and documentation at https://github.com/gruns/furl.')
setup(name='furl',
version=VERSION,
author='Arthur Grunseid',
author_email='grunseid@gmail.com',
url='https://github.com/gruns/furl',
license='Unlicense',
description='URL manipulation made simple.',
long_description=long_description,
packages=find_packages(),
include_package_data=True,
platforms=['any'],
classifiers=['Topic :: Internet',
'Natural Language :: English',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: Freely Distributable',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
install_requires=['orderedmultidict >= 0.7.2'],
test_suite='tests',
tests_require=[] if version_info[0:2] >= [2, 7] else ['unittest2'],
)
|
import os
import re
import sys
from os.path import dirname, join as pjoin
from sys import version_info
from setuptools import setup, find_packages
with open(pjoin(dirname(__file__), 'furl', '__init__.py')) as fd:
VERSION = re.compile(
r".*__version__ = '(.*?)'", re.S).match(fd.read()).group(1)
if sys.argv[-1] == 'publish':
'''
Publish to PyPi.
'''
os.system('python setup.py sdist upload')
sys.exit()
long_description = (
'Information and documentation at https://github.com/gruns/furl.')
setup(name='furl',
version=VERSION,
author='Arthur Grunseid',
author_email='grunseid@gmail.com',
url='https://github.com/gruns/furl',
license='Unlicense',
description='URL manipulation made simple.',
long_description=long_description,
packages=find_packages(),
include_package_data=True,
platforms=['any'],
classifiers=['Topic :: Internet',
'Natural Language :: English',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: Freely Distributable',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
install_requires=['orderedmultidict >= 0.7.1'],
test_suite='tests',
tests_require=[] if version_info[0:2] >= [2, 7] else ['unittest2'],
)
Upgrade orderedmultidict dependency to v0.7.2.import os
import re
import sys
from sys import version_info
from os.path import dirname, join as pjoin
from setuptools import setup, find_packages
with open(pjoin(dirname(__file__), 'furl', '__init__.py')) as fd:
VERSION = re.compile(
r".*__version__ = '(.*?)'", re.S).match(fd.read()).group(1)
if sys.argv[-1] == 'publish':
"""
Publish to PyPi.
"""
os.system('python setup.py sdist upload')
sys.exit()
long_description = (
'Information and documentation at https://github.com/gruns/furl.')
setup(name='furl',
version=VERSION,
author='Arthur Grunseid',
author_email='grunseid@gmail.com',
url='https://github.com/gruns/furl',
license='Unlicense',
description='URL manipulation made simple.',
long_description=long_description,
packages=find_packages(),
include_package_data=True,
platforms=['any'],
classifiers=['Topic :: Internet',
'Natural Language :: English',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: Freely Distributable',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
install_requires=['orderedmultidict >= 0.7.2'],
test_suite='tests',
tests_require=[] if version_info[0:2] >= [2, 7] else ['unittest2'],
)
|
<commit_before>import os
import re
import sys
from os.path import dirname, join as pjoin
from sys import version_info
from setuptools import setup, find_packages
with open(pjoin(dirname(__file__), 'furl', '__init__.py')) as fd:
VERSION = re.compile(
r".*__version__ = '(.*?)'", re.S).match(fd.read()).group(1)
if sys.argv[-1] == 'publish':
'''
Publish to PyPi.
'''
os.system('python setup.py sdist upload')
sys.exit()
long_description = (
'Information and documentation at https://github.com/gruns/furl.')
setup(name='furl',
version=VERSION,
author='Arthur Grunseid',
author_email='grunseid@gmail.com',
url='https://github.com/gruns/furl',
license='Unlicense',
description='URL manipulation made simple.',
long_description=long_description,
packages=find_packages(),
include_package_data=True,
platforms=['any'],
classifiers=['Topic :: Internet',
'Natural Language :: English',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: Freely Distributable',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
install_requires=['orderedmultidict >= 0.7.1'],
test_suite='tests',
tests_require=[] if version_info[0:2] >= [2, 7] else ['unittest2'],
)
<commit_msg>Upgrade orderedmultidict dependency to v0.7.2.<commit_after>import os
import re
import sys
from sys import version_info
from os.path import dirname, join as pjoin
from setuptools import setup, find_packages
with open(pjoin(dirname(__file__), 'furl', '__init__.py')) as fd:
VERSION = re.compile(
r".*__version__ = '(.*?)'", re.S).match(fd.read()).group(1)
if sys.argv[-1] == 'publish':
"""
Publish to PyPi.
"""
os.system('python setup.py sdist upload')
sys.exit()
long_description = (
'Information and documentation at https://github.com/gruns/furl.')
setup(name='furl',
version=VERSION,
author='Arthur Grunseid',
author_email='grunseid@gmail.com',
url='https://github.com/gruns/furl',
license='Unlicense',
description='URL manipulation made simple.',
long_description=long_description,
packages=find_packages(),
include_package_data=True,
platforms=['any'],
classifiers=['Topic :: Internet',
'Natural Language :: English',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: Freely Distributable',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
install_requires=['orderedmultidict >= 0.7.2'],
test_suite='tests',
tests_require=[] if version_info[0:2] >= [2, 7] else ['unittest2'],
)
|
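The record above only tightens a minimum version pin. As a minimal sketch of what that pin means at install time (using the `packaging` library, which is not part of the record; the version strings come from the diff itself):

# Sketch only: how pip-style tools evaluate the ">= 0.7.2" specifier.
from packaging.specifiers import SpecifierSet

spec = SpecifierSet(">=0.7.2")
print("0.7.1" in spec)  # False -- the previous minimum is now rejected
print("0.7.2" in spec)  # True  -- the new minimum is accepted
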
72e6a282f5669a420fe68069149a51b91f7c93fe
|
setup.py
|
setup.py
|
import os
from setuptools import setup
setup(
name="pcanet",
version="0.0.1",
author="Takeshi Ishita",
py_modules=["pcanet"],
install_requires=[
'chainer',
'numpy',
'psutil',
'recommonmark',
'scikit-learn',
'scipy',
'sphinx',
],
)
|
import os
from setuptools import setup
setup(
name="pcanet",
version="0.0.1",
author="Takeshi Ishita",
py_modules=["pcanet"],
install_requires=[
'chainer',
'numpy',
'psutil',
'recommonmark',
'scikit-learn',
'scipy',
'sphinx'
],
dependency_links = [
'-e git://github.com/cupy/cupy.git#egg=cupy'
]
)
|
Add a dependency link for cupy
|
Add a dependency link for cupy
|
Python
|
mit
|
IshitaTakeshi/PCANet
|
import os
from setuptools import setup
setup(
name="pcanet",
version="0.0.1",
author="Takeshi Ishita",
py_modules=["pcanet"],
install_requires=[
'chainer',
'numpy',
'psutil',
'recommonmark',
'scikit-learn',
'scipy',
'sphinx',
],
)
Add a dependency link for cupy
|
import os
from setuptools import setup
setup(
name="pcanet",
version="0.0.1",
author="Takeshi Ishita",
py_modules=["pcanet"],
install_requires=[
'chainer',
'numpy',
'psutil',
'recommonmark',
'scikit-learn',
'scipy',
'sphinx'
],
dependency_links = [
'-e git://github.com/cupy/cupy.git#egg=cupy'
]
)
|
<commit_before>import os
from setuptools import setup
setup(
name="pcanet",
version="0.0.1",
author="Takeshi Ishita",
py_modules=["pcanet"],
install_requires=[
'chainer',
'numpy',
'psutil',
'recommonmark',
'scikit-learn',
'scipy',
'sphinx',
],
)
<commit_msg>Add a dependency link for cupy<commit_after>
|
import os
from setuptools import setup
setup(
name="pcanet",
version="0.0.1",
author="Takeshi Ishita",
py_modules=["pcanet"],
install_requires=[
'chainer',
'numpy',
'psutil',
'recommonmark',
'scikit-learn',
'scipy',
'sphinx'
],
dependency_links = [
'-e git://github.com/cupy/cupy.git#egg=cupy'
]
)
|
import os
from setuptools import setup
setup(
name="pcanet",
version="0.0.1",
author="Takeshi Ishita",
py_modules=["pcanet"],
install_requires=[
'chainer',
'numpy',
'psutil',
'recommonmark',
'scikit-learn',
'scipy',
'sphinx',
],
)
Add a dependency link for cupy
import os
from setuptools import setup
setup(
name="pcanet",
version="0.0.1",
author="Takeshi Ishita",
py_modules=["pcanet"],
install_requires=[
'chainer',
'numpy',
'psutil',
'recommonmark',
'scikit-learn',
'scipy',
'sphinx'
],
dependency_links = [
'-e git://github.com/cupy/cupy.git#egg=cupy'
]
)
|
<commit_before>import os
from setuptools import setup
setup(
name="pcanet",
version="0.0.1",
author="Takeshi Ishita",
py_modules=["pcanet"],
install_requires=[
'chainer',
'numpy',
'psutil',
'recommonmark',
'scikit-learn',
'scipy',
'sphinx',
],
)
<commit_msg>Add a dependency link for cupy<commit_after>import os
from setuptools import setup
setup(
name="pcanet",
version="0.0.1",
author="Takeshi Ishita",
py_modules=["pcanet"],
install_requires=[
'chainer',
'numpy',
'psutil',
'recommonmark',
'scikit-learn',
'scipy',
'sphinx'
],
dependency_links = [
'-e git://github.com/cupy/cupy.git#egg=cupy'
]
)
|
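A side note on the record above, not part of the commit itself: the `-e git://...` form is pip requirements-file syntax rather than a conventional `dependency_links` entry, and `dependency_links` has since been deprecated. A minimal sketch of the more usual spellings, with an assumed package name and not the project's actual setup.py:

# Sketch only (name and URL layout assumed).
from setuptools import setup

setup(
    name="example-pkg",
    version="0.0.1",
    py_modules=["example"],
    install_requires=["cupy"],
    # dependency_links takes a plain URL with an #egg= fragment (no "-e " prefix):
    dependency_links=["git+https://github.com/cupy/cupy.git#egg=cupy"],
    # A modern alternative is a PEP 508 direct reference instead:
    #     install_requires=["cupy @ git+https://github.com/cupy/cupy.git"],
)
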
876a6ff5f09786ba42cf6a354c0acc77265840ca
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
import plata
setup(name='Plata',
version=plata.__version__,
description='Plata - the lean and mean Django-based Shop',
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
author='Matthias Kestenholz',
author_email='mk@feinheit.ch',
url='https://github.com/matthiask/plata/',
license='BSD License',
platforms=['OS Independent'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
)
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
import plata
setup(name='Plata',
version=plata.__version__,
description='Plata - the lean and mean Django-based Shop',
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
author='Matthias Kestenholz',
author_email='mk@feinheit.ch',
url='https://github.com/matthiask/plata/',
license='BSD License',
platforms=['OS Independent'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
)
|
Change the development status classifier to beta
|
Change the development status classifier to beta
|
Python
|
bsd-3-clause
|
armicron/plata,stefanklug/plata,allink/plata,armicron/plata,armicron/plata
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
import plata
setup(name='Plata',
version=plata.__version__,
description='Plata - the lean and mean Django-based Shop',
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
author='Matthias Kestenholz',
author_email='mk@feinheit.ch',
url='https://github.com/matthiask/plata/',
license='BSD License',
platforms=['OS Independent'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
)
Change the development status classifier to beta
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
import plata
setup(name='Plata',
version=plata.__version__,
description='Plata - the lean and mean Django-based Shop',
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
author='Matthias Kestenholz',
author_email='mk@feinheit.ch',
url='https://github.com/matthiask/plata/',
license='BSD License',
platforms=['OS Independent'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
)
|
<commit_before>#!/usr/bin/env python
import os
from setuptools import setup, find_packages
import plata
setup(name='Plata',
version=plata.__version__,
description='Plata - the lean and mean Django-based Shop',
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
author='Matthias Kestenholz',
author_email='mk@feinheit.ch',
url='https://github.com/matthiask/plata/',
license='BSD License',
platforms=['OS Independent'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
)
<commit_msg>Change the development status classifier to beta<commit_after>
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
import plata
setup(name='Plata',
version=plata.__version__,
description='Plata - the lean and mean Django-based Shop',
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
author='Matthias Kestenholz',
author_email='mk@feinheit.ch',
url='https://github.com/matthiask/plata/',
license='BSD License',
platforms=['OS Independent'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
)
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
import plata
setup(name='Plata',
version=plata.__version__,
description='Plata - the lean and mean Django-based Shop',
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
author='Matthias Kestenholz',
author_email='mk@feinheit.ch',
url='https://github.com/matthiask/plata/',
license='BSD License',
platforms=['OS Independent'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
)
Change the development status classifier to beta
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
import plata
setup(name='Plata',
version=plata.__version__,
description='Plata - the lean and mean Django-based Shop',
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
author='Matthias Kestenholz',
author_email='mk@feinheit.ch',
url='https://github.com/matthiask/plata/',
license='BSD License',
platforms=['OS Independent'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
)
|
<commit_before>#!/usr/bin/env python
import os
from setuptools import setup, find_packages
import plata
setup(name='Plata',
version=plata.__version__,
description='Plata - the lean and mean Django-based Shop',
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
author='Matthias Kestenholz',
author_email='mk@feinheit.ch',
url='https://github.com/matthiask/plata/',
license='BSD License',
platforms=['OS Independent'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
)
<commit_msg>Change the development status classifier to beta<commit_after>#!/usr/bin/env python
import os
from setuptools import setup, find_packages
import plata
setup(name='Plata',
version=plata.__version__,
description='Plata - the lean and mean Django-based Shop',
long_description=open(os.path.join(os.path.dirname(__file__), 'README.rst')).read(),
author='Matthias Kestenholz',
author_email='mk@feinheit.ch',
url='https://github.com/matthiask/plata/',
license='BSD License',
platforms=['OS Independent'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries :: Application Frameworks',
],
packages=find_packages(),
include_package_data=True,
zip_safe=False,
)
|
c4cf302fc3799bef800615f5c744b015c9ae5f75
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
from setuptools import find_packages, setup
long_desc = open('README.rst').read()
setup(
name='dredd-hooks-python',
version='0.1.3',
url='https://github.com/apiaryio/dredd-hooks-python/',
download_url='http://pypi.python.org/pypi/dredd_hooks',
license='MIT License',
author='Vilibald Wanča',
author_email='wvi@apiary.io',
maintainer='Apiary',
maintainer_email='support@apiary.io',
description='Python Hooks Bridge for Dredd API Testing Framework',
long_description=long_desc,
zip_safe=True,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Build Tools',
],
keywords='HTTP API testing Dredd',
platforms='any',
packages=find_packages(),
include_package_data=True,
entry_points={
'console_scripts': [
'dredd-hooks-python = dredd_hooks.__main__:main'
],
},
tests_require=['flake8'],
test_suite='test',
)
|
# -*- coding: utf-8 -*-
from setuptools import find_packages, setup
long_desc = open('README.rst').read()
setup(
name='dredd_hooks',
version='0.1.3',
url='https://github.com/apiaryio/dredd-hooks-python/',
download_url='http://pypi.python.org/pypi/dredd_hooks',
license='MIT License',
author='Vilibald Wanča',
author_email='wvi@apiary.io',
maintainer='Apiary',
maintainer_email='support@apiary.io',
description='Python Hooks Bridge for Dredd API Testing Framework',
long_description=long_desc,
zip_safe=True,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Build Tools',
],
keywords='HTTP API testing Dredd',
platforms='any',
packages=find_packages(),
include_package_data=True,
entry_points={
'console_scripts': [
'dredd-hooks-python = dredd_hooks.__main__:main'
],
},
tests_require=['flake8'],
test_suite='test',
)
|
Change name back to dredd_hooks
|
Change name back to dredd_hooks
|
Python
|
mit
|
apiaryio/dredd-hooks-python
|
# -*- coding: utf-8 -*-
from setuptools import find_packages, setup
long_desc = open('README.rst').read()
setup(
name='dredd-hooks-python',
version='0.1.3',
url='https://github.com/apiaryio/dredd-hooks-python/',
download_url='http://pypi.python.org/pypi/dredd_hooks',
license='MIT License',
author='Vilibald Wanča',
author_email='wvi@apiary.io',
maintainer='Apiary',
maintainer_email='support@apiary.io',
description='Python Hooks Bridge for Dredd API Testing Framework',
long_description=long_desc,
zip_safe=True,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Build Tools',
],
keywords='HTTP API testing Dredd',
platforms='any',
packages=find_packages(),
include_package_data=True,
entry_points={
'console_scripts': [
'dredd-hooks-python = dredd_hooks.__main__:main'
],
},
tests_require=['flake8'],
test_suite='test',
)
Change name back to dredd_hooks
|
# -*- coding: utf-8 -*-
from setuptools import find_packages, setup
long_desc = open('README.rst').read()
setup(
name='dredd_hooks',
version='0.1.3',
url='https://github.com/apiaryio/dredd-hooks-python/',
download_url='http://pypi.python.org/pypi/dredd_hooks',
license='MIT License',
author='Vilibald Wanča',
author_email='wvi@apiary.io',
maintainer='Apiary',
maintainer_email='support@apiary.io',
description='Python Hooks Bridge for Dredd API Testing Framework',
long_description=long_desc,
zip_safe=True,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Build Tools',
],
keywords='HTTP API testing Dredd',
platforms='any',
packages=find_packages(),
include_package_data=True,
entry_points={
'console_scripts': [
'dredd-hooks-python = dredd_hooks.__main__:main'
],
},
tests_require=['flake8'],
test_suite='test',
)
|
<commit_before># -*- coding: utf-8 -*-
from setuptools import find_packages, setup
long_desc = open('README.rst').read()
setup(
name='dredd-hooks-python',
version='0.1.3',
url='https://github.com/apiaryio/dredd-hooks-python/',
download_url='http://pypi.python.org/pypi/dredd_hooks',
license='MIT License',
author='Vilibald Wanča',
author_email='wvi@apiary.io',
maintainer='Apiary',
maintainer_email='support@apiary.io',
description='Python Hooks Bridge for Dredd API Testing Framework',
long_description=long_desc,
zip_safe=True,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Build Tools',
],
keywords='HTTP API testing Dredd',
platforms='any',
packages=find_packages(),
include_package_data=True,
entry_points={
'console_scripts': [
'dredd-hooks-python = dredd_hooks.__main__:main'
],
},
tests_require=['flake8'],
test_suite='test',
)
<commit_msg>Change name back to dredd_hooks<commit_after>
|
# -*- coding: utf-8 -*-
from setuptools import find_packages, setup
long_desc = open('README.rst').read()
setup(
name='dredd_hooks',
version='0.1.3',
url='https://github.com/apiaryio/dredd-hooks-python/',
download_url='http://pypi.python.org/pypi/dredd_hooks',
license='MIT License',
author='Vilibald Wanča',
author_email='wvi@apiary.io',
maintainer='Apiary',
maintainer_email='support@apiary.io',
description='Python Hooks Bridge for Dredd API Testing Framework',
long_description=long_desc,
zip_safe=True,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Build Tools',
],
keywords='HTTP API testing Dredd',
platforms='any',
packages=find_packages(),
include_package_data=True,
entry_points={
'console_scripts': [
'dredd-hooks-python = dredd_hooks.__main__:main'
],
},
tests_require=['flake8'],
test_suite='test',
)
|
# -*- coding: utf-8 -*-
from setuptools import find_packages, setup
long_desc = open('README.rst').read()
setup(
name='dredd-hooks-python',
version='0.1.3',
url='https://github.com/apiaryio/dredd-hooks-python/',
download_url='http://pypi.python.org/pypi/dredd_hooks',
license='MIT License',
author='Vilibald Wanča',
author_email='wvi@apiary.io',
maintainer='Apiary',
maintainer_email='support@apiary.io',
description='Python Hooks Bridge for Dredd API Testing Framework',
long_description=long_desc,
zip_safe=True,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Build Tools',
],
keywords='HTTP API testing Dredd',
platforms='any',
packages=find_packages(),
include_package_data=True,
entry_points={
'console_scripts': [
'dredd-hooks-python = dredd_hooks.__main__:main'
],
},
tests_require=['flake8'],
test_suite='test',
)
Change name back to dredd_hooks
# -*- coding: utf-8 -*-
from setuptools import find_packages, setup
long_desc = open('README.rst').read()
setup(
name='dredd_hooks',
version='0.1.3',
url='https://github.com/apiaryio/dredd-hooks-python/',
download_url='http://pypi.python.org/pypi/dredd_hooks',
license='MIT License',
author='Vilibald Wanča',
author_email='wvi@apiary.io',
maintainer='Apiary',
maintainer_email='support@apiary.io',
description='Python Hooks Bridge for Dredd API Testing Framework',
long_description=long_desc,
zip_safe=True,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Build Tools',
],
keywords='HTTP API testing Dredd',
platforms='any',
packages=find_packages(),
include_package_data=True,
entry_points={
'console_scripts': [
'dredd-hooks-python = dredd_hooks.__main__:main'
],
},
tests_require=['flake8'],
test_suite='test',
)
|
<commit_before># -*- coding: utf-8 -*-
from setuptools import find_packages, setup
long_desc = open('README.rst').read()
setup(
name='dredd-hooks-python',
version='0.1.3',
url='https://github.com/apiaryio/dredd-hooks-python/',
download_url='http://pypi.python.org/pypi/dredd_hooks',
license='MIT License',
author='Vilibald Wanča',
author_email='wvi@apiary.io',
maintainer='Apiary',
maintainer_email='support@apiary.io',
description='Python Hooks Bridge for Dredd API Testing Framework',
long_description=long_desc,
zip_safe=True,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Build Tools',
],
keywords='HTTP API testing Dredd',
platforms='any',
packages=find_packages(),
include_package_data=True,
entry_points={
'console_scripts': [
'dredd-hooks-python = dredd_hooks.__main__:main'
],
},
tests_require=['flake8'],
test_suite='test',
)
<commit_msg>Change name back to dredd_hooks<commit_after># -*- coding: utf-8 -*-
from setuptools import find_packages, setup
long_desc = open('README.rst').read()
setup(
name='dredd_hooks',
version='0.1.3',
url='https://github.com/apiaryio/dredd-hooks-python/',
download_url='http://pypi.python.org/pypi/dredd_hooks',
license='MIT License',
author='Vilibald Wanča',
author_email='wvi@apiary.io',
maintainer='Apiary',
maintainer_email='support@apiary.io',
description='Python Hooks Bridge for Dredd API Testing Framework',
long_description=long_desc,
zip_safe=True,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Build Tools',
],
keywords='HTTP API testing Dredd',
platforms='any',
packages=find_packages(),
include_package_data=True,
entry_points={
'console_scripts': [
'dredd-hooks-python = dredd_hooks.__main__:main'
],
},
tests_require=['flake8'],
test_suite='test',
)
|
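For context on the rename above (an aside, not from the commit): `name=` is the distribution name used on PyPI and by pip, and it can differ from both the importable package name and the console-script name. A small hypothetical sketch of the three living side by side:

# Sketch with assumed values; only the distribution name changed in the commit.
from setuptools import setup, find_packages

setup(
    name="dredd_hooks",                      # pip install dredd_hooks
    version="0.0.0",
    packages=find_packages(),                # import dredd_hooks
    entry_points={
        "console_scripts": [
            "dredd-hooks-python = dredd_hooks.__main__:main",  # CLI command name
        ],
    },
)
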
c09bd08d5f46b2831dc5af94cd97f614f7ed3d59
|
setup.py
|
setup.py
|
import os
from setuptools import setup, find_packages
def read(filename):
return open(os.path.join(os.path.dirname(__file__), filename)).read()
setup(
name='gears-handlebars',
version='0.1.2',
url='https://github.com/gears/gears-handlebars',
license='ISC',
author='Mike Yumatov',
author_email='mike@yumatov.org',
description='Handlebars compiler for Gears',
long_description=read('README.rst'),
packages=find_packages(),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
)
|
import os
from setuptools import setup, find_packages
def read(filename):
return open(os.path.join(os.path.dirname(__file__), filename)).read()
setup(
name='gears-handlebars',
version='0.1.2',
url='https://github.com/gears/gears-handlebars',
license='ISC',
author='Mike Yumatov',
author_email='mike@yumatov.org',
description='Handlebars compiler for Gears',
long_description=read('README.rst'),
packages=find_packages(),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
],
)
|
Add Python 3.2 to package classifiers
|
Add Python 3.2 to package classifiers
|
Python
|
isc
|
gears/gears-handlebars,gears/gears-handlebars
|
import os
from setuptools import setup, find_packages
def read(filename):
return open(os.path.join(os.path.dirname(__file__), filename)).read()
setup(
name='gears-handlebars',
version='0.1.2',
url='https://github.com/gears/gears-handlebars',
license='ISC',
author='Mike Yumatov',
author_email='mike@yumatov.org',
description='Handlebars compiler for Gears',
long_description=read('README.rst'),
packages=find_packages(),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
)
Add Python 3.2 to package classifiers
|
import os
from setuptools import setup, find_packages
def read(filename):
return open(os.path.join(os.path.dirname(__file__), filename)).read()
setup(
name='gears-handlebars',
version='0.1.2',
url='https://github.com/gears/gears-handlebars',
license='ISC',
author='Mike Yumatov',
author_email='mike@yumatov.org',
description='Handlebars compiler for Gears',
long_description=read('README.rst'),
packages=find_packages(),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
],
)
|
<commit_before>import os
from setuptools import setup, find_packages
def read(filename):
return open(os.path.join(os.path.dirname(__file__), filename)).read()
setup(
name='gears-handlebars',
version='0.1.2',
url='https://github.com/gears/gears-handlebars',
license='ISC',
author='Mike Yumatov',
author_email='mike@yumatov.org',
description='Handlebars compiler for Gears',
long_description=read('README.rst'),
packages=find_packages(),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
)
<commit_msg>Add Python 3.2 to package classifiers<commit_after>
|
import os
from setuptools import setup, find_packages
def read(filename):
return open(os.path.join(os.path.dirname(__file__), filename)).read()
setup(
name='gears-handlebars',
version='0.1.2',
url='https://github.com/gears/gears-handlebars',
license='ISC',
author='Mike Yumatov',
author_email='mike@yumatov.org',
description='Handlebars compiler for Gears',
long_description=read('README.rst'),
packages=find_packages(),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
],
)
|
import os
from setuptools import setup, find_packages
def read(filename):
return open(os.path.join(os.path.dirname(__file__), filename)).read()
setup(
name='gears-handlebars',
version='0.1.2',
url='https://github.com/gears/gears-handlebars',
license='ISC',
author='Mike Yumatov',
author_email='mike@yumatov.org',
description='Handlebars compiler for Gears',
long_description=read('README.rst'),
packages=find_packages(),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
)
Add Python 3.2 to package classifiers
import os
from setuptools import setup, find_packages
def read(filename):
return open(os.path.join(os.path.dirname(__file__), filename)).read()
setup(
name='gears-handlebars',
version='0.1.2',
url='https://github.com/gears/gears-handlebars',
license='ISC',
author='Mike Yumatov',
author_email='mike@yumatov.org',
description='Handlebars compiler for Gears',
long_description=read('README.rst'),
packages=find_packages(),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
],
)
|
<commit_before>import os
from setuptools import setup, find_packages
def read(filename):
return open(os.path.join(os.path.dirname(__file__), filename)).read()
setup(
name='gears-handlebars',
version='0.1.2',
url='https://github.com/gears/gears-handlebars',
license='ISC',
author='Mike Yumatov',
author_email='mike@yumatov.org',
description='Handlebars compiler for Gears',
long_description=read('README.rst'),
packages=find_packages(),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
)
<commit_msg>Add Python 3.2 to package classifiers<commit_after>import os
from setuptools import setup, find_packages
def read(filename):
return open(os.path.join(os.path.dirname(__file__), filename)).read()
setup(
name='gears-handlebars',
version='0.1.2',
url='https://github.com/gears/gears-handlebars',
license='ISC',
author='Mike Yumatov',
author_email='mike@yumatov.org',
description='Handlebars compiler for Gears',
long_description=read('README.rst'),
packages=find_packages(),
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.2',
],
)
|
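An aside on the classifier change above: `Programming Language :: Python :: X.Y` entries are informational metadata only and do not restrict which interpreters can install the package. A hedged sketch of pairing them with `python_requires`, which does enforce a range (the record itself does not use this, and the name and specifier below are assumptions):

# Sketch only; package name and python_requires range are illustrative.
from setuptools import setup

setup(
    name="example-pkg",
    version="0.0.1",
    py_modules=["example"],
    classifiers=[
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3.2",
    ],
    python_requires=">=2.6, !=3.0.*, !=3.1.*",
)
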
0b29388834077eb0c8f9292c2f7ec28fc7f36dde
|
setup.py
|
setup.py
|
#!/usr/bin/env python
"""
setup.py
"""
from setuptools import setup, find_packages
setup(
name='SATOSA',
version='0.4.1',
description='Protocol proxy (SAML/OIDC).',
author='DIRG',
author_email='dirg@its.umu.se',
license='Apache 2.0',
url='https://github.com/its-dirg/SATOSA',
packages=find_packages('src/'),
package_dir={'': 'src'},
scripts=["tools/make_satosa_saml_metadata.py"],
install_requires=[
"pluginbase",
"future",
"oic",
"pyjwkest",
"pysaml2 >= 4.0.0",
"requests",
"PyYAML",
"pycrypto",
"gunicorn",
"Werkzeug"
],
zip_safe=False,
classifiers=[
"Programming Language :: Python :: 3 :: Only"
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
]
)
|
#!/usr/bin/env python
"""
setup.py
"""
from setuptools import setup, find_packages
setup(
name='SATOSA',
version='0.4.1',
description='Protocol proxy (SAML/OIDC).',
author='DIRG',
author_email='dirg@its.umu.se',
license='Apache 2.0',
url='https://github.com/its-dirg/SATOSA',
packages=find_packages('src/'),
package_dir={'': 'src'},
scripts=["tools/make_satosa_saml_metadata.py"],
install_requires=[
"pluginbase",
"future",
"oic",
"pyjwkest",
"pysaml2 >= 4.0.2",
"requests",
"PyYAML",
"pycrypto",
"gunicorn",
"Werkzeug"
],
zip_safe=False,
classifiers=[
"Programming Language :: Python :: 3 :: Only"
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
]
)
|
Update pysaml2 dependency to resolve bugs related to attribute filtering.
|
Update pysaml2 dependency to resolve bugs related to attribute filtering.
|
Python
|
apache-2.0
|
irtnog/SATOSA,irtnog/SATOSA,its-dirg/SATOSA,SUNET/SATOSA,SUNET/SATOSA
|
#!/usr/bin/env python
"""
setup.py
"""
from setuptools import setup, find_packages
setup(
name='SATOSA',
version='0.4.1',
description='Protocol proxy (SAML/OIDC).',
author='DIRG',
author_email='dirg@its.umu.se',
license='Apache 2.0',
url='https://github.com/its-dirg/SATOSA',
packages=find_packages('src/'),
package_dir={'': 'src'},
scripts=["tools/make_satosa_saml_metadata.py"],
install_requires=[
"pluginbase",
"future",
"oic",
"pyjwkest",
"pysaml2 >= 4.0.0",
"requests",
"PyYAML",
"pycrypto",
"gunicorn",
"Werkzeug"
],
zip_safe=False,
classifiers=[
"Programming Language :: Python :: 3 :: Only"
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
]
)
Update pysaml2 dependency to resolve bugs related to attribute filtering.
|
#!/usr/bin/env python
"""
setup.py
"""
from setuptools import setup, find_packages
setup(
name='SATOSA',
version='0.4.1',
description='Protocol proxy (SAML/OIDC).',
author='DIRG',
author_email='dirg@its.umu.se',
license='Apache 2.0',
url='https://github.com/its-dirg/SATOSA',
packages=find_packages('src/'),
package_dir={'': 'src'},
scripts=["tools/make_satosa_saml_metadata.py"],
install_requires=[
"pluginbase",
"future",
"oic",
"pyjwkest",
"pysaml2 >= 4.0.2",
"requests",
"PyYAML",
"pycrypto",
"gunicorn",
"Werkzeug"
],
zip_safe=False,
classifiers=[
"Programming Language :: Python :: 3 :: Only"
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
]
)
|
<commit_before>#!/usr/bin/env python
"""
setup.py
"""
from setuptools import setup, find_packages
setup(
name='SATOSA',
version='0.4.1',
description='Protocol proxy (SAML/OIDC).',
author='DIRG',
author_email='dirg@its.umu.se',
license='Apache 2.0',
url='https://github.com/its-dirg/SATOSA',
packages=find_packages('src/'),
package_dir={'': 'src'},
scripts=["tools/make_satosa_saml_metadata.py"],
install_requires=[
"pluginbase",
"future",
"oic",
"pyjwkest",
"pysaml2 >= 4.0.0",
"requests",
"PyYAML",
"pycrypto",
"gunicorn",
"Werkzeug"
],
zip_safe=False,
classifiers=[
"Programming Language :: Python :: 3 :: Only"
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
]
)
<commit_msg>Update pysaml2 dependency to resolve bugs related to attribute filtering.<commit_after>
|
#!/usr/bin/env python
"""
setup.py
"""
from setuptools import setup, find_packages
setup(
name='SATOSA',
version='0.4.1',
description='Protocol proxy (SAML/OIDC).',
author='DIRG',
author_email='dirg@its.umu.se',
license='Apache 2.0',
url='https://github.com/its-dirg/SATOSA',
packages=find_packages('src/'),
package_dir={'': 'src'},
scripts=["tools/make_satosa_saml_metadata.py"],
install_requires=[
"pluginbase",
"future",
"oic",
"pyjwkest",
"pysaml2 >= 4.0.2",
"requests",
"PyYAML",
"pycrypto",
"gunicorn",
"Werkzeug"
],
zip_safe=False,
classifiers=[
"Programming Language :: Python :: 3 :: Only"
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
]
)
|
#!/usr/bin/env python
"""
setup.py
"""
from setuptools import setup, find_packages
setup(
name='SATOSA',
version='0.4.1',
description='Protocol proxy (SAML/OIDC).',
author='DIRG',
author_email='dirg@its.umu.se',
license='Apache 2.0',
url='https://github.com/its-dirg/SATOSA',
packages=find_packages('src/'),
package_dir={'': 'src'},
scripts=["tools/make_satosa_saml_metadata.py"],
install_requires=[
"pluginbase",
"future",
"oic",
"pyjwkest",
"pysaml2 >= 4.0.0",
"requests",
"PyYAML",
"pycrypto",
"gunicorn",
"Werkzeug"
],
zip_safe=False,
classifiers=[
"Programming Language :: Python :: 3 :: Only"
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
]
)
Update pysaml2 dependency to resolve bugs related to attribute filtering.
#!/usr/bin/env python
"""
setup.py
"""
from setuptools import setup, find_packages
setup(
name='SATOSA',
version='0.4.1',
description='Protocol proxy (SAML/OIDC).',
author='DIRG',
author_email='dirg@its.umu.se',
license='Apache 2.0',
url='https://github.com/its-dirg/SATOSA',
packages=find_packages('src/'),
package_dir={'': 'src'},
scripts=["tools/make_satosa_saml_metadata.py"],
install_requires=[
"pluginbase",
"future",
"oic",
"pyjwkest",
"pysaml2 >= 4.0.2",
"requests",
"PyYAML",
"pycrypto",
"gunicorn",
"Werkzeug"
],
zip_safe=False,
classifiers=[
"Programming Language :: Python :: 3 :: Only"
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
]
)
|
<commit_before>#!/usr/bin/env python
"""
setup.py
"""
from setuptools import setup, find_packages
setup(
name='SATOSA',
version='0.4.1',
description='Protocol proxy (SAML/OIDC).',
author='DIRG',
author_email='dirg@its.umu.se',
license='Apache 2.0',
url='https://github.com/its-dirg/SATOSA',
packages=find_packages('src/'),
package_dir={'': 'src'},
scripts=["tools/make_satosa_saml_metadata.py"],
install_requires=[
"pluginbase",
"future",
"oic",
"pyjwkest",
"pysaml2 >= 4.0.0",
"requests",
"PyYAML",
"pycrypto",
"gunicorn",
"Werkzeug"
],
zip_safe=False,
classifiers=[
"Programming Language :: Python :: 3 :: Only"
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
]
)
<commit_msg>Update pysaml2 dependency to resolve bugs related to attribute filtering.<commit_after>#!/usr/bin/env python
"""
setup.py
"""
from setuptools import setup, find_packages
setup(
name='SATOSA',
version='0.4.1',
description='Protocol proxy (SAML/OIDC).',
author='DIRG',
author_email='dirg@its.umu.se',
license='Apache 2.0',
url='https://github.com/its-dirg/SATOSA',
packages=find_packages('src/'),
package_dir={'': 'src'},
scripts=["tools/make_satosa_saml_metadata.py"],
install_requires=[
"pluginbase",
"future",
"oic",
"pyjwkest",
"pysaml2 >= 4.0.2",
"requests",
"PyYAML",
"pycrypto",
"gunicorn",
"Werkzeug"
],
zip_safe=False,
classifiers=[
"Programming Language :: Python :: 3 :: Only"
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
]
)
|
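One detail worth flagging in the record above (an observation, not part of the commit): the classifiers list is missing a comma after "Programming Language :: Python :: 3 :: Only", so implicit string concatenation merges the first two entries into a single invalid classifier at runtime. Assuming three separate classifiers were intended, the list would read:

# Corrected sketch of the classifiers list only (assumes three entries were intended).
classifiers = [
    "Programming Language :: Python :: 3 :: Only",
    "Programming Language :: Python :: 3.4",
    "Programming Language :: Python :: 3.5",
]
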
ac0a337f5d3b65af2b96e772a00d06c73626454c
|
setup.py
|
setup.py
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
requirements = ['contextlib2', 'pysam', 'six', 'shutilwhich']
ENTRY_POINTS = '''
[console_scripts]
tag_reads=tag_reads.tag_reads:main
allow_dovetailing=tag_reads.allow_dovetailing:main
'''
setup(
name='tag_reads',
version='0.1.5',
packages=['tag_reads'],
install_requires=requirements,
entry_points=ENTRY_POINTS,
keywords='Bioinformatics',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Environment :: Console',
'Operating System :: POSIX',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
],
extras_require={
'testing': ["pytest", "pytest-datadir", "tox", "planemo"],
},
url='https://github.com/bardin-lab/tag_reads',
license='MIT',
author='Marius van den Beek',
author_email='m.vandenbeek@gmail.com',
description='Tags reads in BAM files based on alignments in additional BAM files.'
)
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
requirements = ['contextlib2', 'pysam', 'six', 'shutilwhich']
ENTRY_POINTS = '''
[console_scripts]
tag_reads=tag_reads.tag_reads:main
allow_dovetailing=tag_reads.allow_dovetailing:main
'''
setup(
name='tag_reads',
version='0.1.5',
packages=['tag_reads'],
install_requires=requirements,
entry_points=ENTRY_POINTS,
keywords='Bioinformatics',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Environment :: Console',
'Operating System :: POSIX',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
],
extras_require={
'testing': ["pytest", "pytest-datadir", "tox", "planemo", "cookiecutter", "bumpversion"],
},
url='https://github.com/bardin-lab/tag_reads',
license='MIT',
author='Marius van den Beek',
author_email='m.vandenbeek@gmail.com',
description='Tags reads in BAM files based on alignments in additional BAM files.'
)
|
Add extra-requirements for release management
|
Add extra-requirements for release management
|
Python
|
mit
|
bardin-lab/readtagger,bardin-lab/readtagger
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
requirements = ['contextlib2', 'pysam', 'six', 'shutilwhich']
ENTRY_POINTS = '''
[console_scripts]
tag_reads=tag_reads.tag_reads:main
allow_dovetailing=tag_reads.allow_dovetailing:main
'''
setup(
name='tag_reads',
version='0.1.5',
packages=['tag_reads'],
install_requires=requirements,
entry_points=ENTRY_POINTS,
keywords='Bioinformatics',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Environment :: Console',
'Operating System :: POSIX',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
],
extras_require={
'testing': ["pytest", "pytest-datadir", "tox", "planemo"],
},
url='https://github.com/bardin-lab/tag_reads',
license='MIT',
author='Marius van den Beek',
author_email='m.vandenbeek@gmail.com',
description='Tags reads in BAM files based on alignments in additional BAM files.'
)
Add extra-requirements for release management
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
requirements = ['contextlib2', 'pysam', 'six', 'shutilwhich']
ENTRY_POINTS = '''
[console_scripts]
tag_reads=tag_reads.tag_reads:main
allow_dovetailing=tag_reads.allow_dovetailing:main
'''
setup(
name='tag_reads',
version='0.1.5',
packages=['tag_reads'],
install_requires=requirements,
entry_points=ENTRY_POINTS,
keywords='Bioinformatics',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Environment :: Console',
'Operating System :: POSIX',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
],
extras_require={
'testing': ["pytest", "pytest-datadir", "tox", "planemo", "cookiecutter", "bumpversion"],
},
url='https://github.com/bardin-lab/tag_reads',
license='MIT',
author='Marius van den Beek',
author_email='m.vandenbeek@gmail.com',
description='Tags reads in BAM files based on alignments in additional BAM files.'
)
|
<commit_before>try:
from setuptools import setup
except ImportError:
from distutils.core import setup
requirements = ['contextlib2', 'pysam', 'six', 'shutilwhich']
ENTRY_POINTS = '''
[console_scripts]
tag_reads=tag_reads.tag_reads:main
allow_dovetailing=tag_reads.allow_dovetailing:main
'''
setup(
name='tag_reads',
version='0.1.5',
packages=['tag_reads'],
install_requires=requirements,
entry_points=ENTRY_POINTS,
keywords='Bioinformatics',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Environment :: Console',
'Operating System :: POSIX',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
],
extras_require={
'testing': ["pytest", "pytest-datadir", "tox", "planemo"],
},
url='https://github.com/bardin-lab/tag_reads',
license='MIT',
author='Marius van den Beek',
author_email='m.vandenbeek@gmail.com',
description='Tags reads in BAM files based on alignments in additional BAM files.'
)
<commit_msg>Add extra-requirements for release management<commit_after>
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
requirements = ['contextlib2', 'pysam', 'six', 'shutilwhich']
ENTRY_POINTS = '''
[console_scripts]
tag_reads=tag_reads.tag_reads:main
allow_dovetailing=tag_reads.allow_dovetailing:main
'''
setup(
name='tag_reads',
version='0.1.5',
packages=['tag_reads'],
install_requires=requirements,
entry_points=ENTRY_POINTS,
keywords='Bioinformatics',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Environment :: Console',
'Operating System :: POSIX',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
],
extras_require={
'testing': ["pytest", "pytest-datadir", "tox", "planemo", "cookiecutter", "bumpversion"],
},
url='https://github.com/bardin-lab/tag_reads',
license='MIT',
author='Marius van den Beek',
author_email='m.vandenbeek@gmail.com',
description='Tags reads in BAM files based on alignments in additional BAM files.'
)
|
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
requirements = ['contextlib2', 'pysam', 'six', 'shutilwhich']
ENTRY_POINTS = '''
[console_scripts]
tag_reads=tag_reads.tag_reads:main
allow_dovetailing=tag_reads.allow_dovetailing:main
'''
setup(
name='tag_reads',
version='0.1.5',
packages=['tag_reads'],
install_requires=requirements,
entry_points=ENTRY_POINTS,
keywords='Bioinformatics',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Environment :: Console',
'Operating System :: POSIX',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
],
extras_require={
'testing': ["pytest", "pytest-datadir", "tox", "planemo"],
},
url='https://github.com/bardin-lab/tag_reads',
license='MIT',
author='Marius van den Beek',
author_email='m.vandenbeek@gmail.com',
description='Tags reads in BAM files based on alignments in additional BAM files.'
)
Add extra-requirements for release management
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
requirements = ['contextlib2', 'pysam', 'six', 'shutilwhich']
ENTRY_POINTS = '''
[console_scripts]
tag_reads=tag_reads.tag_reads:main
allow_dovetailing=tag_reads.allow_dovetailing:main
'''
setup(
name='tag_reads',
version='0.1.5',
packages=['tag_reads'],
install_requires=requirements,
entry_points=ENTRY_POINTS,
keywords='Bioinformatics',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Environment :: Console',
'Operating System :: POSIX',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
],
extras_require={
'testing': ["pytest", "pytest-datadir", "tox", "planemo", "cookiecutter", "bumpversion"],
},
url='https://github.com/bardin-lab/tag_reads',
license='MIT',
author='Marius van den Beek',
author_email='m.vandenbeek@gmail.com',
description='Tags reads in BAM files based on alignments in additional BAM files.'
)
|
<commit_before>try:
from setuptools import setup
except ImportError:
from distutils.core import setup
requirements = ['contextlib2', 'pysam', 'six', 'shutilwhich']
ENTRY_POINTS = '''
[console_scripts]
tag_reads=tag_reads.tag_reads:main
allow_dovetailing=tag_reads.allow_dovetailing:main
'''
setup(
name='tag_reads',
version='0.1.5',
packages=['tag_reads'],
install_requires=requirements,
entry_points=ENTRY_POINTS,
keywords='Bioinformatics',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Environment :: Console',
'Operating System :: POSIX',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
],
extras_require={
'testing': ["pytest", "pytest-datadir", "tox", "planemo"],
},
url='https://github.com/bardin-lab/tag_reads',
license='MIT',
author='Marius van den Beek',
author_email='m.vandenbeek@gmail.com',
description='Tags reads in BAM files based on alignments in additional BAM files.'
)
<commit_msg>Add extra-requirements for release management<commit_after>try:
from setuptools import setup
except ImportError:
from distutils.core import setup
requirements = ['contextlib2', 'pysam', 'six', 'shutilwhich']
ENTRY_POINTS = '''
[console_scripts]
tag_reads=tag_reads.tag_reads:main
allow_dovetailing=tag_reads.allow_dovetailing:main
'''
setup(
name='tag_reads',
version='0.1.5',
packages=['tag_reads'],
install_requires=requirements,
entry_points=ENTRY_POINTS,
keywords='Bioinformatics',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Environment :: Console',
'Operating System :: POSIX',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
],
extras_require={
'testing': ["pytest", "pytest-datadir", "tox", "planemo", "cookiecutter", "bumpversion"],
},
url='https://github.com/bardin-lab/tag_reads',
license='MIT',
author='Marius van den Beek',
author_email='m.vandenbeek@gmail.com',
description='Tags reads in BAM files based on alignments in additional BAM files.'
)
|
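On the extras_require change above: packages listed under an extra such as "testing" are only installed when that extra is requested explicitly. A minimal sketch with an assumed package name, not the project's actual setup.py:

# Sketch only; the name and extras below are illustrative.
from setuptools import setup

setup(
    name="example-pkg",
    version="0.0.1",
    py_modules=["example"],
    extras_require={
        "testing": ["pytest", "tox"],
        "release": ["bumpversion", "cookiecutter"],
    },
)
# Consumers opt in explicitly, e.g.:
#     pip install "example-pkg[testing,release]"
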
42e3fb06868211a01884108ff404a5318002d498
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name = 'upstox',
packages = ['upstox_api'],
version = '1.5.2',
include_package_data=True,
description = 'Official Python library for Upstox APIs',
author = 'Upstox Development Team',
author_email = 'support@upstox.com',
url = 'https://github.com/upstox/upstox-python',
install_requires=['future', 'requests', 'websocket_client'],
keywords = ['upstox', 'python', 'sdk', 'trading', 'stock markets'],
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
|
from setuptools import setup
setup(
name = 'upstox',
packages = ['upstox_api'],
version = '1.5.3',
include_package_data=True,
description = 'Official Python library for Upstox APIs',
author = 'Upstox Development Team',
author_email = 'support@upstox.com',
url = 'https://github.com/upstox/upstox-python',
install_requires=['future', 'requests', 'websocket_client'],
keywords = ['upstox', 'python', 'sdk', 'trading', 'stock markets'],
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
|
Change the version to tag it.
|
[Feature] : Change the version to tag it.
|
Python
|
mit
|
upstox/upstox-python
|
from setuptools import setup
setup(
name = 'upstox',
packages = ['upstox_api'],
version = '1.5.2',
include_package_data=True,
description = 'Official Python library for Upstox APIs',
author = 'Upstox Development Team',
author_email = 'support@upstox.com',
url = 'https://github.com/upstox/upstox-python',
install_requires=['future', 'requests', 'websocket_client'],
keywords = ['upstox', 'python', 'sdk', 'trading', 'stock markets'],
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
[Feature] : Change the version to tag it.
|
from setuptools import setup
setup(
name = 'upstox',
packages = ['upstox_api'],
version = '1.5.3',
include_package_data=True,
description = 'Official Python library for Upstox APIs',
author = 'Upstox Development Team',
author_email = 'support@upstox.com',
url = 'https://github.com/upstox/upstox-python',
install_requires=['future', 'requests', 'websocket_client'],
keywords = ['upstox', 'python', 'sdk', 'trading', 'stock markets'],
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
|
<commit_before>from setuptools import setup
setup(
name = 'upstox',
packages = ['upstox_api'],
version = '1.5.2',
include_package_data=True,
description = 'Official Python library for Upstox APIs',
author = 'Upstox Development Team',
author_email = 'support@upstox.com',
url = 'https://github.com/upstox/upstox-python',
install_requires=['future', 'requests', 'websocket_client'],
keywords = ['upstox', 'python', 'sdk', 'trading', 'stock markets'],
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)<commit_msg>[Feature] : Change the version to tag it.<commit_after>
|
from setuptools import setup
setup(
name = 'upstox',
packages = ['upstox_api'],
version = '1.5.3',
include_package_data=True,
description = 'Official Python library for Upstox APIs',
author = 'Upstox Development Team',
author_email = 'support@upstox.com',
url = 'https://github.com/upstox/upstox-python',
install_requires=['future', 'requests', 'websocket_client'],
keywords = ['upstox', 'python', 'sdk', 'trading', 'stock markets'],
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
|
from setuptools import setup
setup(
name = 'upstox',
packages = ['upstox_api'],
version = '1.5.2',
include_package_data=True,
description = 'Official Python library for Upstox APIs',
author = 'Upstox Development Team',
author_email = 'support@upstox.com',
url = 'https://github.com/upstox/upstox-python',
install_requires=['future', 'requests', 'websocket_client'],
keywords = ['upstox', 'python', 'sdk', 'trading', 'stock markets'],
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)[Feature] : Change the version to tag it.from setuptools import setup
setup(
name = 'upstox',
packages = ['upstox_api'],
version = '1.5.3',
include_package_data=True,
description = 'Official Python library for Upstox APIs',
author = 'Upstox Development Team',
author_email = 'support@upstox.com',
url = 'https://github.com/upstox/upstox-python',
install_requires=['future', 'requests', 'websocket_client'],
keywords = ['upstox', 'python', 'sdk', 'trading', 'stock markets'],
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
|
<commit_before>from setuptools import setup
setup(
name = 'upstox',
packages = ['upstox_api'],
version = '1.5.2',
include_package_data=True,
description = 'Official Python library for Upstox APIs',
author = 'Upstox Development Team',
author_email = 'support@upstox.com',
url = 'https://github.com/upstox/upstox-python',
install_requires=['future', 'requests', 'websocket_client'],
keywords = ['upstox', 'python', 'sdk', 'trading', 'stock markets'],
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)<commit_msg>[Feature] : Change the version to tag it.<commit_after>from setuptools import setup
setup(
name = 'upstox',
packages = ['upstox_api'],
version = '1.5.3',
include_package_data=True,
description = 'Official Python library for Upstox APIs',
author = 'Upstox Development Team',
author_email = 'support@upstox.com',
url = 'https://github.com/upstox/upstox-python',
install_requires=['future', 'requests', 'websocket_client'],
keywords = ['upstox', 'python', 'sdk', 'trading', 'stock markets'],
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
|
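The upstox record above is a pure version bump: the only changed line is version = '1.5.2' -> '1.5.3' in setup.py. A hedged sketch of one way to keep that string in a single place so a release tag touches one line; the module path and helper name are illustrative assumptions, not taken from the upstox repository:

# upstox_api/__init__.py (hypothetical layout): single source of truth
# __version__ = "1.5.3"

# setup.py side: read the attribute instead of repeating the literal
import re

def read_version(path="upstox_api/__init__.py"):
    """Extract __version__ from the package source without importing it."""
    with open(path) as f:
        match = re.search(r'__version__\s*=\s*[\'"]([^\'"]+)[\'"]', f.read())
    if match is None:
        raise RuntimeError("version string not found in %s" % path)
    return match.group(1)

# setup(..., version=read_version(), ...)

setup() would then receive version=read_version() instead of a hard-coded literal, so a tag bump edits one file.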
4a8ac74a4d32379eb6a366308e238452b6ba53b0
|
openstack_dashboard/dashboards/project/routers/forms.py
|
openstack_dashboard/dashboards/project/routers/forms.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012, Nachi Ueno, NTT MCL, Inc.
# All rights reserved.
"""
Views for managing Quantum Routers.
"""
import logging
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from horizon import forms
from horizon import exceptions
from horizon import messages
from openstack_dashboard import api
LOG = logging.getLogger(__name__)
class CreateForm(forms.SelfHandlingForm):
name = forms.CharField(max_length="255", label=_("Router Name"))
failure_url = 'horizon:project:routers:index'
def __init__(self, request, *args, **kwargs):
super(CreateForm, self).__init__(request, *args, **kwargs)
def handle(self, request, data):
try:
router = api.quantum.router_create(request,
name=data['name'])
message = 'Router created "%s"' % data['name']
messages.success(request, message)
return router
except:
msg = _('Failed to create router "%s".') % data['name']
LOG.info(msg)
redirect = reverse(self.failure_url)
exceptions.handle(request, msg, redirect=redirect)
return False
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012, Nachi Ueno, NTT MCL, Inc.
# All rights reserved.
"""
Views for managing Quantum Routers.
"""
import logging
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from horizon import forms
from horizon import exceptions
from horizon import messages
from openstack_dashboard import api
LOG = logging.getLogger(__name__)
class CreateForm(forms.SelfHandlingForm):
name = forms.CharField(max_length="255", label=_("Router Name"))
failure_url = 'horizon:project:routers:index'
def __init__(self, request, *args, **kwargs):
super(CreateForm, self).__init__(request, *args, **kwargs)
def handle(self, request, data):
try:
router = api.quantum.router_create(request,
name=data['name'])
message = _('Router %s was successfully created.') % data['name']
messages.success(request, message)
return router
except:
msg = _('Failed to create router "%s".') % data['name']
LOG.info(msg)
redirect = reverse(self.failure_url)
exceptions.handle(request, msg, redirect=redirect)
return False
|
Make 'Router created' message translatable
|
Make 'Router created' message translatable
Change-Id: If0e246157a72fd1cabdbbde77e0c057d9d611eaa
|
Python
|
apache-2.0
|
nvoron23/avos,henaras/horizon,tellesnobrega/horizon,CiscoSystems/avos,kaiweifan/horizon,eayunstack/horizon,Tesora/tesora-horizon,redhat-cip/horizon,redhat-openstack/horizon,yeming233/horizon,idjaw/horizon,BiznetGIO/horizon,wolverineav/horizon,damien-dg/horizon,openstack/horizon,kaiweifan/horizon,coreycb/horizon,citrix-openstack-build/horizon,davidcusatis/horizon,Metaswitch/horizon,openstack/horizon,endorphinl/horizon-fork,Mirantis/mos-horizon,anthonydillon/horizon,agileblaze/OpenStackTwoFactorAuthentication,izadorozhna/dashboard_integration_tests,yanheven/console,maestro-hybrid-cloud/horizon,flochaz/horizon,tsufiev/horizon,kfox1111/horizon,tsufiev/horizon,NCI-Cloud/horizon,citrix-openstack-build/horizon,Mirantis/mos-horizon,rickerc/horizon_audit,saydulk/horizon,r-icarus/openstack_microserver,bigswitch/horizon,Dark-Hacker/horizon,Daniex/horizon,j4/horizon,NCI-Cloud/horizon,liyitest/rr,tqtran7/horizon,luhanhan/horizon,xinwu/horizon,openstack-ja/horizon,j4/horizon,icloudrnd/automation_tools,pranavtendolkr/horizon,mdavid/horizon,spring-week-topos/horizon-week,yjxtogo/horizon,ikargis/horizon_fod,froyobin/horizon,tanglei528/horizon,icloudrnd/automation_tools,kaiweifan/horizon,bigswitch/horizon,mrunge/horizon,philoniare/horizon,yanheven/console,anthonydillon/horizon,Tesora/tesora-horizon,kfox1111/horizon,blueboxgroup/horizon,mrunge/openstack_horizon,bac/horizon,froyobin/horizon,mrunge/horizon,takeshineshiro/horizon,takeshineshiro/horizon,mrunge/openstack_horizon,rickerc/horizon_audit,coreycb/horizon,tsufiev/horizon,liyitest/rr,CiscoSystems/horizon,flochaz/horizon,liyitest/rr,promptworks/horizon,noironetworks/horizon,pranavtendolkr/horizon,newrocknj/horizon,tqtran7/horizon,BiznetGIO/horizon,Daniex/horizon,luhanhan/horizon,henaras/horizon,bigswitch/horizon,Hodorable/0602,FNST-OpenStack/horizon,noironetworks/horizon,takeshineshiro/horizon,zouyapeng/horizon,flochaz/horizon,Daniex/horizon,mandeepdhami/horizon,promptworks/horizon,sandvine/horizon,endorphinl/horizon,vladryk/horizon,netscaler/horizon,damien-dg/horizon,openstack/horizon,xme1226/horizon,spring-week-topos/horizon-week,ging/horizon,NeCTAR-RC/horizon,wangxiangyu/horizon,rdo-management/tuskar-ui,yanheven/console,newrocknj/horizon,VaneCloud/horizon,CiscoSystems/horizon,wangxiangyu/horizon,r-icarus/openstack_microserver,blueboxgroup/horizon,Dark-Hacker/horizon,izadorozhna/dashboard_integration_tests,philoniare/horizon,gochist/horizon,Mirantis/mos-horizon,davidcusatis/horizon,Hodorable/0602,damien-dg/horizon,nvoron23/avos,vladryk/horizon,xinwu/horizon,wolverineav/horizon,yeming233/horizon,CiscoSystems/avos,karthik-suresh/horizon,endorphinl/horizon,blueboxgroup/horizon,mrunge/openstack_horizon,Metaswitch/horizon,tellesnobrega/horizon,maestro-hybrid-cloud/horizon,xme1226/horizon,sandvine/horizon,noironetworks/horizon,yjxtogo/horizon,mdavid/horizon,tqtran7/horizon,ikargis/horizon_fod,blueboxgroup/horizon,yeming233/horizon,zouyapeng/horizon,RudoCris/horizon,orbitfp7/horizon,noironetworks/horizon,watonyweng/horizon,redhat-openstack/horizon,CiscoSystems/horizon,dan1/horizon-x509,django-leonardo/horizon,JioCloud/horizon,dan1/horizon-proto,kfox1111/horizon,ChameleonCloud/horizon,bac/horizon,bac/horizon,doug-fish/horizon,mrunge/horizon_lib,mandeepdhami/horizon,Dark-Hacker/horizon,gerrive/horizon,mrunge/horizon_lib,ChameleonCloud/horizon,xinwu/horizon,doug-fish/horizon,rickerc/horizon_audit,wolverineav/horizon,ging/horizon,damien-dg/horizon,mandeepdhami/horizon,JioCloud/horizon,karthik-suresh/horizon,tuskar/tuskar-ui,FNST-OpenStack/horizon,BiznetGIO/horizon
,henaras/horizon,rdo-management/tuskar-ui,vladryk/horizon,maestro-hybrid-cloud/horizon,davidcusatis/horizon,Metaswitch/horizon,Tesora/tesora-horizon,mandeepdhami/horizon,ChameleonCloud/horizon,dan1/horizon-x509,CiscoSystems/horizon,VaneCloud/horizon,j4/horizon,dan1/horizon-proto,eayunstack/horizon,luhanhan/horizon,coreycb/horizon,wangxiangyu/horizon,j4/horizon,newrocknj/horizon,netscaler/horizon,openstack-ja/horizon,agileblaze/OpenStackTwoFactorAuthentication,CiscoSystems/avos,saydulk/horizon,Daniex/horizon,newrocknj/horizon,gochist/horizon,mrunge/horizon_lib,VaneCloud/horizon,endorphinl/horizon-fork,django-leonardo/horizon,dan1/horizon-proto,pranavtendolkr/horizon,dan1/horizon-x509,orbitfp7/horizon,redhat-cip/horizon,orbitfp7/horizon,luhanhan/horizon,netscaler/horizon,zouyapeng/horizon,gerrive/horizon,Hodorable/0602,agileblaze/OpenStackTwoFactorAuthentication,NeCTAR-RC/horizon,Mirantis/mos-horizon,liyitest/rr,pranavtendolkr/horizon,endorphinl/horizon-fork,bigswitch/horizon,aaronorosen/horizon-congress,watonyweng/horizon,idjaw/horizon,JioCloud/horizon,zouyapeng/horizon,django-leonardo/horizon,tellesnobrega/horizon,gerrive/horizon,icloudrnd/automation_tools,ikargis/horizon_fod,xinwu/horizon,FNST-OpenStack/horizon,tellesnobrega/horizon,yjxtogo/horizon,icloudrnd/automation_tools,wangxiangyu/horizon,RudoCris/horizon,dan1/horizon-proto,tqtran7/horizon,eayunstack/horizon,Metaswitch/horizon,NeCTAR-RC/horizon,saydulk/horizon,idjaw/horizon,r-icarus/openstack_microserver,NeCTAR-RC/horizon,mdavid/horizon,anthonydillon/horizon,openstack-ja/horizon,Dark-Hacker/horizon,RudoCris/horizon,froyobin/horizon,BiznetGIO/horizon,davidcusatis/horizon,orbitfp7/horizon,takeshineshiro/horizon,endorphinl/horizon,mrunge/horizon,gochist/horizon,Solinea/horizon,kfox1111/horizon,bac/horizon,ChameleonCloud/horizon,tsufiev/horizon,doug-fish/horizon,idjaw/horizon,vladryk/horizon,Solinea/horizon,spring-week-topos/horizon-week,karthik-suresh/horizon,NCI-Cloud/horizon,tuskar/tuskar-ui,maestro-hybrid-cloud/horizon,rdo-management/tuskar-ui,redhat-cip/horizon,redhat-cip/horizon,anthonydillon/horizon,doug-fish/horizon,watonyweng/horizon,redhat-openstack/horizon,redhat-openstack/horizon,philoniare/horizon,gerrive/horizon,xme1226/horizon,yjxtogo/horizon,VaneCloud/horizon,flochaz/horizon,mdavid/horizon,CiscoSystems/avos,henaras/horizon,aaronorosen/horizon-congress,Solinea/horizon,RudoCris/horizon,tanglei528/horizon,Tesora/tesora-horizon,coreycb/horizon,yeming233/horizon,promptworks/horizon,openstack/horizon,tanglei528/horizon,aaronorosen/horizon-congress,sandvine/horizon,karthik-suresh/horizon,citrix-openstack-build/horizon,Hodorable/0602,promptworks/horizon,philoniare/horizon,FNST-OpenStack/horizon,endorphinl/horizon-fork,dan1/horizon-x509,NCI-Cloud/horizon,ging/horizon,endorphinl/horizon,wolverineav/horizon,django-leonardo/horizon,rdo-management/tuskar-ui,tuskar/tuskar-ui,sandvine/horizon,ging/horizon,Solinea/horizon,watonyweng/horizon,nvoron23/avos,saydulk/horizon,agileblaze/OpenStackTwoFactorAuthentication,nvoron23/avos
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012, Nachi Ueno, NTT MCL, Inc.
# All rights reserved.
"""
Views for managing Quantum Routers.
"""
import logging
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from horizon import forms
from horizon import exceptions
from horizon import messages
from openstack_dashboard import api
LOG = logging.getLogger(__name__)
class CreateForm(forms.SelfHandlingForm):
name = forms.CharField(max_length="255", label=_("Router Name"))
failure_url = 'horizon:project:routers:index'
def __init__(self, request, *args, **kwargs):
super(CreateForm, self).__init__(request, *args, **kwargs)
def handle(self, request, data):
try:
router = api.quantum.router_create(request,
name=data['name'])
message = 'Router created "%s"' % data['name']
messages.success(request, message)
return router
except:
msg = _('Failed to create router "%s".') % data['name']
LOG.info(msg)
redirect = reverse(self.failure_url)
exceptions.handle(request, msg, redirect=redirect)
return False
Make 'Router created' message translatable
Change-Id: If0e246157a72fd1cabdbbde77e0c057d9d611eaa
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012, Nachi Ueno, NTT MCL, Inc.
# All rights reserved.
"""
Views for managing Quantum Routers.
"""
import logging
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from horizon import forms
from horizon import exceptions
from horizon import messages
from openstack_dashboard import api
LOG = logging.getLogger(__name__)
class CreateForm(forms.SelfHandlingForm):
name = forms.CharField(max_length="255", label=_("Router Name"))
failure_url = 'horizon:project:routers:index'
def __init__(self, request, *args, **kwargs):
super(CreateForm, self).__init__(request, *args, **kwargs)
def handle(self, request, data):
try:
router = api.quantum.router_create(request,
name=data['name'])
message = _('Router %s was successfully created.') % data['name']
messages.success(request, message)
return router
except:
msg = _('Failed to create router "%s".') % data['name']
LOG.info(msg)
redirect = reverse(self.failure_url)
exceptions.handle(request, msg, redirect=redirect)
return False
|
<commit_before># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012, Nachi Ueno, NTT MCL, Inc.
# All rights reserved.
"""
Views for managing Quantum Routers.
"""
import logging
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from horizon import forms
from horizon import exceptions
from horizon import messages
from openstack_dashboard import api
LOG = logging.getLogger(__name__)
class CreateForm(forms.SelfHandlingForm):
name = forms.CharField(max_length="255", label=_("Router Name"))
failure_url = 'horizon:project:routers:index'
def __init__(self, request, *args, **kwargs):
super(CreateForm, self).__init__(request, *args, **kwargs)
def handle(self, request, data):
try:
router = api.quantum.router_create(request,
name=data['name'])
message = 'Router created "%s"' % data['name']
messages.success(request, message)
return router
except:
msg = _('Failed to create router "%s".') % data['name']
LOG.info(msg)
redirect = reverse(self.failure_url)
exceptions.handle(request, msg, redirect=redirect)
return False
<commit_msg>Make 'Router created' message translatable
Change-Id: If0e246157a72fd1cabdbbde77e0c057d9d611eaa<commit_after>
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012, Nachi Ueno, NTT MCL, Inc.
# All rights reserved.
"""
Views for managing Quantum Routers.
"""
import logging
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from horizon import forms
from horizon import exceptions
from horizon import messages
from openstack_dashboard import api
LOG = logging.getLogger(__name__)
class CreateForm(forms.SelfHandlingForm):
name = forms.CharField(max_length="255", label=_("Router Name"))
failure_url = 'horizon:project:routers:index'
def __init__(self, request, *args, **kwargs):
super(CreateForm, self).__init__(request, *args, **kwargs)
def handle(self, request, data):
try:
router = api.quantum.router_create(request,
name=data['name'])
message = _('Router %s was successfully created.') % data['name']
messages.success(request, message)
return router
except:
msg = _('Failed to create router "%s".') % data['name']
LOG.info(msg)
redirect = reverse(self.failure_url)
exceptions.handle(request, msg, redirect=redirect)
return False
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012, Nachi Ueno, NTT MCL, Inc.
# All rights reserved.
"""
Views for managing Quantum Routers.
"""
import logging
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from horizon import forms
from horizon import exceptions
from horizon import messages
from openstack_dashboard import api
LOG = logging.getLogger(__name__)
class CreateForm(forms.SelfHandlingForm):
name = forms.CharField(max_length="255", label=_("Router Name"))
failure_url = 'horizon:project:routers:index'
def __init__(self, request, *args, **kwargs):
super(CreateForm, self).__init__(request, *args, **kwargs)
def handle(self, request, data):
try:
router = api.quantum.router_create(request,
name=data['name'])
message = 'Router created "%s"' % data['name']
messages.success(request, message)
return router
except:
msg = _('Failed to create router "%s".') % data['name']
LOG.info(msg)
redirect = reverse(self.failure_url)
exceptions.handle(request, msg, redirect=redirect)
return False
Make 'Router created' message translatable
Change-Id: If0e246157a72fd1cabdbbde77e0c057d9d611eaa# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012, Nachi Ueno, NTT MCL, Inc.
# All rights reserved.
"""
Views for managing Quantum Routers.
"""
import logging
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from horizon import forms
from horizon import exceptions
from horizon import messages
from openstack_dashboard import api
LOG = logging.getLogger(__name__)
class CreateForm(forms.SelfHandlingForm):
name = forms.CharField(max_length="255", label=_("Router Name"))
failure_url = 'horizon:project:routers:index'
def __init__(self, request, *args, **kwargs):
super(CreateForm, self).__init__(request, *args, **kwargs)
def handle(self, request, data):
try:
router = api.quantum.router_create(request,
name=data['name'])
message = _('Router %s was successfully created.') % data['name']
messages.success(request, message)
return router
except:
msg = _('Failed to create router "%s".') % data['name']
LOG.info(msg)
redirect = reverse(self.failure_url)
exceptions.handle(request, msg, redirect=redirect)
return False
|
<commit_before># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012, Nachi Ueno, NTT MCL, Inc.
# All rights reserved.
"""
Views for managing Quantum Routers.
"""
import logging
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from horizon import forms
from horizon import exceptions
from horizon import messages
from openstack_dashboard import api
LOG = logging.getLogger(__name__)
class CreateForm(forms.SelfHandlingForm):
name = forms.CharField(max_length="255", label=_("Router Name"))
failure_url = 'horizon:project:routers:index'
def __init__(self, request, *args, **kwargs):
super(CreateForm, self).__init__(request, *args, **kwargs)
def handle(self, request, data):
try:
router = api.quantum.router_create(request,
name=data['name'])
message = 'Router created "%s"' % data['name']
messages.success(request, message)
return router
except:
msg = _('Failed to create router "%s".') % data['name']
LOG.info(msg)
redirect = reverse(self.failure_url)
exceptions.handle(request, msg, redirect=redirect)
return False
<commit_msg>Make 'Router created' message translatable
Change-Id: If0e246157a72fd1cabdbbde77e0c057d9d611eaa<commit_after># vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012, Nachi Ueno, NTT MCL, Inc.
# All rights reserved.
"""
Views for managing Quantum Routers.
"""
import logging
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from horizon import forms
from horizon import exceptions
from horizon import messages
from openstack_dashboard import api
LOG = logging.getLogger(__name__)
class CreateForm(forms.SelfHandlingForm):
name = forms.CharField(max_length="255", label=_("Router Name"))
failure_url = 'horizon:project:routers:index'
def __init__(self, request, *args, **kwargs):
super(CreateForm, self).__init__(request, *args, **kwargs)
def handle(self, request, data):
try:
router = api.quantum.router_create(request,
name=data['name'])
message = _('Router %s was successfully created.') % data['name']
messages.success(request, message)
return router
except:
msg = _('Failed to create router "%s".') % data['name']
LOG.info(msg)
redirect = reverse(self.failure_url)
exceptions.handle(request, msg, redirect=redirect)
return False
|
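The horizon record above replaces a plain interpolated success string with one wrapped in the lazy translation helper so it can be extracted into message catalogs. A small self-contained illustration of the marking pattern, using the stdlib gettext module as a stand-in for Django's translation machinery (this is a sketch, not horizon code):

import gettext

# Identity translation, standing in for Django before a locale is activated.
_ = gettext.NullTranslations().gettext

name = "router1"

# Not extractable: the literal is consumed by %-formatting before any
# translation machinery ever sees it as a msgid.
plain = 'Router created "%s"' % name

# Extractable: mark the literal first, interpolate afterwards, so tools
# like xgettext or makemessages can collect it for translators.
marked = _('Router %s was successfully created.') % name

print(plain)
print(marked)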
a8fd1f8a7e690e68abaaaee60d45ab556d3f015c
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name="libcdmi-python",
version='1.0alpha',
description="""CDMI client library""",
author="Ilja Livenson and Co",
author_email="ilja.livenson@gmail.com",
packages=find_packages(),
namespace_packages=['stoxy'],
zip_safe=False, # martian grok scan is incompatible with zipped eggs
install_requires=[
"setuptools", # Redundant but removes a warning
],
)
|
from setuptools import setup, find_packages
setup(
name="libcdmi-python",
version='1.0alpha',
description="""CDMI client library""",
author="Ilja Livenson and Co",
author_email="ilja.livenson@gmail.com",
packages=find_packages(),
zip_safe=False, # martian grok scan is incompatible with zipped eggs
install_requires=[
"setuptools", # Redundant but removes a warning
],
)
|
Remove arguably unneeded namespace declaration (STOX-10)
|
Remove arguably unneeded namespace declaration (STOX-10)
|
Python
|
apache-2.0
|
stoxy/libcdmi-python,stoxy/libcdmi-python
|
from setuptools import setup, find_packages
setup(
name="libcdmi-python",
version='1.0alpha',
description="""CDMI client library""",
author="Ilja Livenson and Co",
author_email="ilja.livenson@gmail.com",
packages=find_packages(),
namespace_packages=['stoxy'],
zip_safe=False, # martian grok scan is incompatible with zipped eggs
install_requires=[
"setuptools", # Redundant but removes a warning
],
)
Remove arguably unneeded namespace declaration (STOX-10)
|
from setuptools import setup, find_packages
setup(
name="libcdmi-python",
version='1.0alpha',
description="""CDMI client library""",
author="Ilja Livenson and Co",
author_email="ilja.livenson@gmail.com",
packages=find_packages(),
zip_safe=False, # martian grok scan is incompatible with zipped eggs
install_requires=[
"setuptools", # Redundant but removes a warning
],
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name="libcdmi-python",
version='1.0alpha',
description="""CDMI client library""",
author="Ilja Livenson and Co",
author_email="ilja.livenson@gmail.com",
packages=find_packages(),
namespace_packages=['stoxy'],
zip_safe=False, # martian grok scan is incompatible with zipped eggs
install_requires=[
"setuptools", # Redundant but removes a warning
],
)
<commit_msg>Remove arguably unneeded namespace declaration (STOX-10)<commit_after>
|
from setuptools import setup, find_packages
setup(
name="libcdmi-python",
version='1.0alpha',
description="""CDMI client library""",
author="Ilja Livenson and Co",
author_email="ilja.livenson@gmail.com",
packages=find_packages(),
zip_safe=False, # martian grok scan is incompatible with zipped eggs
install_requires=[
"setuptools", # Redundant but removes a warning
],
)
|
from setuptools import setup, find_packages
setup(
name="libcdmi-python",
version='1.0alpha',
description="""CDMI client library""",
author="Ilja Livenson and Co",
author_email="ilja.livenson@gmail.com",
packages=find_packages(),
namespace_packages=['stoxy'],
zip_safe=False, # martian grok scan is incompatible with zipped eggs
install_requires=[
"setuptools", # Redundant but removes a warning
],
)
Remove arguably unneeded namespace declaration (STOX-10)from setuptools import setup, find_packages
setup(
name="libcdmi-python",
version='1.0alpha',
description="""CDMI client library""",
author="Ilja Livenson and Co",
author_email="ilja.livenson@gmail.com",
packages=find_packages(),
zip_safe=False, # martian grok scan is incompatible with zipped eggs
install_requires=[
"setuptools", # Redundant but removes a warning
],
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name="libcdmi-python",
version='1.0alpha',
description="""CDMI client library""",
author="Ilja Livenson and Co",
author_email="ilja.livenson@gmail.com",
packages=find_packages(),
namespace_packages=['stoxy'],
zip_safe=False, # martian grok scan is incompatible with zipped eggs
install_requires=[
"setuptools", # Redundant but removes a warning
],
)
<commit_msg>Remove arguably unneeded namespace declaration (STOX-10)<commit_after>from setuptools import setup, find_packages
setup(
name="libcdmi-python",
version='1.0alpha',
description="""CDMI client library""",
author="Ilja Livenson and Co",
author_email="ilja.livenson@gmail.com",
packages=find_packages(),
zip_safe=False, # martian grok scan is incompatible with zipped eggs
install_requires=[
"setuptools", # Redundant but removes a warning
],
)
|
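The libcdmi-python record above removes namespace_packages=['stoxy'] from a distribution whose packages come from find_packages() and do not include a stoxy/ directory, so the declaration had nothing to attach to. For contrast, a hedged sketch of what a pkg_resources-style namespace declaration would have required if it were actually wanted; the distribution name and layout are hypothetical:

# stoxy/__init__.py -- every distribution sharing the 'stoxy' namespace
# would ship this one-liner and nothing else in that file.
__import__('pkg_resources').declare_namespace(__name__)

# setup.py of such a distribution (names are illustrative):
from setuptools import setup, find_packages

setup(
    name="stoxy-somefeature",
    packages=find_packages(),
    namespace_packages=["stoxy"],  # must match the declared top-level package
    zip_safe=False,
)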
7693bf3d0529c3688f8ed35f095a5a0fafea36a1
|
setup.py
|
setup.py
|
from os import path
from setuptools import setup
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(fname):
return open(path.join(path.dirname(__file__), fname)).read()
setup(
name="pconf",
version="0.5.0",
author="Andras Maroy",
author_email="andras@maroy.hu",
description=("Hierarchical python configuration with files, environment variables, command-line arguments."),
license="MIT",
keywords="configuration hierarchical",
url="https://github.com/andrasmaroy/pconf",
packages=['pconf', 'tests'],
long_description=read('README.md'),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
install_requires=['pyyaml'],
extras_require={
'test': ['pytest', 'mock'],
},
)
|
from os import path
from setuptools import setup
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(fname):
return open(path.join(path.dirname(__file__), fname)).read()
setup(
name="pconf",
version="0.5.1",
author="Andras Maroy",
author_email="andras@maroy.hu",
description=("Hierarchical python configuration with files, environment variables, command-line arguments."),
license="MIT",
keywords="configuration hierarchical",
url="https://github.com/andrasmaroy/pconf",
packages=['pconf', 'pconf.store'],
long_description=read('README.md'),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
install_requires=['pyyaml'],
extras_require={
'test': ['pytest', 'mock'],
},
)
|
Add missing files to package
|
Add missing files to package
|
Python
|
mit
|
andrasmaroy/pconf
|
from os import path
from setuptools import setup
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(fname):
return open(path.join(path.dirname(__file__), fname)).read()
setup(
name="pconf",
version="0.5.0",
author="Andras Maroy",
author_email="andras@maroy.hu",
description=("Hierarchical python configuration with files, environment variables, command-line arguments."),
license="MIT",
keywords="configuration hierarchical",
url="https://github.com/andrasmaroy/pconf",
packages=['pconf', 'tests'],
long_description=read('README.md'),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
install_requires=['pyyaml'],
extras_require={
'test': ['pytest', 'mock'],
},
)
Add missing files to package
|
from os import path
from setuptools import setup
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(fname):
return open(path.join(path.dirname(__file__), fname)).read()
setup(
name="pconf",
version="0.5.1",
author="Andras Maroy",
author_email="andras@maroy.hu",
description=("Hierarchical python configuration with files, environment variables, command-line arguments."),
license="MIT",
keywords="configuration hierarchical",
url="https://github.com/andrasmaroy/pconf",
packages=['pconf', 'pconf.store'],
long_description=read('README.md'),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
install_requires=['pyyaml'],
extras_require={
'test': ['pytest', 'mock'],
},
)
|
<commit_before>from os import path
from setuptools import setup
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(fname):
return open(path.join(path.dirname(__file__), fname)).read()
setup(
name="pconf",
version="0.5.0",
author="Andras Maroy",
author_email="andras@maroy.hu",
description=("Hierarchical python configuration with files, environment variables, command-line arguments."),
license="MIT",
keywords="configuration hierarchical",
url="https://github.com/andrasmaroy/pconf",
packages=['pconf', 'tests'],
long_description=read('README.md'),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
install_requires=['pyyaml'],
extras_require={
'test': ['pytest', 'mock'],
},
)
<commit_msg>Add missing files to package<commit_after>
|
from os import path
from setuptools import setup
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(fname):
return open(path.join(path.dirname(__file__), fname)).read()
setup(
name="pconf",
version="0.5.1",
author="Andras Maroy",
author_email="andras@maroy.hu",
description=("Hierarchical python configuration with files, environment variables, command-line arguments."),
license="MIT",
keywords="configuration hierarchical",
url="https://github.com/andrasmaroy/pconf",
packages=['pconf', 'pconf.store'],
long_description=read('README.md'),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
install_requires=['pyyaml'],
extras_require={
'test': ['pytest', 'mock'],
},
)
|
from os import path
from setuptools import setup
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(fname):
return open(path.join(path.dirname(__file__), fname)).read()
setup(
name="pconf",
version="0.5.0",
author="Andras Maroy",
author_email="andras@maroy.hu",
description=("Hierarchical python configuration with files, environment variables, command-line arguments."),
license="MIT",
keywords="configuration hierarchical",
url="https://github.com/andrasmaroy/pconf",
packages=['pconf', 'tests'],
long_description=read('README.md'),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
install_requires=['pyyaml'],
extras_require={
'test': ['pytest', 'mock'],
},
)
Add missing files to packagefrom os import path
from setuptools import setup
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(fname):
return open(path.join(path.dirname(__file__), fname)).read()
setup(
name="pconf",
version="0.5.1",
author="Andras Maroy",
author_email="andras@maroy.hu",
description=("Hierarchical python configuration with files, environment variables, command-line arguments."),
license="MIT",
keywords="configuration hierarchical",
url="https://github.com/andrasmaroy/pconf",
packages=['pconf', 'pconf.store'],
long_description=read('README.md'),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
install_requires=['pyyaml'],
extras_require={
'test': ['pytest', 'mock'],
},
)
|
<commit_before>from os import path
from setuptools import setup
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(fname):
return open(path.join(path.dirname(__file__), fname)).read()
setup(
name="pconf",
version="0.5.0",
author="Andras Maroy",
author_email="andras@maroy.hu",
description=("Hierarchical python configuration with files, environment variables, command-line arguments."),
license="MIT",
keywords="configuration hierarchical",
url="https://github.com/andrasmaroy/pconf",
packages=['pconf', 'tests'],
long_description=read('README.md'),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
install_requires=['pyyaml'],
extras_require={
'test': ['pytest', 'mock'],
},
)
<commit_msg>Add missing files to package<commit_after>from os import path
from setuptools import setup
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(fname):
return open(path.join(path.dirname(__file__), fname)).read()
setup(
name="pconf",
version="0.5.1",
author="Andras Maroy",
author_email="andras@maroy.hu",
description=("Hierarchical python configuration with files, environment variables, command-line arguments."),
license="MIT",
keywords="configuration hierarchical",
url="https://github.com/andrasmaroy/pconf",
packages=['pconf', 'pconf.store'],
long_description=read('README.md'),
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
install_requires=['pyyaml'],
extras_require={
'test': ['pytest', 'mock'],
},
)
|
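The pconf record above fixes the packages list by hand: 'tests' (which should not be installed) is dropped and the previously missing 'pconf.store' subpackage is added. A hedged sketch of the find_packages() form that sidesteps this class of omission; the exclude patterns are an assumption, not taken from the pconf repository:

from setuptools import setup, find_packages

setup(
    name="pconf",
    version="0.5.1",
    # Pick up pconf and every subpackage (pconf.store, ...) automatically,
    # while keeping the test suite out of the installed distribution.
    packages=find_packages(exclude=["tests", "tests.*"]),
    install_requires=["pyyaml"],
)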
91489b5a8e940f90c3aa70c6c8ed2dcf98c8ca9c
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name = "python-builtwith",
version = "0.2.2",
description = "BuiltWith API versions 1, 2 and 7 client",
author = "Clay McClure, Jon Gaulding, Andrew Harris",
author_email = "clay@daemons.net",
url = "https://github.com/claymation/python-builtwith",
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Topic :: Internet :: WWW/HTTP',
],
py_modules = ['builtwith'],
install_requires = [
'requests==1.1.0',
],
test_suite = 'nose.collector',
tests_require = [
'httpretty==0.5.12',
'nose==1.2.1',
]
)
|
from setuptools import setup
setup(
name = "python-builtwith",
version = "0.2.2",
description = "BuiltWith API versions 1, 2 and 7 client",
author = "Clay McClure, Jon Gaulding, Andrew Harris",
author_email = "clay@daemons.net",
url = "https://github.com/claymation/python-builtwith",
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Topic :: Internet :: WWW/HTTP',
],
py_modules = ['builtwith'],
install_requires = [
'requests==2.20.0',
],
test_suite = 'nose.collector',
tests_require = [
'httpretty==0.5.12',
'nose==1.2.1',
]
)
|
Bump requests from 1.1.0 to 2.20.0
|
Bump requests from 1.1.0 to 2.20.0
Bumps [requests](https://github.com/requests/requests) from 1.1.0 to 2.20.0.
- [Release notes](https://github.com/requests/requests/releases)
- [Changelog](https://github.com/psf/requests/blob/master/HISTORY.md)
- [Commits](https://github.com/requests/requests/compare/v1.1.0...v2.20.0)
Signed-off-by: dependabot[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@github.com>
|
Python
|
mit
|
claymation/python-builtwith
|
from setuptools import setup
setup(
name = "python-builtwith",
version = "0.2.2",
description = "BuiltWith API versions 1, 2 and 7 client",
author = "Clay McClure, Jon Gaulding, Andrew Harris",
author_email = "clay@daemons.net",
url = "https://github.com/claymation/python-builtwith",
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Topic :: Internet :: WWW/HTTP',
],
py_modules = ['builtwith'],
install_requires = [
'requests==1.1.0',
],
test_suite = 'nose.collector',
tests_require = [
'httpretty==0.5.12',
'nose==1.2.1',
]
)
Bump requests from 1.1.0 to 2.20.0
Bumps [requests](https://github.com/requests/requests) from 1.1.0 to 2.20.0.
- [Release notes](https://github.com/requests/requests/releases)
- [Changelog](https://github.com/psf/requests/blob/master/HISTORY.md)
- [Commits](https://github.com/requests/requests/compare/v1.1.0...v2.20.0)
Signed-off-by: dependabot[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@github.com>
|
from setuptools import setup
setup(
name = "python-builtwith",
version = "0.2.2",
description = "BuiltWith API versions 1, 2 and 7 client",
author = "Clay McClure, Jon Gaulding, Andrew Harris",
author_email = "clay@daemons.net",
url = "https://github.com/claymation/python-builtwith",
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Topic :: Internet :: WWW/HTTP',
],
py_modules = ['builtwith'],
install_requires = [
'requests==2.20.0',
],
test_suite = 'nose.collector',
tests_require = [
'httpretty==0.5.12',
'nose==1.2.1',
]
)
|
<commit_before>from setuptools import setup
setup(
name = "python-builtwith",
version = "0.2.2",
description = "BuiltWith API versions 1, 2 and 7 client",
author = "Clay McClure, Jon Gaulding, Andrew Harris",
author_email = "clay@daemons.net",
url = "https://github.com/claymation/python-builtwith",
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Topic :: Internet :: WWW/HTTP',
],
py_modules = ['builtwith'],
install_requires = [
'requests==1.1.0',
],
test_suite = 'nose.collector',
tests_require = [
'httpretty==0.5.12',
'nose==1.2.1',
]
)
<commit_msg>Bump requests from 1.1.0 to 2.20.0
Bumps [requests](https://github.com/requests/requests) from 1.1.0 to 2.20.0.
- [Release notes](https://github.com/requests/requests/releases)
- [Changelog](https://github.com/psf/requests/blob/master/HISTORY.md)
- [Commits](https://github.com/requests/requests/compare/v1.1.0...v2.20.0)
Signed-off-by: dependabot[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@github.com><commit_after>
|
from setuptools import setup
setup(
name = "python-builtwith",
version = "0.2.2",
description = "BuiltWith API versions 1, 2 and 7 client",
author = "Clay McClure, Jon Gaulding, Andrew Harris",
author_email = "clay@daemons.net",
url = "https://github.com/claymation/python-builtwith",
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Topic :: Internet :: WWW/HTTP',
],
py_modules = ['builtwith'],
install_requires = [
'requests==2.20.0',
],
test_suite = 'nose.collector',
tests_require = [
'httpretty==0.5.12',
'nose==1.2.1',
]
)
|
from setuptools import setup
setup(
name = "python-builtwith",
version = "0.2.2",
description = "BuiltWith API versions 1, 2 and 7 client",
author = "Clay McClure, Jon Gaulding, Andrew Harris",
author_email = "clay@daemons.net",
url = "https://github.com/claymation/python-builtwith",
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Topic :: Internet :: WWW/HTTP',
],
py_modules = ['builtwith'],
install_requires = [
'requests==1.1.0',
],
test_suite = 'nose.collector',
tests_require = [
'httpretty==0.5.12',
'nose==1.2.1',
]
)
Bump requests from 1.1.0 to 2.20.0
Bumps [requests](https://github.com/requests/requests) from 1.1.0 to 2.20.0.
- [Release notes](https://github.com/requests/requests/releases)
- [Changelog](https://github.com/psf/requests/blob/master/HISTORY.md)
- [Commits](https://github.com/requests/requests/compare/v1.1.0...v2.20.0)
Signed-off-by: dependabot[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@github.com>from setuptools import setup
setup(
name = "python-builtwith",
version = "0.2.2",
description = "BuiltWith API versions 1, 2 and 7 client",
author = "Clay McClure, Jon Gaulding, Andrew Harris",
author_email = "clay@daemons.net",
url = "https://github.com/claymation/python-builtwith",
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Topic :: Internet :: WWW/HTTP',
],
py_modules = ['builtwith'],
install_requires = [
'requests==2.20.0',
],
test_suite = 'nose.collector',
tests_require = [
'httpretty==0.5.12',
'nose==1.2.1',
]
)
|
<commit_before>from setuptools import setup
setup(
name = "python-builtwith",
version = "0.2.2",
description = "BuiltWith API versions 1, 2 and 7 client",
author = "Clay McClure, Jon Gaulding, Andrew Harris",
author_email = "clay@daemons.net",
url = "https://github.com/claymation/python-builtwith",
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Topic :: Internet :: WWW/HTTP',
],
py_modules = ['builtwith'],
install_requires = [
'requests==1.1.0',
],
test_suite = 'nose.collector',
tests_require = [
'httpretty==0.5.12',
'nose==1.2.1',
]
)
<commit_msg>Bump requests from 1.1.0 to 2.20.0
Bumps [requests](https://github.com/requests/requests) from 1.1.0 to 2.20.0.
- [Release notes](https://github.com/requests/requests/releases)
- [Changelog](https://github.com/psf/requests/blob/master/HISTORY.md)
- [Commits](https://github.com/requests/requests/compare/v1.1.0...v2.20.0)
Signed-off-by: dependabot[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@github.com><commit_after>from setuptools import setup
setup(
name = "python-builtwith",
version = "0.2.2",
description = "BuiltWith API versions 1, 2 and 7 client",
author = "Clay McClure, Jon Gaulding, Andrew Harris",
author_email = "clay@daemons.net",
url = "https://github.com/claymation/python-builtwith",
classifiers = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Topic :: Internet :: WWW/HTTP',
],
py_modules = ['builtwith'],
install_requires = [
'requests==2.20.0',
],
test_suite = 'nose.collector',
tests_require = [
'httpretty==0.5.12',
'nose==1.2.1',
]
)
|
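The python-builtwith record above is a dependabot bump of an exact pin, requests==1.1.0 -> requests==2.20.0. For a library (as opposed to an application) a lower bound is the more common shape; a hedged sketch, with the floor simply mirroring the version the bump targets:

from setuptools import setup

setup(
    name="python-builtwith",
    py_modules=["builtwith"],
    # A floor instead of an exact pin lets downstream projects resolve a
    # single requests version; the 2.20.0 floor mirrors the bumped pin.
    install_requires=["requests>=2.20.0"],
)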
e2d27eba4c751b63a3e25daef8f22d910fa47cdc
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from distutils.core import setup
setup(
name='wagtail_embed_videos',
version='0.0.5',
description='Embed Videos for Wagtail CMS.',
long_description=README,
author='Diogo Marques',
author_email='doriva.marques.29@gmail.com',
maintainer='Diogo Marques',
maintainer_email='doriva.marques.29@gmail.com',
url='https://github.com/infoportugal/wagtail-embedvideos',
packages=['wagtail_embed_videos', 'wagtail_embed_videos.views'],
package_data={'wagtail_embed_videos': ['static/wagtail_embed_videos/js/*.js']},
requires=['django(>=1.7)', 'wagtail(>=1.0)', 'django-embed-video(>=1.0)'],
install_requires=['wagtail', 'django-embed-video'],
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Operating System :: OS Independent',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Framework :: Django',
'Framework :: Wagtail CMS',
'License :: OSI Approved :: BSD License'],
license='New BSD',
)
|
#!/usr/bin/env python
from distutils.core import setup
setup(
name='wagtail_embed_videos',
version='0.0.5',
description='Embed Videos for Wagtail CMS.',
long_description=(
'Simple app that works similar to wagtailimages,'
'but for embedding YouTube and Vimeo videos and music from SoundCloud. '
        "It's an integration of django-embed-video."
    ),
author='Diogo Marques',
author_email='doriva.marques.29@gmail.com',
maintainer='Diogo Marques',
maintainer_email='doriva.marques.29@gmail.com',
url='https://github.com/infoportugal/wagtail-embedvideos',
packages=['wagtail_embed_videos', 'wagtail_embed_videos.views'],
package_data={'wagtail_embed_videos': ['static/wagtail_embed_videos/js/*.js']},
requires=['django(>=1.7)', 'wagtail(>=1.0)', 'django-embed-video(>=1.0)'],
install_requires=['wagtail', 'django-embed-video'],
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Operating System :: OS Independent',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Framework :: Django',
'Framework :: Wagtail CMS',
'License :: OSI Approved :: BSD License'],
license='New BSD',
)
|
Fix NameError: name 'README' is not defined
|
Fix NameError: name 'README' is not defined
Traceback (most recent call last):
File "<string>", line 20, in <module>
File "/tmp/pip-g43cf6a2-build/setup.py", line 9, in <module>
long_description=README,
NameError: name 'README' is not defined
|
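The traceback above shows the cause plainly: setup.py referenced a README name that was never defined, and the commit's fix is to inline a literal long_description instead. The other common repair — the same read() helper that appears in the pconf record earlier in this file — loads the text from disk; a sketch, with the filename being the usual convention rather than something confirmed for this repository:

from os import path

def read(fname):
    """Return the contents of a file that sits next to setup.py."""
    with open(path.join(path.dirname(__file__), fname)) as f:
        return f.read()

# setup(..., long_description=read('README.md'), ...)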
Python
|
bsd-3-clause
|
SalahAdDin/wagtail-embedvideos,SalahAdDin/wagtail-embedvideos,infoportugal/wagtail-embedvideos,SalahAdDin/wagtail-embedvideos,infoportugal/wagtail-embedvideos,infoportugal/wagtail-embedvideos
|
#!/usr/bin/env python
from distutils.core import setup
setup(
name='wagtail_embed_videos',
version='0.0.5',
description='Embed Videos for Wagtail CMS.',
long_description=README,
author='Diogo Marques',
author_email='doriva.marques.29@gmail.com',
maintainer='Diogo Marques',
maintainer_email='doriva.marques.29@gmail.com',
url='https://github.com/infoportugal/wagtail-embedvideos',
packages=['wagtail_embed_videos', 'wagtail_embed_videos.views'],
package_data={'wagtail_embed_videos': ['static/wagtail_embed_videos/js/*.js']},
requires=['django(>=1.7)', 'wagtail(>=1.0)', 'django-embed-video(>=1.0)'],
install_requires=['wagtail', 'django-embed-video'],
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Operating System :: OS Independent',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Framework :: Django',
'Framework :: Wagtail CMS',
'License :: OSI Approved :: BSD License'],
license='New BSD',
)
Fix NameError: name 'README' is not defined
Traceback (most recent call last):
File "<string>", line 20, in <module>
File "/tmp/pip-g43cf6a2-build/setup.py", line 9, in <module>
long_description=README,
NameError: name 'README' is not defined
|
#!/usr/bin/env python
from distutils.core import setup
setup(
name='wagtail_embed_videos',
version='0.0.5',
description='Embed Videos for Wagtail CMS.',
long_description=(
'Simple app that works similar to wagtailimages,'
'but for embedding YouTube and Vimeo videos and music from SoundCloud. '
        "It's an integration of django-embed-video."
    ),
author='Diogo Marques',
author_email='doriva.marques.29@gmail.com',
maintainer='Diogo Marques',
maintainer_email='doriva.marques.29@gmail.com',
url='https://github.com/infoportugal/wagtail-embedvideos',
packages=['wagtail_embed_videos', 'wagtail_embed_videos.views'],
package_data={'wagtail_embed_videos': ['static/wagtail_embed_videos/js/*.js']},
requires=['django(>=1.7)', 'wagtail(>=1.0)', 'django-embed-video(>=1.0)'],
install_requires=['wagtail', 'django-embed-video'],
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Operating System :: OS Independent',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Framework :: Django',
'Framework :: Wagtail CMS',
'License :: OSI Approved :: BSD License'],
license='New BSD',
)
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup
setup(
name='wagtail_embed_videos',
version='0.0.5',
description='Embed Videos for Wagtail CMS.',
long_description=README,
author='Diogo Marques',
author_email='doriva.marques.29@gmail.com',
maintainer='Diogo Marques',
maintainer_email='doriva.marques.29@gmail.com',
url='https://github.com/infoportugal/wagtail-embedvideos',
packages=['wagtail_embed_videos', 'wagtail_embed_videos.views'],
package_data={'wagtail_embed_videos': ['static/wagtail_embed_videos/js/*.js']},
requires=['django(>=1.7)', 'wagtail(>=1.0)', 'django-embed-video(>=1.0)'],
install_requires=['wagtail', 'django-embed-video'],
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Operating System :: OS Independent',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Framework :: Django',
'Framework :: Wagtail CMS',
'License :: OSI Approved :: BSD License'],
license='New BSD',
)
<commit_msg>Fix NameError: name 'README' is not defined
Traceback (most recent call last):
File "<string>", line 20, in <module>
File "/tmp/pip-g43cf6a2-build/setup.py", line 9, in <module>
long_description=README,
NameError: name 'README' is not defined<commit_after>
|
#!/usr/bin/env python
from distutils.core import setup
setup(
name='wagtail_embed_videos',
version='0.0.5',
description='Embed Videos for Wagtail CMS.',
long_description=(
'Simple app that works similar to wagtailimages,'
'but for embedding YouTube and Vimeo videos and music from SoundCloud. '
        "It's an integration of django-embed-video."
    ),
author='Diogo Marques',
author_email='doriva.marques.29@gmail.com',
maintainer='Diogo Marques',
maintainer_email='doriva.marques.29@gmail.com',
url='https://github.com/infoportugal/wagtail-embedvideos',
packages=['wagtail_embed_videos', 'wagtail_embed_videos.views'],
package_data={'wagtail_embed_videos': ['static/wagtail_embed_videos/js/*.js']},
requires=['django(>=1.7)', 'wagtail(>=1.0)', 'django-embed-video(>=1.0)'],
install_requires=['wagtail', 'django-embed-video'],
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Operating System :: OS Independent',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Framework :: Django',
'Framework :: Wagtail CMS',
'License :: OSI Approved :: BSD License'],
license='New BSD',
)
|
#!/usr/bin/env python
from distutils.core import setup
setup(
name='wagtail_embed_videos',
version='0.0.5',
description='Embed Videos for Wagtail CMS.',
long_description=README,
author='Diogo Marques',
author_email='doriva.marques.29@gmail.com',
maintainer='Diogo Marques',
maintainer_email='doriva.marques.29@gmail.com',
url='https://github.com/infoportugal/wagtail-embedvideos',
packages=['wagtail_embed_videos', 'wagtail_embed_videos.views'],
package_data={'wagtail_embed_videos': ['static/wagtail_embed_videos/js/*.js']},
requires=['django(>=1.7)', 'wagtail(>=1.0)', 'django-embed-video(>=1.0)'],
install_requires=['wagtail', 'django-embed-video'],
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Operating System :: OS Independent',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Framework :: Django',
'Framework :: Wagtail CMS',
'License :: OSI Approved :: BSD License'],
license='New BSD',
)
Fix NameError: name 'README' is not defined
Traceback (most recent call last):
File "<string>", line 20, in <module>
File "/tmp/pip-g43cf6a2-build/setup.py", line 9, in <module>
long_description=README,
NameError: name 'README' is not defined#!/usr/bin/env python
from distutils.core import setup
setup(
name='wagtail_embed_videos',
version='0.0.5',
description='Embed Videos for Wagtail CMS.',
    long_description=(
        'Simple app that works similar to wagtailimages, '
        'but for embedding YouTube and Vimeo videos and music from SoundCloud. '
        "It's an integration of django-embed-video."
    ),
author='Diogo Marques',
author_email='doriva.marques.29@gmail.com',
maintainer='Diogo Marques',
maintainer_email='doriva.marques.29@gmail.com',
url='https://github.com/infoportugal/wagtail-embedvideos',
packages=['wagtail_embed_videos', 'wagtail_embed_videos.views'],
package_data={'wagtail_embed_videos': ['static/wagtail_embed_videos/js/*.js']},
requires=['django(>=1.7)', 'wagtail(>=1.0)', 'django-embed-video(>=1.0)'],
install_requires=['wagtail', 'django-embed-video'],
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Operating System :: OS Independent',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Framework :: Django',
'Framework :: Wagtail CMS',
'License :: OSI Approved :: BSD License'],
license='New BSD',
)
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup
setup(
name='wagtail_embed_videos',
version='0.0.5',
description='Embed Videos for Wagtail CMS.',
long_description=README,
author='Diogo Marques',
author_email='doriva.marques.29@gmail.com',
maintainer='Diogo Marques',
maintainer_email='doriva.marques.29@gmail.com',
url='https://github.com/infoportugal/wagtail-embedvideos',
packages=['wagtail_embed_videos', 'wagtail_embed_videos.views'],
package_data={'wagtail_embed_videos': ['static/wagtail_embed_videos/js/*.js']},
requires=['django(>=1.7)', 'wagtail(>=1.0)', 'django-embed-video(>=1.0)'],
install_requires=['wagtail', 'django-embed-video'],
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Operating System :: OS Independent',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Framework :: Django',
'Framework :: Wagtail CMS',
'License :: OSI Approved :: BSD License'],
license='New BSD',
)
<commit_msg>Fix NameError: name 'README' is not defined
Traceback (most recent call last):
File "<string>", line 20, in <module>
File "/tmp/pip-g43cf6a2-build/setup.py", line 9, in <module>
long_description=README,
NameError: name 'README' is not defined<commit_after>#!/usr/bin/env python
from distutils.core import setup
setup(
name='wagtail_embed_videos',
version='0.0.5',
description='Embed Videos for Wagtail CMS.',
    long_description=(
        'Simple app that works similar to wagtailimages, '
        'but for embedding YouTube and Vimeo videos and music from SoundCloud. '
        "It's an integration of django-embed-video."
    ),
author='Diogo Marques',
author_email='doriva.marques.29@gmail.com',
maintainer='Diogo Marques',
maintainer_email='doriva.marques.29@gmail.com',
url='https://github.com/infoportugal/wagtail-embedvideos',
packages=['wagtail_embed_videos', 'wagtail_embed_videos.views'],
package_data={'wagtail_embed_videos': ['static/wagtail_embed_videos/js/*.js']},
requires=['django(>=1.7)', 'wagtail(>=1.0)', 'django-embed-video(>=1.0)'],
install_requires=['wagtail', 'django-embed-video'],
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Operating System :: OS Independent',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Framework :: Django',
'Framework :: Wagtail CMS',
'License :: OSI Approved :: BSD License'],
license='New BSD',
)
|
98d3fa0705b25468ce606ba309085f2da7c476b6
|
setup.py
|
setup.py
|
from setuptools import setup
import subprocess, os
version = open('pymoku/version.txt').read().strip()
setup(
name='pymoku',
version=version,
author='Ben Nizette',
author_email='ben.nizette@liquidinstruments.com',
packages=['pymoku'],
package_dir={'pymoku': 'pymoku/'},
package_data={
'pymoku' : ['version.txt', '*.capnp', 'bin/*']
},
license='MIT',
long_description="Python scripting interface to the Liquid Instruments Moku:Lab",
url="https://github.com/liquidinstruments/pymoku",
download_url="https://github.com/liquidinstruments/pymoku/archive/%s.tar.gz" % version,
keywords=['moku', 'liquid instruments', 'test', 'measurement', 'lab', 'equipment'],
entry_points={
'console_scripts' : [
'moku=pymoku.tools.moku:main',
'moku_convert=pymoku.tools.moku_convert:main',
]
},
install_requires=[
'future',
'pyzmq>=15.3.0',
'six',
'urllib3',
'pyzmq',
'rfc6266',
'requests',
],
)
|
from setuptools import setup
import subprocess, os
version = open('pymoku/version.txt').read().strip()
setup(
name='pymoku',
version=version,
author='Ben Nizette',
author_email='ben.nizette@liquidinstruments.com',
packages=['pymoku', 'pymoku.tools'],
package_dir={'pymoku': 'pymoku/'},
package_data={
'pymoku' : ['version.txt', '*.capnp', 'bin/*']
},
license='MIT',
long_description="Python scripting interface to the Liquid Instruments Moku:Lab",
url="https://github.com/liquidinstruments/pymoku",
download_url="https://github.com/liquidinstruments/pymoku/archive/%s.tar.gz" % version,
keywords=['moku', 'liquid instruments', 'test', 'measurement', 'lab', 'equipment'],
entry_points={
'console_scripts' : [
'moku=pymoku.tools.moku:main',
'moku_convert=pymoku.tools.moku_convert:main',
]
},
install_requires=[
'future',
'pyzmq>=15.3.0',
'six',
'urllib3',
'pyzmq',
'rfc6266',
'requests',
],
zip_safe=False, # This isn't strictly true, but makes debugging easier on the device
)
|
Fix missing tools package install
|
HG-1871: Fix missing tools package install
|
Python
|
mit
|
liquidinstruments/pymoku
|
from setuptools import setup
import subprocess, os
version = open('pymoku/version.txt').read().strip()
setup(
name='pymoku',
version=version,
author='Ben Nizette',
author_email='ben.nizette@liquidinstruments.com',
packages=['pymoku'],
package_dir={'pymoku': 'pymoku/'},
package_data={
'pymoku' : ['version.txt', '*.capnp', 'bin/*']
},
license='MIT',
long_description="Python scripting interface to the Liquid Instruments Moku:Lab",
url="https://github.com/liquidinstruments/pymoku",
download_url="https://github.com/liquidinstruments/pymoku/archive/%s.tar.gz" % version,
keywords=['moku', 'liquid instruments', 'test', 'measurement', 'lab', 'equipment'],
entry_points={
'console_scripts' : [
'moku=pymoku.tools.moku:main',
'moku_convert=pymoku.tools.moku_convert:main',
]
},
install_requires=[
'future',
'pyzmq>=15.3.0',
'six',
'urllib3',
'pyzmq',
'rfc6266',
'requests',
],
)
HG-1871: Fix missing tools package install
|
from setuptools import setup
import subprocess, os
version = open('pymoku/version.txt').read().strip()
setup(
name='pymoku',
version=version,
author='Ben Nizette',
author_email='ben.nizette@liquidinstruments.com',
packages=['pymoku', 'pymoku.tools'],
package_dir={'pymoku': 'pymoku/'},
package_data={
'pymoku' : ['version.txt', '*.capnp', 'bin/*']
},
license='MIT',
long_description="Python scripting interface to the Liquid Instruments Moku:Lab",
url="https://github.com/liquidinstruments/pymoku",
download_url="https://github.com/liquidinstruments/pymoku/archive/%s.tar.gz" % version,
keywords=['moku', 'liquid instruments', 'test', 'measurement', 'lab', 'equipment'],
entry_points={
'console_scripts' : [
'moku=pymoku.tools.moku:main',
'moku_convert=pymoku.tools.moku_convert:main',
]
},
install_requires=[
'future',
'pyzmq>=15.3.0',
'six',
'urllib3',
'pyzmq',
'rfc6266',
'requests',
],
zip_safe=False, # This isn't strictly true, but makes debugging easier on the device
)
|
<commit_before>from setuptools import setup
import subprocess, os
version = open('pymoku/version.txt').read().strip()
setup(
name='pymoku',
version=version,
author='Ben Nizette',
author_email='ben.nizette@liquidinstruments.com',
packages=['pymoku'],
package_dir={'pymoku': 'pymoku/'},
package_data={
'pymoku' : ['version.txt', '*.capnp', 'bin/*']
},
license='MIT',
long_description="Python scripting interface to the Liquid Instruments Moku:Lab",
url="https://github.com/liquidinstruments/pymoku",
download_url="https://github.com/liquidinstruments/pymoku/archive/%s.tar.gz" % version,
keywords=['moku', 'liquid instruments', 'test', 'measurement', 'lab', 'equipment'],
entry_points={
'console_scripts' : [
'moku=pymoku.tools.moku:main',
'moku_convert=pymoku.tools.moku_convert:main',
]
},
install_requires=[
'future',
'pyzmq>=15.3.0',
'six',
'urllib3',
'pyzmq',
'rfc6266',
'requests',
],
)
<commit_msg>HG-1871: Fix missing tools package install<commit_after>
|
from setuptools import setup
import subprocess, os
version = open('pymoku/version.txt').read().strip()
setup(
name='pymoku',
version=version,
author='Ben Nizette',
author_email='ben.nizette@liquidinstruments.com',
packages=['pymoku', 'pymoku.tools'],
package_dir={'pymoku': 'pymoku/'},
package_data={
'pymoku' : ['version.txt', '*.capnp', 'bin/*']
},
license='MIT',
long_description="Python scripting interface to the Liquid Instruments Moku:Lab",
url="https://github.com/liquidinstruments/pymoku",
download_url="https://github.com/liquidinstruments/pymoku/archive/%s.tar.gz" % version,
keywords=['moku', 'liquid instruments', 'test', 'measurement', 'lab', 'equipment'],
entry_points={
'console_scripts' : [
'moku=pymoku.tools.moku:main',
'moku_convert=pymoku.tools.moku_convert:main',
]
},
install_requires=[
'future',
'pyzmq>=15.3.0',
'six',
'urllib3',
'pyzmq',
'rfc6266',
'requests',
],
zip_safe=False, # This isn't strictly true, but makes debugging easier on the device
)
|
from setuptools import setup
import subprocess, os
version = open('pymoku/version.txt').read().strip()
setup(
name='pymoku',
version=version,
author='Ben Nizette',
author_email='ben.nizette@liquidinstruments.com',
packages=['pymoku'],
package_dir={'pymoku': 'pymoku/'},
package_data={
'pymoku' : ['version.txt', '*.capnp', 'bin/*']
},
license='MIT',
long_description="Python scripting interface to the Liquid Instruments Moku:Lab",
url="https://github.com/liquidinstruments/pymoku",
download_url="https://github.com/liquidinstruments/pymoku/archive/%s.tar.gz" % version,
keywords=['moku', 'liquid instruments', 'test', 'measurement', 'lab', 'equipment'],
entry_points={
'console_scripts' : [
'moku=pymoku.tools.moku:main',
'moku_convert=pymoku.tools.moku_convert:main',
]
},
install_requires=[
'future',
'pyzmq>=15.3.0',
'six',
'urllib3',
'pyzmq',
'rfc6266',
'requests',
],
)
HG-1871: Fix missing tools package installfrom setuptools import setup
import subprocess, os
version = open('pymoku/version.txt').read().strip()
setup(
name='pymoku',
version=version,
author='Ben Nizette',
author_email='ben.nizette@liquidinstruments.com',
packages=['pymoku', 'pymoku.tools'],
package_dir={'pymoku': 'pymoku/'},
package_data={
'pymoku' : ['version.txt', '*.capnp', 'bin/*']
},
license='MIT',
long_description="Python scripting interface to the Liquid Instruments Moku:Lab",
url="https://github.com/liquidinstruments/pymoku",
download_url="https://github.com/liquidinstruments/pymoku/archive/%s.tar.gz" % version,
keywords=['moku', 'liquid instruments', 'test', 'measurement', 'lab', 'equipment'],
entry_points={
'console_scripts' : [
'moku=pymoku.tools.moku:main',
'moku_convert=pymoku.tools.moku_convert:main',
]
},
install_requires=[
'future',
'pyzmq>=15.3.0',
'six',
'urllib3',
'pyzmq',
'rfc6266',
'requests',
],
zip_safe=False, # This isn't strictly true, but makes debugging easier on the device
)
|
<commit_before>from setuptools import setup
import subprocess, os
version = open('pymoku/version.txt').read().strip()
setup(
name='pymoku',
version=version,
author='Ben Nizette',
author_email='ben.nizette@liquidinstruments.com',
packages=['pymoku'],
package_dir={'pymoku': 'pymoku/'},
package_data={
'pymoku' : ['version.txt', '*.capnp', 'bin/*']
},
license='MIT',
long_description="Python scripting interface to the Liquid Instruments Moku:Lab",
url="https://github.com/liquidinstruments/pymoku",
download_url="https://github.com/liquidinstruments/pymoku/archive/%s.tar.gz" % version,
keywords=['moku', 'liquid instruments', 'test', 'measurement', 'lab', 'equipment'],
entry_points={
'console_scripts' : [
'moku=pymoku.tools.moku:main',
'moku_convert=pymoku.tools.moku_convert:main',
]
},
install_requires=[
'future',
'pyzmq>=15.3.0',
'six',
'urllib3',
'pyzmq',
'rfc6266',
'requests',
],
)
<commit_msg>HG-1871: Fix missing tools package install<commit_after>from setuptools import setup
import subprocess, os
version = open('pymoku/version.txt').read().strip()
setup(
name='pymoku',
version=version,
author='Ben Nizette',
author_email='ben.nizette@liquidinstruments.com',
packages=['pymoku', 'pymoku.tools'],
package_dir={'pymoku': 'pymoku/'},
package_data={
'pymoku' : ['version.txt', '*.capnp', 'bin/*']
},
license='MIT',
long_description="Python scripting interface to the Liquid Instruments Moku:Lab",
url="https://github.com/liquidinstruments/pymoku",
download_url="https://github.com/liquidinstruments/pymoku/archive/%s.tar.gz" % version,
keywords=['moku', 'liquid instruments', 'test', 'measurement', 'lab', 'equipment'],
entry_points={
'console_scripts' : [
'moku=pymoku.tools.moku:main',
'moku_convert=pymoku.tools.moku_convert:main',
]
},
install_requires=[
'future',
'pyzmq>=15.3.0',
'six',
'urllib3',
'pyzmq',
'rfc6266',
'requests',
],
zip_safe=False, # This isn't strictly true, but makes debugging easier on the device
)
|
31c7be100ed36a39231b302d6306df51375384d1
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='braubuddy',
version='0.2.0',
author='James Stewart',
author_email='jstewart101@gmail.com',
packages=['braubuddy'],
scripts=[],
url='http://pypi.python.org/pypi/Braubuddy/',
license='LICENSE.txt',
description='An extensile thermostat framework',
long_description=open('README.rst').read(),
entry_points={
'console_scripts': [
'braubuddy = braubuddy.runserver:main',
]
},
install_requires=[
'pyserial>=2.0',
'tosr0x>=0.2.0',
'temperusb>=1.2.0',
'ds18b20>=0.01.03',
'cherrypy>=3.2.2',
'pyxdg>=0.25',
'jinja2>=2.7.0',
'alabaster>=0.6.0',
],
)
|
from setuptools import setup, find_packages
setup(
name='braubuddy',
version='0.2.0',
author='James Stewart',
author_email='jstewart101@gmail.com',
description='An extensile thermostat framework',
long_description=open('README.rst').read(),
license='LICENSE.txt',
packages=find_packages(),
scripts=[],
    test_suite='braubuddy.tests',
    url='http://braubuddy.org/',
entry_points={
'console_scripts': [
'braubuddy = braubuddy.runserver:main',
]
},
install_requires=[
'pyserial>=2.0',
'tosr0x>=0.2.0',
'temperusb>=1.2.0',
'ds18b20>=0.01.03',
'cherrypy>=3.2.2',
'pyxdg>=0.25',
'jinja2>=2.7.0',
'alabaster>=0.6.0',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
)
|
Add automagic package finding and classifiers.
|
Add automagic package finding and classifiers.
|
Python
|
bsd-3-clause
|
amorphic/braubuddy,amorphic/braubuddy,amorphic/braubuddy
|
from setuptools import setup
setup(
name='braubuddy',
version='0.2.0',
author='James Stewart',
author_email='jstewart101@gmail.com',
packages=['braubuddy'],
scripts=[],
url='http://pypi.python.org/pypi/Braubuddy/',
license='LICENSE.txt',
description='An extensile thermostat framework',
long_description=open('README.rst').read(),
entry_points={
'console_scripts': [
'braubuddy = braubuddy.runserver:main',
]
},
install_requires=[
'pyserial>=2.0',
'tosr0x>=0.2.0',
'temperusb>=1.2.0',
'ds18b20>=0.01.03',
'cherrypy>=3.2.2',
'pyxdg>=0.25',
'jinja2>=2.7.0',
'alabaster>=0.6.0',
],
)
Add automagic package finding and classifiers.
|
from setuptools import setup, find_packages
setup(
name='braubuddy',
version='0.2.0',
author='James Stewart',
author_email='jstewart101@gmail.com',
description='An extensile thermostat framework',
long_description=open('README.rst').read(),
license='LICENSE.txt',
packages=find_packages(),
scripts=[],
    test_suite='braubuddy.tests',
    url='http://braubuddy.org/',
entry_points={
'console_scripts': [
'braubuddy = braubuddy.runserver:main',
]
},
install_requires=[
'pyserial>=2.0',
'tosr0x>=0.2.0',
'temperusb>=1.2.0',
'ds18b20>=0.01.03',
'cherrypy>=3.2.2',
'pyxdg>=0.25',
'jinja2>=2.7.0',
'alabaster>=0.6.0',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
)
|
<commit_before>from setuptools import setup
setup(
name='braubuddy',
version='0.2.0',
author='James Stewart',
author_email='jstewart101@gmail.com',
packages=['braubuddy'],
scripts=[],
url='http://pypi.python.org/pypi/Braubuddy/',
license='LICENSE.txt',
description='An extensile thermostat framework',
long_description=open('README.rst').read(),
entry_points={
'console_scripts': [
'braubuddy = braubuddy.runserver:main',
]
},
install_requires=[
'pyserial>=2.0',
'tosr0x>=0.2.0',
'temperusb>=1.2.0',
'ds18b20>=0.01.03',
'cherrypy>=3.2.2',
'pyxdg>=0.25',
'jinja2>=2.7.0',
'alabaster>=0.6.0',
],
)
<commit_msg>Add automagic package finding and classifiers.<commit_after>
|
from setuptools import setup, find_packages
setup(
name='braubuddy',
version='0.2.0',
author='James Stewart',
author_email='jstewart101@gmail.com',
description='An extensile thermostat framework',
long_description=open('README.rst').read(),
license='LICENSE.txt',
packages=find_packages(),
scripts=[],
    test_suite='braubuddy.tests',
    url='http://braubuddy.org/',
entry_points={
'console_scripts': [
'braubuddy = braubuddy.runserver:main',
]
},
install_requires=[
'pyserial>=2.0',
'tosr0x>=0.2.0',
'temperusb>=1.2.0',
'ds18b20>=0.01.03',
'cherrypy>=3.2.2',
'pyxdg>=0.25',
'jinja2>=2.7.0',
'alabaster>=0.6.0',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
)
|
from setuptools import setup
setup(
name='braubuddy',
version='0.2.0',
author='James Stewart',
author_email='jstewart101@gmail.com',
packages=['braubuddy'],
scripts=[],
url='http://pypi.python.org/pypi/Braubuddy/',
license='LICENSE.txt',
description='An extensile thermostat framework',
long_description=open('README.rst').read(),
entry_points={
'console_scripts': [
'braubuddy = braubuddy.runserver:main',
]
},
install_requires=[
'pyserial>=2.0',
'tosr0x>=0.2.0',
'temperusb>=1.2.0',
'ds18b20>=0.01.03',
'cherrypy>=3.2.2',
'pyxdg>=0.25',
'jinja2>=2.7.0',
'alabaster>=0.6.0',
],
)
Add automagic package finding and classifiers.from setuptools import setup, find_packages
setup(
name='braubuddy',
version='0.2.0',
author='James Stewart',
author_email='jstewart101@gmail.com',
description='An extensile thermostat framework',
long_description=open('README.rst').read(),
license='LICENSE.txt',
packages=find_packages(),
scripts=[],
    test_suite='braubuddy.tests',
    url='http://braubuddy.org/',
entry_points={
'console_scripts': [
'braubuddy = braubuddy.runserver:main',
]
},
install_requires=[
'pyserial>=2.0',
'tosr0x>=0.2.0',
'temperusb>=1.2.0',
'ds18b20>=0.01.03',
'cherrypy>=3.2.2',
'pyxdg>=0.25',
'jinja2>=2.7.0',
'alabaster>=0.6.0',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
)
|
<commit_before>from setuptools import setup
setup(
name='braubuddy',
version='0.2.0',
author='James Stewart',
author_email='jstewart101@gmail.com',
packages=['braubuddy'],
scripts=[],
url='http://pypi.python.org/pypi/Braubuddy/',
license='LICENSE.txt',
description='An extensile thermostat framework',
long_description=open('README.rst').read(),
entry_points={
'console_scripts': [
'braubuddy = braubuddy.runserver:main',
]
},
install_requires=[
'pyserial>=2.0',
'tosr0x>=0.2.0',
'temperusb>=1.2.0',
'ds18b20>=0.01.03',
'cherrypy>=3.2.2',
'pyxdg>=0.25',
'jinja2>=2.7.0',
'alabaster>=0.6.0',
],
)
<commit_msg>Add automagic package finding and classifiers.<commit_after>from setuptools import setup, find_packages
setup(
name='braubuddy',
version='0.2.0',
author='James Stewart',
author_email='jstewart101@gmail.com',
description='An extensile thermostat framework',
long_description=open('README.rst').read(),
license='LICENSE.txt',
packages=find_packages(),
scripts=[],
    test_suite='braubuddy.tests',
    url='http://braubuddy.org/',
entry_points={
'console_scripts': [
'braubuddy = braubuddy.runserver:main',
]
},
install_requires=[
'pyserial>=2.0',
'tosr0x>=0.2.0',
'temperusb>=1.2.0',
'ds18b20>=0.01.03',
'cherrypy>=3.2.2',
'pyxdg>=0.25',
'jinja2>=2.7.0',
'alabaster>=0.6.0',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
)
|
7f3efb14c41cdc9fcfda28fa67046eecc18c6f34
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
long_description = open(
os.path.join(
os.path.dirname(__file__),
'README.rst'
)
).read()
setup(
name='ptpython',
author='Jonathan Slenders',
version='0.34',
url='https://github.com/jonathanslenders/ptpython',
description='Python REPL build on top of prompt_toolkit',
long_description=long_description,
packages=find_packages('.'),
install_requires = [
'docopt',
'jedi>=0.9.0',
'prompt_toolkit>=1.0.0,<2.0.0',
'pygments',
],
entry_points={
'console_scripts': [
'ptpython = ptpython.entry_points.run_ptpython:run',
'ptipython = ptpython.entry_points.run_ptipython:run',
]
},
extra_require={
'ptipython': ['ipython'] # For ptipython, we need to have IPython
}
)
|
#!/usr/bin/env python
import os
import sys
from setuptools import setup, find_packages
long_description = open(
os.path.join(
os.path.dirname(__file__),
'README.rst'
)
).read()
setup(
name='ptpython',
author='Jonathan Slenders',
version='0.34',
url='https://github.com/jonathanslenders/ptpython',
description='Python REPL build on top of prompt_toolkit',
long_description=long_description,
packages=find_packages('.'),
install_requires = [
'docopt',
'jedi>=0.9.0',
'prompt_toolkit>=1.0.0,<2.0.0',
'pygments',
],
entry_points={
'console_scripts': [
'ptpython = ptpython.entry_points.run_ptpython:run',
'ptipython = ptpython.entry_points.run_ptipython:run',
'ptpython%s = ptpython.entry_points.run_ptpython:run' % sys.version_info[0],
'ptpython%s.%s = ptpython.entry_points.run_ptpython:run' % sys.version_info[:2],
'ptipython%s = ptpython.entry_points.run_ptipython:run' % sys.version_info[0],
'ptipython%s.%s = ptpython.entry_points.run_ptipython:run' % sys.version_info[:2],
]
},
extra_require={
'ptipython': ['ipython'] # For ptipython, we need to have IPython
}
)
|
Add `pt[i]pythonX` and `pt[i]pythonX.X` commands
|
Add `pt[i]pythonX` and `pt[i]pythonX.X` commands
|
Python
|
bsd-3-clause
|
jonathanslenders/ptpython
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
long_description = open(
os.path.join(
os.path.dirname(__file__),
'README.rst'
)
).read()
setup(
name='ptpython',
author='Jonathan Slenders',
version='0.34',
url='https://github.com/jonathanslenders/ptpython',
description='Python REPL build on top of prompt_toolkit',
long_description=long_description,
packages=find_packages('.'),
install_requires = [
'docopt',
'jedi>=0.9.0',
'prompt_toolkit>=1.0.0,<2.0.0',
'pygments',
],
entry_points={
'console_scripts': [
'ptpython = ptpython.entry_points.run_ptpython:run',
'ptipython = ptpython.entry_points.run_ptipython:run',
]
},
extra_require={
'ptipython': ['ipython'] # For ptipython, we need to have IPython
}
)
Add `pt[i]pythonX` and `pt[i]pythonX.X` commands
|
#!/usr/bin/env python
import os
import sys
from setuptools import setup, find_packages
long_description = open(
os.path.join(
os.path.dirname(__file__),
'README.rst'
)
).read()
setup(
name='ptpython',
author='Jonathan Slenders',
version='0.34',
url='https://github.com/jonathanslenders/ptpython',
description='Python REPL build on top of prompt_toolkit',
long_description=long_description,
packages=find_packages('.'),
install_requires = [
'docopt',
'jedi>=0.9.0',
'prompt_toolkit>=1.0.0,<2.0.0',
'pygments',
],
entry_points={
'console_scripts': [
'ptpython = ptpython.entry_points.run_ptpython:run',
'ptipython = ptpython.entry_points.run_ptipython:run',
'ptpython%s = ptpython.entry_points.run_ptpython:run' % sys.version_info[0],
'ptpython%s.%s = ptpython.entry_points.run_ptpython:run' % sys.version_info[:2],
'ptipython%s = ptpython.entry_points.run_ptipython:run' % sys.version_info[0],
'ptipython%s.%s = ptpython.entry_points.run_ptipython:run' % sys.version_info[:2],
]
},
extra_require={
'ptipython': ['ipython'] # For ptipython, we need to have IPython
}
)
|
<commit_before>#!/usr/bin/env python
import os
from setuptools import setup, find_packages
long_description = open(
os.path.join(
os.path.dirname(__file__),
'README.rst'
)
).read()
setup(
name='ptpython',
author='Jonathan Slenders',
version='0.34',
url='https://github.com/jonathanslenders/ptpython',
description='Python REPL build on top of prompt_toolkit',
long_description=long_description,
packages=find_packages('.'),
install_requires = [
'docopt',
'jedi>=0.9.0',
'prompt_toolkit>=1.0.0,<2.0.0',
'pygments',
],
entry_points={
'console_scripts': [
'ptpython = ptpython.entry_points.run_ptpython:run',
'ptipython = ptpython.entry_points.run_ptipython:run',
]
},
extra_require={
'ptipython': ['ipython'] # For ptipython, we need to have IPython
}
)
<commit_msg>Add `pt[i]pythonX` and `pt[i]pythonX.X` commands<commit_after>
|
#!/usr/bin/env python
import os
import sys
from setuptools import setup, find_packages
long_description = open(
os.path.join(
os.path.dirname(__file__),
'README.rst'
)
).read()
setup(
name='ptpython',
author='Jonathan Slenders',
version='0.34',
url='https://github.com/jonathanslenders/ptpython',
description='Python REPL build on top of prompt_toolkit',
long_description=long_description,
packages=find_packages('.'),
install_requires = [
'docopt',
'jedi>=0.9.0',
'prompt_toolkit>=1.0.0,<2.0.0',
'pygments',
],
entry_points={
'console_scripts': [
'ptpython = ptpython.entry_points.run_ptpython:run',
'ptipython = ptpython.entry_points.run_ptipython:run',
'ptpython%s = ptpython.entry_points.run_ptpython:run' % sys.version_info[0],
'ptpython%s.%s = ptpython.entry_points.run_ptpython:run' % sys.version_info[:2],
'ptipython%s = ptpython.entry_points.run_ptipython:run' % sys.version_info[0],
'ptipython%s.%s = ptpython.entry_points.run_ptipython:run' % sys.version_info[:2],
]
},
extra_require={
'ptipython': ['ipython'] # For ptipython, we need to have IPython
}
)
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
long_description = open(
os.path.join(
os.path.dirname(__file__),
'README.rst'
)
).read()
setup(
name='ptpython',
author='Jonathan Slenders',
version='0.34',
url='https://github.com/jonathanslenders/ptpython',
description='Python REPL build on top of prompt_toolkit',
long_description=long_description,
packages=find_packages('.'),
install_requires = [
'docopt',
'jedi>=0.9.0',
'prompt_toolkit>=1.0.0,<2.0.0',
'pygments',
],
entry_points={
'console_scripts': [
'ptpython = ptpython.entry_points.run_ptpython:run',
'ptipython = ptpython.entry_points.run_ptipython:run',
]
},
extra_require={
'ptipython': ['ipython'] # For ptipython, we need to have IPython
}
)
Add `pt[i]pythonX` and `pt[i]pythonX.X` commands#!/usr/bin/env python
import os
import sys
from setuptools import setup, find_packages
long_description = open(
os.path.join(
os.path.dirname(__file__),
'README.rst'
)
).read()
setup(
name='ptpython',
author='Jonathan Slenders',
version='0.34',
url='https://github.com/jonathanslenders/ptpython',
description='Python REPL build on top of prompt_toolkit',
long_description=long_description,
packages=find_packages('.'),
install_requires = [
'docopt',
'jedi>=0.9.0',
'prompt_toolkit>=1.0.0,<2.0.0',
'pygments',
],
entry_points={
'console_scripts': [
'ptpython = ptpython.entry_points.run_ptpython:run',
'ptipython = ptpython.entry_points.run_ptipython:run',
'ptpython%s = ptpython.entry_points.run_ptpython:run' % sys.version_info[0],
'ptpython%s.%s = ptpython.entry_points.run_ptpython:run' % sys.version_info[:2],
'ptipython%s = ptpython.entry_points.run_ptipython:run' % sys.version_info[0],
'ptipython%s.%s = ptpython.entry_points.run_ptipython:run' % sys.version_info[:2],
]
},
extra_require={
'ptipython': ['ipython'] # For ptipython, we need to have IPython
}
)
|
<commit_before>#!/usr/bin/env python
import os
from setuptools import setup, find_packages
long_description = open(
os.path.join(
os.path.dirname(__file__),
'README.rst'
)
).read()
setup(
name='ptpython',
author='Jonathan Slenders',
version='0.34',
url='https://github.com/jonathanslenders/ptpython',
description='Python REPL build on top of prompt_toolkit',
long_description=long_description,
packages=find_packages('.'),
install_requires = [
'docopt',
'jedi>=0.9.0',
'prompt_toolkit>=1.0.0,<2.0.0',
'pygments',
],
entry_points={
'console_scripts': [
'ptpython = ptpython.entry_points.run_ptpython:run',
'ptipython = ptpython.entry_points.run_ptipython:run',
]
},
extra_require={
'ptipython': ['ipython'] # For ptipython, we need to have IPython
}
)
<commit_msg>Add `pt[i]pythonX` and `pt[i]pythonX.X` commands<commit_after>#!/usr/bin/env python
import os
import sys
from setuptools import setup, find_packages
long_description = open(
os.path.join(
os.path.dirname(__file__),
'README.rst'
)
).read()
setup(
name='ptpython',
author='Jonathan Slenders',
version='0.34',
url='https://github.com/jonathanslenders/ptpython',
description='Python REPL build on top of prompt_toolkit',
long_description=long_description,
packages=find_packages('.'),
install_requires = [
'docopt',
'jedi>=0.9.0',
'prompt_toolkit>=1.0.0,<2.0.0',
'pygments',
],
entry_points={
'console_scripts': [
'ptpython = ptpython.entry_points.run_ptpython:run',
'ptipython = ptpython.entry_points.run_ptipython:run',
'ptpython%s = ptpython.entry_points.run_ptpython:run' % sys.version_info[0],
'ptpython%s.%s = ptpython.entry_points.run_ptpython:run' % sys.version_info[:2],
'ptipython%s = ptpython.entry_points.run_ptipython:run' % sys.version_info[0],
'ptipython%s.%s = ptpython.entry_points.run_ptipython:run' % sys.version_info[:2],
]
},
extra_require={
'ptipython': ['ipython'] # For ptipython, we need to have IPython
}
)
|
e2462e51f760d0f22d52ae3c758c5a2bf34d4b63
|
setup.py
|
setup.py
|
import os, sys
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
from distutils.core import setup
import wtforms
setup(
name='WTForms',
version=wtforms.__version__,
url='http://wtforms.simplecodes.com/',
license='BSD',
author='Thomas Johansson, James Crasta',
author_email='wtforms@simplecodes.com',
description='A flexible forms validation and rendering library for python web development.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
packages=[
'wtforms',
'wtforms.fields',
'wtforms.widgets',
'wtforms.ext',
'wtforms.ext.appengine',
'wtforms.ext.dateutil',
'wtforms.ext.django',
'wtforms.ext.django.templatetags',
'wtforms.ext.sqlalchemy',
]
)
|
import os, sys
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
from distutils.core import setup
import wtforms
setup(
name='WTForms',
version=wtforms.__version__,
url='http://wtforms.simplecodes.com/',
license='BSD',
author='Thomas Johansson, James Crasta',
author_email='wtforms@simplecodes.com',
description='A flexible forms validation and rendering library for python web development.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
packages=[
'wtforms',
'wtforms.fields',
'wtforms.widgets',
'wtforms.ext',
'wtforms.ext.appengine',
'wtforms.ext.csrf',
'wtforms.ext.dateutil',
'wtforms.ext.django',
'wtforms.ext.django.templatetags',
'wtforms.ext.sqlalchemy',
]
)
|
Make sure ext.csrf is installed with WTForms
|
Make sure ext.csrf is installed with WTForms
|
Python
|
bsd-3-clause
|
pawl/wtforms,pawl/wtforms,wtforms/wtforms,subyraman/wtforms,crast/wtforms,skytreader/wtforms,Aaron1992/wtforms,jmagnusson/wtforms,hsum/wtforms,Xender/wtforms,cklein/wtforms,Aaron1992/wtforms
|
import os, sys
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
from distutils.core import setup
import wtforms
setup(
name='WTForms',
version=wtforms.__version__,
url='http://wtforms.simplecodes.com/',
license='BSD',
author='Thomas Johansson, James Crasta',
author_email='wtforms@simplecodes.com',
description='A flexible forms validation and rendering library for python web development.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
packages=[
'wtforms',
'wtforms.fields',
'wtforms.widgets',
'wtforms.ext',
'wtforms.ext.appengine',
'wtforms.ext.dateutil',
'wtforms.ext.django',
'wtforms.ext.django.templatetags',
'wtforms.ext.sqlalchemy',
]
)
Make sure ext.csrf is installed with WTForms
|
import os, sys
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
from distutils.core import setup
import wtforms
setup(
name='WTForms',
version=wtforms.__version__,
url='http://wtforms.simplecodes.com/',
license='BSD',
author='Thomas Johansson, James Crasta',
author_email='wtforms@simplecodes.com',
description='A flexible forms validation and rendering library for python web development.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
packages=[
'wtforms',
'wtforms.fields',
'wtforms.widgets',
'wtforms.ext',
'wtforms.ext.appengine',
'wtforms.ext.csrf',
'wtforms.ext.dateutil',
'wtforms.ext.django',
'wtforms.ext.django.templatetags',
'wtforms.ext.sqlalchemy',
]
)
|
<commit_before>import os, sys
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
from distutils.core import setup
import wtforms
setup(
name='WTForms',
version=wtforms.__version__,
url='http://wtforms.simplecodes.com/',
license='BSD',
author='Thomas Johansson, James Crasta',
author_email='wtforms@simplecodes.com',
description='A flexible forms validation and rendering library for python web development.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
packages=[
'wtforms',
'wtforms.fields',
'wtforms.widgets',
'wtforms.ext',
'wtforms.ext.appengine',
'wtforms.ext.dateutil',
'wtforms.ext.django',
'wtforms.ext.django.templatetags',
'wtforms.ext.sqlalchemy',
]
)
<commit_msg>Make sure ext.csrf is installed with WTForms<commit_after>
|
import os, sys
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
from distutils.core import setup
import wtforms
setup(
name='WTForms',
version=wtforms.__version__,
url='http://wtforms.simplecodes.com/',
license='BSD',
author='Thomas Johansson, James Crasta',
author_email='wtforms@simplecodes.com',
description='A flexible forms validation and rendering library for python web development.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
packages=[
'wtforms',
'wtforms.fields',
'wtforms.widgets',
'wtforms.ext',
'wtforms.ext.appengine',
'wtforms.ext.csrf',
'wtforms.ext.dateutil',
'wtforms.ext.django',
'wtforms.ext.django.templatetags',
'wtforms.ext.sqlalchemy',
]
)
|
import os, sys
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
from distutils.core import setup
import wtforms
setup(
name='WTForms',
version=wtforms.__version__,
url='http://wtforms.simplecodes.com/',
license='BSD',
author='Thomas Johansson, James Crasta',
author_email='wtforms@simplecodes.com',
description='A flexible forms validation and rendering library for python web development.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
packages=[
'wtforms',
'wtforms.fields',
'wtforms.widgets',
'wtforms.ext',
'wtforms.ext.appengine',
'wtforms.ext.dateutil',
'wtforms.ext.django',
'wtforms.ext.django.templatetags',
'wtforms.ext.sqlalchemy',
]
)
Make sure ext.csrf is installed with WTFormsimport os, sys
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
from distutils.core import setup
import wtforms
setup(
name='WTForms',
version=wtforms.__version__,
url='http://wtforms.simplecodes.com/',
license='BSD',
author='Thomas Johansson, James Crasta',
author_email='wtforms@simplecodes.com',
description='A flexible forms validation and rendering library for python web development.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
packages=[
'wtforms',
'wtforms.fields',
'wtforms.widgets',
'wtforms.ext',
'wtforms.ext.appengine',
'wtforms.ext.csrf',
'wtforms.ext.dateutil',
'wtforms.ext.django',
'wtforms.ext.django.templatetags',
'wtforms.ext.sqlalchemy',
]
)
|
<commit_before>import os, sys
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
from distutils.core import setup
import wtforms
setup(
name='WTForms',
version=wtforms.__version__,
url='http://wtforms.simplecodes.com/',
license='BSD',
author='Thomas Johansson, James Crasta',
author_email='wtforms@simplecodes.com',
description='A flexible forms validation and rendering library for python web development.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
packages=[
'wtforms',
'wtforms.fields',
'wtforms.widgets',
'wtforms.ext',
'wtforms.ext.appengine',
'wtforms.ext.dateutil',
'wtforms.ext.django',
'wtforms.ext.django.templatetags',
'wtforms.ext.sqlalchemy',
]
)
<commit_msg>Make sure ext.csrf is installed with WTForms<commit_after>import os, sys
sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))
from distutils.core import setup
import wtforms
setup(
name='WTForms',
version=wtforms.__version__,
url='http://wtforms.simplecodes.com/',
license='BSD',
author='Thomas Johansson, James Crasta',
author_email='wtforms@simplecodes.com',
description='A flexible forms validation and rendering library for python web development.',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
packages=[
'wtforms',
'wtforms.fields',
'wtforms.widgets',
'wtforms.ext',
'wtforms.ext.appengine',
'wtforms.ext.csrf',
'wtforms.ext.dateutil',
'wtforms.ext.django',
'wtforms.ext.django.templatetags',
'wtforms.ext.sqlalchemy',
]
)
|
90818970d8f8a14bb110ada4e524b874688e9770
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read().replace('.. :changelog:', '')
setup(
name='gnsq',
version='1.0.0',
description='A gevent based python client for NSQ.',
long_description=readme + '\n\n' + history,
author='Trevor Olson',
author_email='trevor@heytrevor.com',
url='https://github.com/wtolson/gnsq',
packages=[
'gnsq',
'gnsq.contrib',
'gnsq.stream',
],
package_dir={'gnsq': 'gnsq'},
include_package_data=True,
install_requires=[
'blinker',
'gevent',
'six',
'urllib3',
],
extras_require={
'snappy': ['python-snappy'],
},
license="BSD",
zip_safe=False,
keywords='gnsq',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
]
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
setup(
name='gnsq',
version='1.0.0',
description='A gevent based python client for NSQ.',
long_description=readme,
long_description_content_type='text/x-rst',
author='Trevor Olson',
author_email='trevor@heytrevor.com',
url='https://github.com/wtolson/gnsq',
packages=[
'gnsq',
'gnsq.contrib',
'gnsq.stream',
],
package_dir={'gnsq': 'gnsq'},
include_package_data=True,
install_requires=[
'blinker',
'gevent',
'six',
'urllib3',
],
extras_require={
'snappy': ['python-snappy'],
},
license="BSD",
zip_safe=False,
keywords='gnsq',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
]
)
|
Remove history from long description
|
Remove history from long description
|
Python
|
bsd-3-clause
|
wtolson/gnsq,wtolson/gnsq
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read().replace('.. :changelog:', '')
setup(
name='gnsq',
version='1.0.0',
description='A gevent based python client for NSQ.',
long_description=readme + '\n\n' + history,
author='Trevor Olson',
author_email='trevor@heytrevor.com',
url='https://github.com/wtolson/gnsq',
packages=[
'gnsq',
'gnsq.contrib',
'gnsq.stream',
],
package_dir={'gnsq': 'gnsq'},
include_package_data=True,
install_requires=[
'blinker',
'gevent',
'six',
'urllib3',
],
extras_require={
'snappy': ['python-snappy'],
},
license="BSD",
zip_safe=False,
keywords='gnsq',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
]
)
Remove history from long description
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
setup(
name='gnsq',
version='1.0.0',
description='A gevent based python client for NSQ.',
long_description=readme,
long_description_content_type='text/x-rst',
author='Trevor Olson',
author_email='trevor@heytrevor.com',
url='https://github.com/wtolson/gnsq',
packages=[
'gnsq',
'gnsq.contrib',
'gnsq.stream',
],
package_dir={'gnsq': 'gnsq'},
include_package_data=True,
install_requires=[
'blinker',
'gevent',
'six',
'urllib3',
],
extras_require={
'snappy': ['python-snappy'],
},
license="BSD",
zip_safe=False,
keywords='gnsq',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
]
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read().replace('.. :changelog:', '')
setup(
name='gnsq',
version='1.0.0',
description='A gevent based python client for NSQ.',
long_description=readme + '\n\n' + history,
author='Trevor Olson',
author_email='trevor@heytrevor.com',
url='https://github.com/wtolson/gnsq',
packages=[
'gnsq',
'gnsq.contrib',
'gnsq.stream',
],
package_dir={'gnsq': 'gnsq'},
include_package_data=True,
install_requires=[
'blinker',
'gevent',
'six',
'urllib3',
],
extras_require={
'snappy': ['python-snappy'],
},
license="BSD",
zip_safe=False,
keywords='gnsq',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
]
)
<commit_msg>Remove history from long description<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
setup(
name='gnsq',
version='1.0.0',
description='A gevent based python client for NSQ.',
long_description=readme,
long_description_content_type='text/x-rst',
author='Trevor Olson',
author_email='trevor@heytrevor.com',
url='https://github.com/wtolson/gnsq',
packages=[
'gnsq',
'gnsq.contrib',
'gnsq.stream',
],
package_dir={'gnsq': 'gnsq'},
include_package_data=True,
install_requires=[
'blinker',
'gevent',
'six',
'urllib3',
],
extras_require={
'snappy': ['python-snappy'],
},
license="BSD",
zip_safe=False,
keywords='gnsq',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
]
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read().replace('.. :changelog:', '')
setup(
name='gnsq',
version='1.0.0',
description='A gevent based python client for NSQ.',
long_description=readme + '\n\n' + history,
author='Trevor Olson',
author_email='trevor@heytrevor.com',
url='https://github.com/wtolson/gnsq',
packages=[
'gnsq',
'gnsq.contrib',
'gnsq.stream',
],
package_dir={'gnsq': 'gnsq'},
include_package_data=True,
install_requires=[
'blinker',
'gevent',
'six',
'urllib3',
],
extras_require={
'snappy': ['python-snappy'],
},
license="BSD",
zip_safe=False,
keywords='gnsq',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
]
)
Remove history from long description#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
setup(
name='gnsq',
version='1.0.0',
description='A gevent based python client for NSQ.',
long_description=readme,
long_description_content_type='text/x-rst',
author='Trevor Olson',
author_email='trevor@heytrevor.com',
url='https://github.com/wtolson/gnsq',
packages=[
'gnsq',
'gnsq.contrib',
'gnsq.stream',
],
package_dir={'gnsq': 'gnsq'},
include_package_data=True,
install_requires=[
'blinker',
'gevent',
'six',
'urllib3',
],
extras_require={
'snappy': ['python-snappy'],
},
license="BSD",
zip_safe=False,
keywords='gnsq',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
]
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read().replace('.. :changelog:', '')
setup(
name='gnsq',
version='1.0.0',
description='A gevent based python client for NSQ.',
long_description=readme + '\n\n' + history,
author='Trevor Olson',
author_email='trevor@heytrevor.com',
url='https://github.com/wtolson/gnsq',
packages=[
'gnsq',
'gnsq.contrib',
'gnsq.stream',
],
package_dir={'gnsq': 'gnsq'},
include_package_data=True,
install_requires=[
'blinker',
'gevent',
'six',
'urllib3',
],
extras_require={
'snappy': ['python-snappy'],
},
license="BSD",
zip_safe=False,
keywords='gnsq',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
]
)
<commit_msg>Remove history from long description<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
setup(
name='gnsq',
version='1.0.0',
description='A gevent based python client for NSQ.',
long_description=readme,
long_description_content_type='text/x-rst',
author='Trevor Olson',
author_email='trevor@heytrevor.com',
url='https://github.com/wtolson/gnsq',
packages=[
'gnsq',
'gnsq.contrib',
'gnsq.stream',
],
package_dir={'gnsq': 'gnsq'},
include_package_data=True,
install_requires=[
'blinker',
'gevent',
'six',
'urllib3',
],
extras_require={
'snappy': ['python-snappy'],
},
license="BSD",
zip_safe=False,
keywords='gnsq',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
]
)
|
aac2d31f5ae03628d70b71f7d9e87654aef1bdd5
|
setup.py
|
setup.py
|
#!/usr/bin/env python3
# -*- mode: python -*-
from setuptools import setup, find_packages
from setuptools.extension import Extension
from Cython.Build import cythonize
import os.path as path
modules = [Extension("pypolycomp._bindings",
sources=["pypolycomp/_bindings.pyx"],
libraries=["polycomp"])]
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.rst')) as f:
long_description = f.read()
setup(name="polycomp",
version="1.0",
author="Maurizio Tomasi",
author_email="ziotom78@gmail.com",
description="Python bindings to the libpolycomp C library",
long_description=long_description,
license="MIT",
url="",
install_requires=["cython", "pytoml", "docopt"],
ext_modules=cythonize(modules),
scripts=['polycomp'],
packages=['pypolycomp'],
keywords='compression astronomy fits',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'Topic :: System :: Archiving :: Compression',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
])
|
#!/usr/bin/env python3
# -*- mode: python -*-
from setuptools import setup, find_packages
from setuptools.extension import Extension
from Cython.Build import cythonize
import os.path as path
modules = [Extension("pypolycomp._bindings",
sources=["pypolycomp/_bindings.pyx"],
libraries=["polycomp"])]
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.rst')) as f:
long_description = f.read()
setup(name="polycomp",
version="1.0",
author="Maurizio Tomasi",
author_email="ziotom78@gmail.com",
description="Python bindings to the libpolycomp C library",
long_description=long_description,
license="MIT",
url="",
install_requires=["cython", "pyfits", "docopt"],
ext_modules=cythonize(modules),
scripts=['polycomp'],
packages=['pypolycomp'],
keywords='compression astronomy fits',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'Topic :: System :: Archiving :: Compression',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
])
|
Add pyfits dependence and remove pytoml
|
Add pyfits dependence and remove pytoml
|
Python
|
bsd-3-clause
|
ziotom78/polycomp
|
#!/usr/bin/env python3
# -*- mode: python -*-
from setuptools import setup, find_packages
from setuptools.extension import Extension
from Cython.Build import cythonize
import os.path as path
modules = [Extension("pypolycomp._bindings",
sources=["pypolycomp/_bindings.pyx"],
libraries=["polycomp"])]
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.rst')) as f:
long_description = f.read()
setup(name="polycomp",
version="1.0",
author="Maurizio Tomasi",
author_email="ziotom78@gmail.com",
description="Python bindings to the libpolycomp C library",
long_description=long_description,
license="MIT",
url="",
install_requires=["cython", "pytoml", "docopt"],
ext_modules=cythonize(modules),
scripts=['polycomp'],
packages=['pypolycomp'],
keywords='compression astronomy fits',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'Topic :: System :: Archiving :: Compression',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
])
Add pyfits dependence and remove pytoml
|
#!/usr/bin/env python3
# -*- mode: python -*-
from setuptools import setup, find_packages
from setuptools.extension import Extension
from Cython.Build import cythonize
import os.path as path
modules = [Extension("pypolycomp._bindings",
sources=["pypolycomp/_bindings.pyx"],
libraries=["polycomp"])]
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.rst')) as f:
long_description = f.read()
setup(name="polycomp",
version="1.0",
author="Maurizio Tomasi",
author_email="ziotom78@gmail.com",
description="Python bindings to the libpolycomp C library",
long_description=long_description,
license="MIT",
url="",
install_requires=["cython", "pyfits", "docopt"],
ext_modules=cythonize(modules),
scripts=['polycomp'],
packages=['pypolycomp'],
keywords='compression astronomy fits',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'Topic :: System :: Archiving :: Compression',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
])
|
<commit_before>#!/usr/bin/env python3
# -*- mode: python -*-
from setuptools import setup, find_packages
from setuptools.extension import Extension
from Cython.Build import cythonize
import os.path as path
modules = [Extension("pypolycomp._bindings",
sources=["pypolycomp/_bindings.pyx"],
libraries=["polycomp"])]
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.rst')) as f:
long_description = f.read()
setup(name="polycomp",
version="1.0",
author="Maurizio Tomasi",
author_email="ziotom78@gmail.com",
description="Python bindings to the libpolycomp C library",
long_description=long_description,
license="MIT",
url="",
install_requires=["cython", "pytoml", "docopt"],
ext_modules=cythonize(modules),
scripts=['polycomp'],
packages=['pypolycomp'],
keywords='compression astronomy fits',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'Topic :: System :: Archiving :: Compression',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
])
<commit_msg>Add pyfits dependence and remove pytoml<commit_after>
|
#!/usr/bin/env python3
# -*- mode: python -*-
from setuptools import setup, find_packages
from setuptools.extension import Extension
from Cython.Build import cythonize
import os.path as path
modules = [Extension("pypolycomp._bindings",
sources=["pypolycomp/_bindings.pyx"],
libraries=["polycomp"])]
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.rst')) as f:
long_description = f.read()
setup(name="polycomp",
version="1.0",
author="Maurizio Tomasi",
author_email="ziotom78@gmail.com",
description="Python bindings to the libpolycomp C library",
long_description=long_description,
license="MIT",
url="",
install_requires=["cython", "pyfits", "docopt"],
ext_modules=cythonize(modules),
scripts=['polycomp'],
packages=['pypolycomp'],
keywords='compression astronomy fits',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'Topic :: System :: Archiving :: Compression',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
])
|
#!/usr/bin/env python3
# -*- mode: python -*-
from setuptools import setup, find_packages
from setuptools.extension import Extension
from Cython.Build import cythonize
import os.path as path
modules = [Extension("pypolycomp._bindings",
sources=["pypolycomp/_bindings.pyx"],
libraries=["polycomp"])]
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.rst')) as f:
long_description = f.read()
setup(name="polycomp",
version="1.0",
author="Maurizio Tomasi",
author_email="ziotom78@gmail.com",
description="Python bindings to the libpolycomp C library",
long_description=long_description,
license="MIT",
url="",
install_requires=["cython", "pytoml", "docopt"],
ext_modules=cythonize(modules),
scripts=['polycomp'],
packages=['pypolycomp'],
keywords='compression astronomy fits',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'Topic :: System :: Archiving :: Compression',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
])
Add pyfits dependence and remove pytoml#!/usr/bin/env python3
# -*- mode: python -*-
from setuptools import setup, find_packages
from setuptools.extension import Extension
from Cython.Build import cythonize
import os.path as path
modules = [Extension("pypolycomp._bindings",
sources=["pypolycomp/_bindings.pyx"],
libraries=["polycomp"])]
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.rst')) as f:
long_description = f.read()
setup(name="polycomp",
version="1.0",
author="Maurizio Tomasi",
author_email="ziotom78@gmail.com",
description="Python bindings to the libpolycomp C library",
long_description=long_description,
license="MIT",
url="",
install_requires=["cython", "pyfits", "docopt"],
ext_modules=cythonize(modules),
scripts=['polycomp'],
packages=['pypolycomp'],
keywords='compression astronomy fits',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'Topic :: System :: Archiving :: Compression',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
])
|
<commit_before>#!/usr/bin/env python3
# -*- mode: python -*-
from setuptools import setup, find_packages
from setuptools.extension import Extension
from Cython.Build import cythonize
import os.path as path
modules = [Extension("pypolycomp._bindings",
sources=["pypolycomp/_bindings.pyx"],
libraries=["polycomp"])]
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.rst')) as f:
long_description = f.read()
setup(name="polycomp",
version="1.0",
author="Maurizio Tomasi",
author_email="ziotom78@gmail.com",
description="Python bindings to the libpolycomp C library",
long_description=long_description,
license="MIT",
url="",
install_requires=["cython", "pytoml", "docopt"],
ext_modules=cythonize(modules),
scripts=['polycomp'],
packages=['pypolycomp'],
keywords='compression astronomy fits',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'Topic :: System :: Archiving :: Compression',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
])
<commit_msg>Add pyfits dependence and remove pytoml<commit_after>#!/usr/bin/env python3
# -*- mode: python -*-
from setuptools import setup, find_packages
from setuptools.extension import Extension
from Cython.Build import cythonize
import os.path as path
modules = [Extension("pypolycomp._bindings",
sources=["pypolycomp/_bindings.pyx"],
libraries=["polycomp"])]
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.rst')) as f:
long_description = f.read()
setup(name="polycomp",
version="1.0",
author="Maurizio Tomasi",
author_email="ziotom78@gmail.com",
description="Python bindings to the libpolycomp C library",
long_description=long_description,
license="MIT",
url="",
install_requires=["cython", "pyfits", "docopt"],
ext_modules=cythonize(modules),
scripts=['polycomp'],
packages=['pypolycomp'],
keywords='compression astronomy fits',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'Topic :: System :: Archiving :: Compression',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
])
|
422420cc2a0f63e204d7589e58a6deac5fb90f1f
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
from os import path
from dice import __version__
here = path.abspath(path.dirname(__file__))
try:
with open(path.join(here, 'requirements.txt')) as f:
requirements = f.read().splitlines()
except:
requirements = []
# Get the long description from the README file
# pandoc --from=markdown --to=rst --output=README.rst README.md
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='sim-dice',
version=__version__,
author='Simon Muller',
author_email='samullers@gmail.com',
url='https://github.com/samuller/dice',
description='Simulate various dice throw situations',
long_description=long_description,
py_modules=['dice'],
packages=find_packages(exclude=['*.tests*']),
install_requires=requirements,
include_package_data=True,
entry_points={
'console_scripts': [
'dice=dice:main',
],
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Other Audience',
'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)',
'Natural Language :: English',
'Programming Language :: Python',
'Topic :: Games/Entertainment',
'Topic :: Games/Entertainment :: Board Games',
'Topic :: Utilities',
],
)
|
from setuptools import setup, find_packages
from os import path
from dice import __version__
here = path.abspath(path.dirname(__file__))
try:
with open(path.join(here, 'requirements.txt')) as f:
requirements = f.read().splitlines()
except:
requirements = []
# Get the long description from the README file
# pandoc --from=markdown --to=rst --output=README.rst README.md
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='dice-sim',
version=__version__,
author='Simon Muller',
author_email='samullers@gmail.com',
url='https://github.com/samuller/dice',
description='Simulate various dice throw situations',
long_description=long_description,
py_modules=['dice'],
packages=find_packages(exclude=['*.tests*']),
install_requires=requirements,
include_package_data=True,
entry_points={
'console_scripts': [
'dice=dice:main',
],
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Other Audience',
'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)',
'Natural Language :: English',
'Programming Language :: Python',
'Topic :: Games/Entertainment',
'Topic :: Games/Entertainment :: Board Games',
'Topic :: Utilities',
],
)
|
Set package name to 'dice-sim'
|
Set package name to 'dice-sim'
|
Python
|
lgpl-2.1
|
samuller/dice
|
from setuptools import setup, find_packages
from os import path
from dice import __version__
here = path.abspath(path.dirname(__file__))
try:
with open(path.join(here, 'requirements.txt')) as f:
requirements = f.read().splitlines()
except:
requirements = []
# Get the long description from the README file
# pandoc --from=markdown --to=rst --output=README.rst README.md
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='sim-dice',
version=__version__,
author='Simon Muller',
author_email='samullers@gmail.com',
url='https://github.com/samuller/dice',
description='Simulate various dice throw situations',
long_description=long_description,
py_modules=['dice'],
packages=find_packages(exclude=['*.tests*']),
install_requires=requirements,
include_package_data=True,
entry_points={
'console_scripts': [
'dice=dice:main',
],
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Other Audience',
'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)',
'Natural Language :: English',
'Programming Language :: Python',
'Topic :: Games/Entertainment',
'Topic :: Games/Entertainment :: Board Games',
'Topic :: Utilities',
],
)
Set package name to 'dice-sim'
|
from setuptools import setup, find_packages
from os import path
from dice import __version__
here = path.abspath(path.dirname(__file__))
try:
with open(path.join(here, 'requirements.txt')) as f:
requirements = f.read().splitlines()
except:
requirements = []
# Get the long description from the README file
# pandoc --from=markdown --to=rst --output=README.rst README.md
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='dice-sim',
version=__version__,
author='Simon Muller',
author_email='samullers@gmail.com',
url='https://github.com/samuller/dice',
description='Simulate various dice throw situations',
long_description=long_description,
py_modules=['dice'],
packages=find_packages(exclude=['*.tests*']),
install_requires=requirements,
include_package_data=True,
entry_points={
'console_scripts': [
'dice=dice:main',
],
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Other Audience',
'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)',
'Natural Language :: English',
'Programming Language :: Python',
'Topic :: Games/Entertainment',
'Topic :: Games/Entertainment :: Board Games',
'Topic :: Utilities',
],
)
|
<commit_before>from setuptools import setup, find_packages
from os import path
from dice import __version__
here = path.abspath(path.dirname(__file__))
try:
with open(path.join(here, 'requirements.txt')) as f:
requirements = f.read().splitlines()
except:
requirements = []
# Get the long description from the README file
# pandoc --from=markdown --to=rst --output=README.rst README.md
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='sim-dice',
version=__version__,
author='Simon Muller',
author_email='samullers@gmail.com',
url='https://github.com/samuller/dice',
description='Simulate various dice throw situations',
long_description=long_description,
py_modules=['dice'],
packages=find_packages(exclude=['*.tests*']),
install_requires=requirements,
include_package_data=True,
entry_points={
'console_scripts': [
'dice=dice:main',
],
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Other Audience',
'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)',
'Natural Language :: English',
'Programming Language :: Python',
'Topic :: Games/Entertainment',
'Topic :: Games/Entertainment :: Board Games',
'Topic :: Utilities',
],
)
<commit_msg>Set package name to 'dice-sim'<commit_after>
|
from setuptools import setup, find_packages
from os import path
from dice import __version__
here = path.abspath(path.dirname(__file__))
try:
with open(path.join(here, 'requirements.txt')) as f:
requirements = f.read().splitlines()
except:
requirements = []
# Get the long description from the README file
# pandoc --from=markdown --to=rst --output=README.rst README.md
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='dice-sim',
version=__version__,
author='Simon Muller',
author_email='samullers@gmail.com',
url='https://github.com/samuller/dice',
description='Simulate various dice throw situations',
long_description=long_description,
py_modules=['dice'],
packages=find_packages(exclude=['*.tests*']),
install_requires=requirements,
include_package_data=True,
entry_points={
'console_scripts': [
'dice=dice:main',
],
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Other Audience',
'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)',
'Natural Language :: English',
'Programming Language :: Python',
'Topic :: Games/Entertainment',
'Topic :: Games/Entertainment :: Board Games',
'Topic :: Utilities',
],
)
|
from setuptools import setup, find_packages
from os import path
from dice import __version__
here = path.abspath(path.dirname(__file__))
try:
with open(path.join(here, 'requirements.txt')) as f:
requirements = f.read().splitlines()
except:
requirements = []
# Get the long description from the README file
# pandoc --from=markdown --to=rst --output=README.rst README.md
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='sim-dice',
version=__version__,
author='Simon Muller',
author_email='samullers@gmail.com',
url='https://github.com/samuller/dice',
description='Simulate various dice throw situations',
long_description=long_description,
py_modules=['dice'],
packages=find_packages(exclude=['*.tests*']),
install_requires=requirements,
include_package_data=True,
entry_points={
'console_scripts': [
'dice=dice:main',
],
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Other Audience',
'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)',
'Natural Language :: English',
'Programming Language :: Python',
'Topic :: Games/Entertainment',
'Topic :: Games/Entertainment :: Board Games',
'Topic :: Utilities',
],
)
Set package name to 'dice-sim'from setuptools import setup, find_packages
from os import path
from dice import __version__
here = path.abspath(path.dirname(__file__))
try:
with open(path.join(here, 'requirements.txt')) as f:
requirements = f.read().splitlines()
except:
requirements = []
# Get the long description from the README file
# pandoc --from=markdown --to=rst --output=README.rst README.md
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='dice-sim',
version=__version__,
author='Simon Muller',
author_email='samullers@gmail.com',
url='https://github.com/samuller/dice',
description='Simulate various dice throw situations',
long_description=long_description,
py_modules=['dice'],
packages=find_packages(exclude=['*.tests*']),
install_requires=requirements,
include_package_data=True,
entry_points={
'console_scripts': [
'dice=dice:main',
],
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Other Audience',
'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)',
'Natural Language :: English',
'Programming Language :: Python',
'Topic :: Games/Entertainment',
'Topic :: Games/Entertainment :: Board Games',
'Topic :: Utilities',
],
)
|
<commit_before>from setuptools import setup, find_packages
from os import path
from dice import __version__
here = path.abspath(path.dirname(__file__))
try:
with open(path.join(here, 'requirements.txt')) as f:
requirements = f.read().splitlines()
except:
requirements = []
# Get the long description from the README file
# pandoc --from=markdown --to=rst --output=README.rst README.md
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='sim-dice',
version=__version__,
author='Simon Muller',
author_email='samullers@gmail.com',
url='https://github.com/samuller/dice',
description='Simulate various dice throw situations',
long_description=long_description,
py_modules=['dice'],
packages=find_packages(exclude=['*.tests*']),
install_requires=requirements,
include_package_data=True,
entry_points={
'console_scripts': [
'dice=dice:main',
],
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Other Audience',
'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)',
'Natural Language :: English',
'Programming Language :: Python',
'Topic :: Games/Entertainment',
'Topic :: Games/Entertainment :: Board Games',
'Topic :: Utilities',
],
)
<commit_msg>Set package name to 'dice-sim'<commit_after>from setuptools import setup, find_packages
from os import path
from dice import __version__
here = path.abspath(path.dirname(__file__))
try:
with open(path.join(here, 'requirements.txt')) as f:
requirements = f.read().splitlines()
except:
requirements = []
# Get the long description from the README file
# pandoc --from=markdown --to=rst --output=README.rst README.md
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='dice-sim',
version=__version__,
author='Simon Muller',
author_email='samullers@gmail.com',
url='https://github.com/samuller/dice',
description='Simulate various dice throw situations',
long_description=long_description,
py_modules=['dice'],
packages=find_packages(exclude=['*.tests*']),
install_requires=requirements,
include_package_data=True,
entry_points={
'console_scripts': [
'dice=dice:main',
],
},
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Intended Audience :: Other Audience',
'License :: OSI Approved :: GNU Lesser General Public License v2 (LGPLv2)',
'Natural Language :: English',
'Programming Language :: Python',
'Topic :: Games/Entertainment',
'Topic :: Games/Entertainment :: Board Games',
'Topic :: Utilities',
],
)
|
94c5b819f66201090df1b66d439050449c23ac6e
|
setup.py
|
setup.py
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2017 Malcolm Ramsay <malramsay64@gmail.com>
#
# Distributed under terms of the MIT license.
"""Command line tool to run simulations."""
import numpy as np
from Cython.Build import cythonize
from setuptools import find_packages, setup
from setuptools.extension import Extension
extensions = [
Extension(
'statdyn.analysis.order',
['statdyn/analysis/order.pyx'],
language='c++',
libraries=['m', 'voro++'],
include_dirs=[np.get_include()],
),
Extension(
'statdyn.math_helper',
['statdyn/math_helper.pyx'],
libraries=['m'],
include_dirs=[np.get_include()],
),
]
setup(
name='statdyn',
use_scm_version={'version_scheme': 'post-release'},
setup_requires=['setuptools_scm', ],
packages=find_packages(),
ext_modules=cythonize(extensions),
include_package_data=True,
entry_points="""
[console_scripts]
sdrun=statdyn.sdrun.main:sdrun
""",
)
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2017 Malcolm Ramsay <malramsay64@gmail.com>
#
# Distributed under terms of the MIT license.
"""Command line tool to run simulations."""
from pathlib import Path
from sysconfig import get_path
import numpy as np
from Cython.Build import cythonize
from setuptools import find_packages, setup
from setuptools.extension import Extension
extensions = [
Extension(
'statdyn.analysis.order',
['statdyn/analysis/order.pyx'],
language='c++',
libraries=['m', 'voro++'],
include_dirs=[np.get_include(), Path(get_path('data')) / 'include'],
),
Extension(
'statdyn.math_helper',
['statdyn/math_helper.pyx'],
libraries=['m'],
include_dirs=[np.get_include()],
),
]
setup(
name='statdyn',
use_scm_version={'version_scheme': 'post-release'},
setup_requires=['setuptools_scm', ],
packages=find_packages(),
ext_modules=cythonize(extensions),
include_package_data=True,
entry_points="""
[console_scripts]
sdrun=statdyn.sdrun.main:sdrun
""",
)
|
Add include path for voro++
|
Add include path for voro++
|
Python
|
mit
|
malramsay64/MD-Molecules-Hoomd,malramsay64/MD-Molecules-Hoomd
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2017 Malcolm Ramsay <malramsay64@gmail.com>
#
# Distributed under terms of the MIT license.
"""Command line tool to run simulations."""
import numpy as np
from Cython.Build import cythonize
from setuptools import find_packages, setup
from setuptools.extension import Extension
extensions = [
Extension(
'statdyn.analysis.order',
['statdyn/analysis/order.pyx'],
language='c++',
libraries=['m', 'voro++'],
include_dirs=[np.get_include()],
),
Extension(
'statdyn.math_helper',
['statdyn/math_helper.pyx'],
libraries=['m'],
include_dirs=[np.get_include()],
),
]
setup(
name='statdyn',
use_scm_version={'version_scheme': 'post-release'},
setup_requires=['setuptools_scm', ],
packages=find_packages(),
ext_modules=cythonize(extensions),
include_package_data=True,
entry_points="""
[console_scripts]
sdrun=statdyn.sdrun.main:sdrun
""",
)
Add include path for voro++
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2017 Malcolm Ramsay <malramsay64@gmail.com>
#
# Distributed under terms of the MIT license.
"""Command line tool to run simulations."""
from pathlib import Path
from sysconfig import get_path
import numpy as np
from Cython.Build import cythonize
from setuptools import find_packages, setup
from setuptools.extension import Extension
extensions = [
Extension(
'statdyn.analysis.order',
['statdyn/analysis/order.pyx'],
language='c++',
libraries=['m', 'voro++'],
include_dirs=[np.get_include(), Path(get_path('data')) / 'include'],
),
Extension(
'statdyn.math_helper',
['statdyn/math_helper.pyx'],
libraries=['m'],
include_dirs=[np.get_include()],
),
]
setup(
name='statdyn',
use_scm_version={'version_scheme': 'post-release'},
setup_requires=['setuptools_scm', ],
packages=find_packages(),
ext_modules=cythonize(extensions),
include_package_data=True,
entry_points="""
[console_scripts]
sdrun=statdyn.sdrun.main:sdrun
""",
)
|
<commit_before>#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2017 Malcolm Ramsay <malramsay64@gmail.com>
#
# Distributed under terms of the MIT license.
"""Command line tool to run simulations."""
import numpy as np
from Cython.Build import cythonize
from setuptools import find_packages, setup
from setuptools.extension import Extension
extensions = [
Extension(
'statdyn.analysis.order',
['statdyn/analysis/order.pyx'],
language='c++',
libraries=['m', 'voro++'],
include_dirs=[np.get_include()],
),
Extension(
'statdyn.math_helper',
['statdyn/math_helper.pyx'],
libraries=['m'],
include_dirs=[np.get_include()],
),
]
setup(
name='statdyn',
use_scm_version={'version_scheme': 'post-release'},
setup_requires=['setuptools_scm', ],
packages=find_packages(),
ext_modules=cythonize(extensions),
include_package_data=True,
entry_points="""
[console_scripts]
sdrun=statdyn.sdrun.main:sdrun
""",
)
<commit_msg>Add include path for voro++<commit_after>
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2017 Malcolm Ramsay <malramsay64@gmail.com>
#
# Distributed under terms of the MIT license.
"""Command line tool to run simulations."""
from pathlib import Path
from sysconfig import get_path
import numpy as np
from Cython.Build import cythonize
from setuptools import find_packages, setup
from setuptools.extension import Extension
extensions = [
Extension(
'statdyn.analysis.order',
['statdyn/analysis/order.pyx'],
language='c++',
libraries=['m', 'voro++'],
include_dirs=[np.get_include(), Path(get_path('data')) / 'include'],
),
Extension(
'statdyn.math_helper',
['statdyn/math_helper.pyx'],
libraries=['m'],
include_dirs=[np.get_include()],
),
]
setup(
name='statdyn',
use_scm_version={'version_scheme': 'post-release'},
setup_requires=['setuptools_scm', ],
packages=find_packages(),
ext_modules=cythonize(extensions),
include_package_data=True,
entry_points="""
[console_scripts]
sdrun=statdyn.sdrun.main:sdrun
""",
)
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2017 Malcolm Ramsay <malramsay64@gmail.com>
#
# Distributed under terms of the MIT license.
"""Command line tool to run simulations."""
import numpy as np
from Cython.Build import cythonize
from setuptools import find_packages, setup
from setuptools.extension import Extension
extensions = [
Extension(
'statdyn.analysis.order',
['statdyn/analysis/order.pyx'],
language='c++',
libraries=['m', 'voro++'],
include_dirs=[np.get_include()],
),
Extension(
'statdyn.math_helper',
['statdyn/math_helper.pyx'],
libraries=['m'],
include_dirs=[np.get_include()],
),
]
setup(
name='statdyn',
use_scm_version={'version_scheme': 'post-release'},
setup_requires=['setuptools_scm', ],
packages=find_packages(),
ext_modules=cythonize(extensions),
include_package_data=True,
entry_points="""
[console_scripts]
sdrun=statdyn.sdrun.main:sdrun
""",
)
Add include path for voro++#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2017 Malcolm Ramsay <malramsay64@gmail.com>
#
# Distributed under terms of the MIT license.
"""Command line tool to run simulations."""
from pathlib import Path
from sysconfig import get_path
import numpy as np
from Cython.Build import cythonize
from setuptools import find_packages, setup
from setuptools.extension import Extension
extensions = [
Extension(
'statdyn.analysis.order',
['statdyn/analysis/order.pyx'],
language='c++',
libraries=['m', 'voro++'],
include_dirs=[np.get_include(), Path(get_path('data')) / 'include'],
),
Extension(
'statdyn.math_helper',
['statdyn/math_helper.pyx'],
libraries=['m'],
include_dirs=[np.get_include()],
),
]
setup(
name='statdyn',
use_scm_version={'version_scheme': 'post-release'},
setup_requires=['setuptools_scm', ],
packages=find_packages(),
ext_modules=cythonize(extensions),
include_package_data=True,
entry_points="""
[console_scripts]
sdrun=statdyn.sdrun.main:sdrun
""",
)
|
<commit_before>#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2017 Malcolm Ramsay <malramsay64@gmail.com>
#
# Distributed under terms of the MIT license.
"""Command line tool to run simulations."""
import numpy as np
from Cython.Build import cythonize
from setuptools import find_packages, setup
from setuptools.extension import Extension
extensions = [
Extension(
'statdyn.analysis.order',
['statdyn/analysis/order.pyx'],
language='c++',
libraries=['m', 'voro++'],
include_dirs=[np.get_include()],
),
Extension(
'statdyn.math_helper',
['statdyn/math_helper.pyx'],
libraries=['m'],
include_dirs=[np.get_include()],
),
]
setup(
name='statdyn',
use_scm_version={'version_scheme': 'post-release'},
setup_requires=['setuptools_scm', ],
packages=find_packages(),
ext_modules=cythonize(extensions),
include_package_data=True,
entry_points="""
[console_scripts]
sdrun=statdyn.sdrun.main:sdrun
""",
)
<commit_msg>Add include path for voro++<commit_after>#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2017 Malcolm Ramsay <malramsay64@gmail.com>
#
# Distributed under terms of the MIT license.
"""Command line tool to run simulations."""
from pathlib import Path
from sysconfig import get_path
import numpy as np
from Cython.Build import cythonize
from setuptools import find_packages, setup
from setuptools.extension import Extension
extensions = [
Extension(
'statdyn.analysis.order',
['statdyn/analysis/order.pyx'],
language='c++',
libraries=['m', 'voro++'],
include_dirs=[np.get_include(), Path(get_path('data')) / 'include'],
),
Extension(
'statdyn.math_helper',
['statdyn/math_helper.pyx'],
libraries=['m'],
include_dirs=[np.get_include()],
),
]
setup(
name='statdyn',
use_scm_version={'version_scheme': 'post-release'},
setup_requires=['setuptools_scm', ],
packages=find_packages(),
ext_modules=cythonize(extensions),
include_package_data=True,
entry_points="""
[console_scripts]
sdrun=statdyn.sdrun.main:sdrun
""",
)
|