commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f19471414a1517fad1bca015c9ba3aaa4e8e8fda
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name = "gitoriouslib",
version = "0.1",
packages = find_packages(),
install_requires = ['httplib2>=0.7.2']
)
|
from setuptools import setup, find_packages
setup(
name = "gitoriouslib",
version = "0.1",
packages = find_packages(),
install_requires = ['httplib2>=0.7.2'],
entry_points = {
'console_scripts': [
'glcreate = gitoriouslib.cmd:create_repo',
'gldelete = gitoriouslib.cmd:delete_repo',
]
},
)
|
Add entry point scripts for CLI create/delete
|
Add entry point scripts for CLI create/delete
|
Python
|
apache-2.0
|
locke105/gitoriouslib
|
from setuptools import setup, find_packages
setup(
name = "gitoriouslib",
version = "0.1",
packages = find_packages(),
install_requires = ['httplib2>=0.7.2']
)
Add entry point scripts for CLI create/delete
|
from setuptools import setup, find_packages
setup(
name = "gitoriouslib",
version = "0.1",
packages = find_packages(),
install_requires = ['httplib2>=0.7.2'],
entry_points = {
'console_scripts': [
'glcreate = gitoriouslib.cmd:create_repo',
'gldelete = gitoriouslib.cmd:delete_repo',
]
},
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name = "gitoriouslib",
version = "0.1",
packages = find_packages(),
install_requires = ['httplib2>=0.7.2']
)
<commit_msg>Add entry point scripts for CLI create/delete<commit_after>
|
from setuptools import setup, find_packages
setup(
name = "gitoriouslib",
version = "0.1",
packages = find_packages(),
install_requires = ['httplib2>=0.7.2'],
entry_points = {
'console_scripts': [
'glcreate = gitoriouslib.cmd:create_repo',
'gldelete = gitoriouslib.cmd:delete_repo',
]
},
)
|
from setuptools import setup, find_packages
setup(
name = "gitoriouslib",
version = "0.1",
packages = find_packages(),
install_requires = ['httplib2>=0.7.2']
)
Add entry point scripts for CLI create/deletefrom setuptools import setup, find_packages
setup(
name = "gitoriouslib",
version = "0.1",
packages = find_packages(),
install_requires = ['httplib2>=0.7.2'],
entry_points = {
'console_scripts': [
'glcreate = gitoriouslib.cmd:create_repo',
'gldelete = gitoriouslib.cmd:delete_repo',
]
},
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name = "gitoriouslib",
version = "0.1",
packages = find_packages(),
install_requires = ['httplib2>=0.7.2']
)
<commit_msg>Add entry point scripts for CLI create/delete<commit_after>from setuptools import setup, find_packages
setup(
name = "gitoriouslib",
version = "0.1",
packages = find_packages(),
install_requires = ['httplib2>=0.7.2'],
entry_points = {
'console_scripts': [
'glcreate = gitoriouslib.cmd:create_repo',
'gldelete = gitoriouslib.cmd:delete_repo',
]
},
)
|
58faca0b65d2d64f1112f323bd843fc2b23fd086
|
setup.py
|
setup.py
|
import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'gis_metadata.tests.tests'])
raise SystemExit(errno)
with open('README.md') as readme:
long_description = readme.read()
setup(
name='gis-metadata-parser',
description='Parser for GIS metadata standards including FGDC and ISO-19115',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='arcgis,fgdc,iso,ISO-19115,ISO-19139,gis,metadata,parser,xml,gis_metadata,gis_metadata_parser',
version='1.2.5',
packages=[
'gis_metadata', 'gis_metadata.tests'
],
install_requires=[
'frozendict==1.2', 'parserutils>=1.2.3', 'six>=1.9.0'
],
tests_require=['mock'],
url='https://github.com/consbio/gis-metadata-parser',
license='BSD',
cmdclass={'test': RunTests}
)
|
import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'gis_metadata.tests.tests'])
raise SystemExit(errno)
with open('README.md') as readme:
long_description = readme.read()
setup(
name='gis-metadata-parser',
description='Parser for GIS metadata standards including FGDC and ISO-19115',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='arcgis,fgdc,iso,ISO-19115,ISO-19139,gis,metadata,parser,xml,gis_metadata,gis_metadata_parser',
version='1.2.6',
packages=[
'gis_metadata', 'gis_metadata.tests'
],
install_requires=[
'frozendict==1.2', 'parserutils>=1.2.3', 'six>=1.9.0'
],
tests_require=['mock'],
url='https://github.com/consbio/gis-metadata-parser',
license='BSD',
cmdclass={'test': RunTests}
)
|
Increment version after fix to frozendict version
|
Increment version after fix to frozendict version
|
Python
|
bsd-3-clause
|
consbio/gis-metadata-parser
|
import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'gis_metadata.tests.tests'])
raise SystemExit(errno)
with open('README.md') as readme:
long_description = readme.read()
setup(
name='gis-metadata-parser',
description='Parser for GIS metadata standards including FGDC and ISO-19115',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='arcgis,fgdc,iso,ISO-19115,ISO-19139,gis,metadata,parser,xml,gis_metadata,gis_metadata_parser',
version='1.2.5',
packages=[
'gis_metadata', 'gis_metadata.tests'
],
install_requires=[
'frozendict==1.2', 'parserutils>=1.2.3', 'six>=1.9.0'
],
tests_require=['mock'],
url='https://github.com/consbio/gis-metadata-parser',
license='BSD',
cmdclass={'test': RunTests}
)
Increment version after fix to frozendict version
|
import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'gis_metadata.tests.tests'])
raise SystemExit(errno)
with open('README.md') as readme:
long_description = readme.read()
setup(
name='gis-metadata-parser',
description='Parser for GIS metadata standards including FGDC and ISO-19115',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='arcgis,fgdc,iso,ISO-19115,ISO-19139,gis,metadata,parser,xml,gis_metadata,gis_metadata_parser',
version='1.2.6',
packages=[
'gis_metadata', 'gis_metadata.tests'
],
install_requires=[
'frozendict==1.2', 'parserutils>=1.2.3', 'six>=1.9.0'
],
tests_require=['mock'],
url='https://github.com/consbio/gis-metadata-parser',
license='BSD',
cmdclass={'test': RunTests}
)
|
<commit_before>import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'gis_metadata.tests.tests'])
raise SystemExit(errno)
with open('README.md') as readme:
long_description = readme.read()
setup(
name='gis-metadata-parser',
description='Parser for GIS metadata standards including FGDC and ISO-19115',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='arcgis,fgdc,iso,ISO-19115,ISO-19139,gis,metadata,parser,xml,gis_metadata,gis_metadata_parser',
version='1.2.5',
packages=[
'gis_metadata', 'gis_metadata.tests'
],
install_requires=[
'frozendict==1.2', 'parserutils>=1.2.3', 'six>=1.9.0'
],
tests_require=['mock'],
url='https://github.com/consbio/gis-metadata-parser',
license='BSD',
cmdclass={'test': RunTests}
)
<commit_msg>Increment version after fix to frozendict version<commit_after>
|
import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'gis_metadata.tests.tests'])
raise SystemExit(errno)
with open('README.md') as readme:
long_description = readme.read()
setup(
name='gis-metadata-parser',
description='Parser for GIS metadata standards including FGDC and ISO-19115',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='arcgis,fgdc,iso,ISO-19115,ISO-19139,gis,metadata,parser,xml,gis_metadata,gis_metadata_parser',
version='1.2.6',
packages=[
'gis_metadata', 'gis_metadata.tests'
],
install_requires=[
'frozendict==1.2', 'parserutils>=1.2.3', 'six>=1.9.0'
],
tests_require=['mock'],
url='https://github.com/consbio/gis-metadata-parser',
license='BSD',
cmdclass={'test': RunTests}
)
|
import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'gis_metadata.tests.tests'])
raise SystemExit(errno)
with open('README.md') as readme:
long_description = readme.read()
setup(
name='gis-metadata-parser',
description='Parser for GIS metadata standards including FGDC and ISO-19115',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='arcgis,fgdc,iso,ISO-19115,ISO-19139,gis,metadata,parser,xml,gis_metadata,gis_metadata_parser',
version='1.2.5',
packages=[
'gis_metadata', 'gis_metadata.tests'
],
install_requires=[
'frozendict==1.2', 'parserutils>=1.2.3', 'six>=1.9.0'
],
tests_require=['mock'],
url='https://github.com/consbio/gis-metadata-parser',
license='BSD',
cmdclass={'test': RunTests}
)
Increment version after fix to frozendict versionimport subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'gis_metadata.tests.tests'])
raise SystemExit(errno)
with open('README.md') as readme:
long_description = readme.read()
setup(
name='gis-metadata-parser',
description='Parser for GIS metadata standards including FGDC and ISO-19115',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='arcgis,fgdc,iso,ISO-19115,ISO-19139,gis,metadata,parser,xml,gis_metadata,gis_metadata_parser',
version='1.2.6',
packages=[
'gis_metadata', 'gis_metadata.tests'
],
install_requires=[
'frozendict==1.2', 'parserutils>=1.2.3', 'six>=1.9.0'
],
tests_require=['mock'],
url='https://github.com/consbio/gis-metadata-parser',
license='BSD',
cmdclass={'test': RunTests}
)
|
<commit_before>import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'gis_metadata.tests.tests'])
raise SystemExit(errno)
with open('README.md') as readme:
long_description = readme.read()
setup(
name='gis-metadata-parser',
description='Parser for GIS metadata standards including FGDC and ISO-19115',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='arcgis,fgdc,iso,ISO-19115,ISO-19139,gis,metadata,parser,xml,gis_metadata,gis_metadata_parser',
version='1.2.5',
packages=[
'gis_metadata', 'gis_metadata.tests'
],
install_requires=[
'frozendict==1.2', 'parserutils>=1.2.3', 'six>=1.9.0'
],
tests_require=['mock'],
url='https://github.com/consbio/gis-metadata-parser',
license='BSD',
cmdclass={'test': RunTests}
)
<commit_msg>Increment version after fix to frozendict version<commit_after>import subprocess
import sys
from setuptools import Command, setup
class RunTests(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
errno = subprocess.call([sys.executable, '-m', 'unittest', 'gis_metadata.tests.tests'])
raise SystemExit(errno)
with open('README.md') as readme:
long_description = readme.read()
setup(
name='gis-metadata-parser',
description='Parser for GIS metadata standards including FGDC and ISO-19115',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='arcgis,fgdc,iso,ISO-19115,ISO-19139,gis,metadata,parser,xml,gis_metadata,gis_metadata_parser',
version='1.2.6',
packages=[
'gis_metadata', 'gis_metadata.tests'
],
install_requires=[
'frozendict==1.2', 'parserutils>=1.2.3', 'six>=1.9.0'
],
tests_require=['mock'],
url='https://github.com/consbio/gis-metadata-parser',
license='BSD',
cmdclass={'test': RunTests}
)
|
4c95937d43f6ec769412b0cb8b58546ecb5617ec
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='jute',
packages=['jute'],
package_dir={'jute': 'python3/jute'},
version='0.1',
description='Yet another interface module for Python',
author='Jonathan Patrick Giddy',
author_email='jongiddy@gmail.com',
url='https://github.com/jongiddy/jute',
download_url='https://github.com/jongiddy/jute/tarball/0.1',
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Natural Language :: English",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: Implementation :: CPython",
"Topic :: Software Development :: Libraries :: Python Modules",
],
)
|
from distutils.core import setup
setup(
name='jute',
packages=['jute'],
package_dir={'jute': 'python3/jute'},
version='0.1.0',
description='An interface module that verifies both providers and callers',
author='Jonathan Patrick Giddy',
author_email='jongiddy@gmail.com',
url='https://github.com/jongiddy/jute',
download_url='https://github.com/jongiddy/jute/tarball/v0.1.0',
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Natural Language :: English",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: Implementation :: CPython",
"Topic :: Software Development :: Libraries :: Python Modules",
],
)
|
Change the tagline for PyPI
|
Change the tagline for PyPI
|
Python
|
mit
|
jongiddy/jute,jongiddy/jute
|
from distutils.core import setup
setup(
name='jute',
packages=['jute'],
package_dir={'jute': 'python3/jute'},
version='0.1',
description='Yet another interface module for Python',
author='Jonathan Patrick Giddy',
author_email='jongiddy@gmail.com',
url='https://github.com/jongiddy/jute',
download_url='https://github.com/jongiddy/jute/tarball/0.1',
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Natural Language :: English",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: Implementation :: CPython",
"Topic :: Software Development :: Libraries :: Python Modules",
],
)
Change the tagline for PyPI
|
from distutils.core import setup
setup(
name='jute',
packages=['jute'],
package_dir={'jute': 'python3/jute'},
version='0.1.0',
description='An interface module that verifies both providers and callers',
author='Jonathan Patrick Giddy',
author_email='jongiddy@gmail.com',
url='https://github.com/jongiddy/jute',
download_url='https://github.com/jongiddy/jute/tarball/v0.1.0',
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Natural Language :: English",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: Implementation :: CPython",
"Topic :: Software Development :: Libraries :: Python Modules",
],
)
|
<commit_before>from distutils.core import setup
setup(
name='jute',
packages=['jute'],
package_dir={'jute': 'python3/jute'},
version='0.1',
description='Yet another interface module for Python',
author='Jonathan Patrick Giddy',
author_email='jongiddy@gmail.com',
url='https://github.com/jongiddy/jute',
download_url='https://github.com/jongiddy/jute/tarball/0.1',
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Natural Language :: English",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: Implementation :: CPython",
"Topic :: Software Development :: Libraries :: Python Modules",
],
)
<commit_msg>Change the tagline for PyPI<commit_after>
|
from distutils.core import setup
setup(
name='jute',
packages=['jute'],
package_dir={'jute': 'python3/jute'},
version='0.1.0',
description='An interface module that verifies both providers and callers',
author='Jonathan Patrick Giddy',
author_email='jongiddy@gmail.com',
url='https://github.com/jongiddy/jute',
download_url='https://github.com/jongiddy/jute/tarball/v0.1.0',
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Natural Language :: English",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: Implementation :: CPython",
"Topic :: Software Development :: Libraries :: Python Modules",
],
)
|
from distutils.core import setup
setup(
name='jute',
packages=['jute'],
package_dir={'jute': 'python3/jute'},
version='0.1',
description='Yet another interface module for Python',
author='Jonathan Patrick Giddy',
author_email='jongiddy@gmail.com',
url='https://github.com/jongiddy/jute',
download_url='https://github.com/jongiddy/jute/tarball/0.1',
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Natural Language :: English",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: Implementation :: CPython",
"Topic :: Software Development :: Libraries :: Python Modules",
],
)
Change the tagline for PyPIfrom distutils.core import setup
setup(
name='jute',
packages=['jute'],
package_dir={'jute': 'python3/jute'},
version='0.1.0',
description='An interface module that verifies both providers and callers',
author='Jonathan Patrick Giddy',
author_email='jongiddy@gmail.com',
url='https://github.com/jongiddy/jute',
download_url='https://github.com/jongiddy/jute/tarball/v0.1.0',
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Natural Language :: English",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: Implementation :: CPython",
"Topic :: Software Development :: Libraries :: Python Modules",
],
)
|
<commit_before>from distutils.core import setup
setup(
name='jute',
packages=['jute'],
package_dir={'jute': 'python3/jute'},
version='0.1',
description='Yet another interface module for Python',
author='Jonathan Patrick Giddy',
author_email='jongiddy@gmail.com',
url='https://github.com/jongiddy/jute',
download_url='https://github.com/jongiddy/jute/tarball/0.1',
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Natural Language :: English",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: Implementation :: CPython",
"Topic :: Software Development :: Libraries :: Python Modules",
],
)
<commit_msg>Change the tagline for PyPI<commit_after>from distutils.core import setup
setup(
name='jute',
packages=['jute'],
package_dir={'jute': 'python3/jute'},
version='0.1.0',
description='An interface module that verifies both providers and callers',
author='Jonathan Patrick Giddy',
author_email='jongiddy@gmail.com',
url='https://github.com/jongiddy/jute',
download_url='https://github.com/jongiddy/jute/tarball/v0.1.0',
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Natural Language :: English",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: Implementation :: CPython",
"Topic :: Software Development :: Libraries :: Python Modules",
],
)
|
4d1d4c6730c2d09ec387267f5eebbcc24f70ed38
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='requests-facebook',
version='0.3.0',
install_requires=['requests>=1.0.0'],
author='Mike Helmick',
author_email='me@michaelhelmick.com',
license='BSD',
url='https://github.com/michaelhelmick/requests-facebook/',
keywords='python facebook requests graph oauth oauth2 api',
description='A Python Library to interface with Facebook Graph API',
long_description=open('README.rst').read(),
download_url='https://github.com/michaelhelmick/requests-facebook/zipball/master',
py_modules=['facebook'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Communications :: Chat',
'Topic :: Internet'
]
)
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='requests-facebook',
version='0.4.0',
install_requires=['requests>=1.0.0'],
author='Mike Helmick',
author_email='me@michaelhelmick.com',
license='BSD',
url='https://github.com/michaelhelmick/requests-facebook/',
keywords='python facebook requests graph oauth oauth2 api',
description='A Python Library to interface with Facebook Graph API',
long_description=open('README.rst').read(),
download_url='https://github.com/michaelhelmick/requests-facebook/zipball/master',
py_modules=['facebook'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Communications :: Chat',
'Topic :: Internet'
]
)
|
Upgrade requests-facebook version to 0.4.0
|
Upgrade requests-facebook version to 0.4.0
|
Python
|
bsd-2-clause
|
michaelhelmick/requests-facebook
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='requests-facebook',
version='0.3.0',
install_requires=['requests>=1.0.0'],
author='Mike Helmick',
author_email='me@michaelhelmick.com',
license='BSD',
url='https://github.com/michaelhelmick/requests-facebook/',
keywords='python facebook requests graph oauth oauth2 api',
description='A Python Library to interface with Facebook Graph API',
long_description=open('README.rst').read(),
download_url='https://github.com/michaelhelmick/requests-facebook/zipball/master',
py_modules=['facebook'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Communications :: Chat',
'Topic :: Internet'
]
)
Upgrade requests-facebook version to 0.4.0
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='requests-facebook',
version='0.4.0',
install_requires=['requests>=1.0.0'],
author='Mike Helmick',
author_email='me@michaelhelmick.com',
license='BSD',
url='https://github.com/michaelhelmick/requests-facebook/',
keywords='python facebook requests graph oauth oauth2 api',
description='A Python Library to interface with Facebook Graph API',
long_description=open('README.rst').read(),
download_url='https://github.com/michaelhelmick/requests-facebook/zipball/master',
py_modules=['facebook'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Communications :: Chat',
'Topic :: Internet'
]
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
setup(
name='requests-facebook',
version='0.3.0',
install_requires=['requests>=1.0.0'],
author='Mike Helmick',
author_email='me@michaelhelmick.com',
license='BSD',
url='https://github.com/michaelhelmick/requests-facebook/',
keywords='python facebook requests graph oauth oauth2 api',
description='A Python Library to interface with Facebook Graph API',
long_description=open('README.rst').read(),
download_url='https://github.com/michaelhelmick/requests-facebook/zipball/master',
py_modules=['facebook'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Communications :: Chat',
'Topic :: Internet'
]
)
<commit_msg>Upgrade requests-facebook version to 0.4.0<commit_after>
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='requests-facebook',
version='0.4.0',
install_requires=['requests>=1.0.0'],
author='Mike Helmick',
author_email='me@michaelhelmick.com',
license='BSD',
url='https://github.com/michaelhelmick/requests-facebook/',
keywords='python facebook requests graph oauth oauth2 api',
description='A Python Library to interface with Facebook Graph API',
long_description=open('README.rst').read(),
download_url='https://github.com/michaelhelmick/requests-facebook/zipball/master',
py_modules=['facebook'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Communications :: Chat',
'Topic :: Internet'
]
)
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='requests-facebook',
version='0.3.0',
install_requires=['requests>=1.0.0'],
author='Mike Helmick',
author_email='me@michaelhelmick.com',
license='BSD',
url='https://github.com/michaelhelmick/requests-facebook/',
keywords='python facebook requests graph oauth oauth2 api',
description='A Python Library to interface with Facebook Graph API',
long_description=open('README.rst').read(),
download_url='https://github.com/michaelhelmick/requests-facebook/zipball/master',
py_modules=['facebook'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Communications :: Chat',
'Topic :: Internet'
]
)
Upgrade requests-facebook version to 0.4.0#!/usr/bin/env python
from setuptools import setup
setup(
name='requests-facebook',
version='0.4.0',
install_requires=['requests>=1.0.0'],
author='Mike Helmick',
author_email='me@michaelhelmick.com',
license='BSD',
url='https://github.com/michaelhelmick/requests-facebook/',
keywords='python facebook requests graph oauth oauth2 api',
description='A Python Library to interface with Facebook Graph API',
long_description=open('README.rst').read(),
download_url='https://github.com/michaelhelmick/requests-facebook/zipball/master',
py_modules=['facebook'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Communications :: Chat',
'Topic :: Internet'
]
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
setup(
name='requests-facebook',
version='0.3.0',
install_requires=['requests>=1.0.0'],
author='Mike Helmick',
author_email='me@michaelhelmick.com',
license='BSD',
url='https://github.com/michaelhelmick/requests-facebook/',
keywords='python facebook requests graph oauth oauth2 api',
description='A Python Library to interface with Facebook Graph API',
long_description=open('README.rst').read(),
download_url='https://github.com/michaelhelmick/requests-facebook/zipball/master',
py_modules=['facebook'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Communications :: Chat',
'Topic :: Internet'
]
)
<commit_msg>Upgrade requests-facebook version to 0.4.0<commit_after>#!/usr/bin/env python
from setuptools import setup
setup(
name='requests-facebook',
version='0.4.0',
install_requires=['requests>=1.0.0'],
author='Mike Helmick',
author_email='me@michaelhelmick.com',
license='BSD',
url='https://github.com/michaelhelmick/requests-facebook/',
keywords='python facebook requests graph oauth oauth2 api',
description='A Python Library to interface with Facebook Graph API',
long_description=open('README.rst').read(),
download_url='https://github.com/michaelhelmick/requests-facebook/zipball/master',
py_modules=['facebook'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Communications :: Chat',
'Topic :: Internet'
]
)
|
9b2a016c5652d65aa8f223c0554d1766721ac2a8
|
setup.py
|
setup.py
|
import io
import os
import re
from setuptools import setup, find_packages
def find_version():
file_dir = os.path.dirname(__file__)
with io.open(os.path.join(file_dir, 'auth0', '__init__.py')) as f:
version = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]', f.read())
if version:
return version.group(1)
else:
raise RuntimeError("Unable to find version string.")
setup(
name='auth0-python',
version=find_version(),
description='Auth0 Python SDK',
author='Auth0',
author_email='support@auth0.com',
license='MIT',
packages=find_packages(),
install_requires=['requests'],
extras_require={'test': ['mock']},
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
url='https://github.com/auth0/auth0-python',
)
|
import io
import os
import re
from setuptools import setup, find_packages
def find_version():
file_dir = os.path.dirname(__file__)
with io.open(os.path.join(file_dir, 'auth0', '__init__.py')) as f:
version = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]', f.read())
if version:
return version.group(1)
else:
raise RuntimeError("Unable to find version string.")
setup(
name='auth0-python',
version=find_version(),
description='Auth0 Python SDK',
author='Auth0',
author_email='support@auth0.com',
license='MIT',
packages=find_packages(),
install_requires=['requests'],
extras_require={'test': ['mock']},
python_requires='>=2.7, !=3.0.*, !=3.1.*',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
url='https://github.com/auth0/auth0-python',
)
|
Remove 3.1 and 3.2 from python_requires (and classifiers) to allow it to still install for those versions
|
Remove 3.1 and 3.2 from python_requires (and classifiers) to allow it to still install for those versions
|
Python
|
mit
|
auth0/auth0-python,auth0/auth0-python
|
import io
import os
import re
from setuptools import setup, find_packages
def find_version():
file_dir = os.path.dirname(__file__)
with io.open(os.path.join(file_dir, 'auth0', '__init__.py')) as f:
version = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]', f.read())
if version:
return version.group(1)
else:
raise RuntimeError("Unable to find version string.")
setup(
name='auth0-python',
version=find_version(),
description='Auth0 Python SDK',
author='Auth0',
author_email='support@auth0.com',
license='MIT',
packages=find_packages(),
install_requires=['requests'],
extras_require={'test': ['mock']},
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
url='https://github.com/auth0/auth0-python',
)
Remove 3.1 and 3.2 from python_requires (and classifiers) to allow it to still install for those versions
|
import io
import os
import re
from setuptools import setup, find_packages
def find_version():
file_dir = os.path.dirname(__file__)
with io.open(os.path.join(file_dir, 'auth0', '__init__.py')) as f:
version = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]', f.read())
if version:
return version.group(1)
else:
raise RuntimeError("Unable to find version string.")
setup(
name='auth0-python',
version=find_version(),
description='Auth0 Python SDK',
author='Auth0',
author_email='support@auth0.com',
license='MIT',
packages=find_packages(),
install_requires=['requests'],
extras_require={'test': ['mock']},
python_requires='>=2.7, !=3.0.*, !=3.1.*',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
url='https://github.com/auth0/auth0-python',
)
|
<commit_before>import io
import os
import re
from setuptools import setup, find_packages
def find_version():
file_dir = os.path.dirname(__file__)
with io.open(os.path.join(file_dir, 'auth0', '__init__.py')) as f:
version = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]', f.read())
if version:
return version.group(1)
else:
raise RuntimeError("Unable to find version string.")
setup(
name='auth0-python',
version=find_version(),
description='Auth0 Python SDK',
author='Auth0',
author_email='support@auth0.com',
license='MIT',
packages=find_packages(),
install_requires=['requests'],
extras_require={'test': ['mock']},
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
url='https://github.com/auth0/auth0-python',
)
<commit_msg>Remove 3.1 and 3.2 from python_requires (and classifiers) to allow it to still install for those versions<commit_after>
|
import io
import os
import re
from setuptools import setup, find_packages
def find_version():
file_dir = os.path.dirname(__file__)
with io.open(os.path.join(file_dir, 'auth0', '__init__.py')) as f:
version = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]', f.read())
if version:
return version.group(1)
else:
raise RuntimeError("Unable to find version string.")
setup(
name='auth0-python',
version=find_version(),
description='Auth0 Python SDK',
author='Auth0',
author_email='support@auth0.com',
license='MIT',
packages=find_packages(),
install_requires=['requests'],
extras_require={'test': ['mock']},
python_requires='>=2.7, !=3.0.*, !=3.1.*',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
url='https://github.com/auth0/auth0-python',
)
|
import io
import os
import re
from setuptools import setup, find_packages
def find_version():
file_dir = os.path.dirname(__file__)
with io.open(os.path.join(file_dir, 'auth0', '__init__.py')) as f:
version = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]', f.read())
if version:
return version.group(1)
else:
raise RuntimeError("Unable to find version string.")
setup(
name='auth0-python',
version=find_version(),
description='Auth0 Python SDK',
author='Auth0',
author_email='support@auth0.com',
license='MIT',
packages=find_packages(),
install_requires=['requests'],
extras_require={'test': ['mock']},
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
url='https://github.com/auth0/auth0-python',
)
Remove 3.1 and 3.2 from python_requires (and classifiers) to allow it to still install for those versionsimport io
import os
import re
from setuptools import setup, find_packages
def find_version():
file_dir = os.path.dirname(__file__)
with io.open(os.path.join(file_dir, 'auth0', '__init__.py')) as f:
version = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]', f.read())
if version:
return version.group(1)
else:
raise RuntimeError("Unable to find version string.")
setup(
name='auth0-python',
version=find_version(),
description='Auth0 Python SDK',
author='Auth0',
author_email='support@auth0.com',
license='MIT',
packages=find_packages(),
install_requires=['requests'],
extras_require={'test': ['mock']},
python_requires='>=2.7, !=3.0.*, !=3.1.*',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
url='https://github.com/auth0/auth0-python',
)
|
<commit_before>import io
import os
import re
from setuptools import setup, find_packages
def find_version():
file_dir = os.path.dirname(__file__)
with io.open(os.path.join(file_dir, 'auth0', '__init__.py')) as f:
version = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]', f.read())
if version:
return version.group(1)
else:
raise RuntimeError("Unable to find version string.")
setup(
name='auth0-python',
version=find_version(),
description='Auth0 Python SDK',
author='Auth0',
author_email='support@auth0.com',
license='MIT',
packages=find_packages(),
install_requires=['requests'],
extras_require={'test': ['mock']},
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
url='https://github.com/auth0/auth0-python',
)
<commit_msg>Remove 3.1 and 3.2 from python_requires (and classifiers) to allow it to still install for those versions<commit_after>import io
import os
import re
from setuptools import setup, find_packages
def find_version():
file_dir = os.path.dirname(__file__)
with io.open(os.path.join(file_dir, 'auth0', '__init__.py')) as f:
version = re.search(r'^__version__ = [\'"]([^\'"]*)[\'"]', f.read())
if version:
return version.group(1)
else:
raise RuntimeError("Unable to find version string.")
setup(
name='auth0-python',
version=find_version(),
description='Auth0 Python SDK',
author='Auth0',
author_email='support@auth0.com',
license='MIT',
packages=find_packages(),
install_requires=['requests'],
extras_require={'test': ['mock']},
python_requires='>=2.7, !=3.0.*, !=3.1.*',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
url='https://github.com/auth0/auth0-python',
)
|
44e88adcc2ba62892828e1ec98543dff9218524a
|
setup.py
|
setup.py
|
__author__ = 'katharine'
import sys
from setuptools import setup, find_packages
requires = [
'backports.ssl-match-hostname==3.4.0.2',
'gevent>=1.1b5',
'gevent-websocket==0.9.3',
'greenlet==0.4.9',
'peewee==2.4.7',
'pygeoip==0.3.2',
'pypng==0.0.17',
'python-dateutil==2.4.1',
'requests==2.5.0',
'sh==1.09',
'six==1.9.0',
'websocket-client==0.31.0',
'wsgiref==0.1.2',
'libpebble2==0.0.12',
'netaddr==0.7.18'
]
packages = find_packages()
print packages
setup(name='pypkjs',
version='3.6',
description='PebbleKit JS in Python!',
url='https://github.com/pebble/pypkjs',
author='Pebble Technology Corporation',
author_email='katharine@pebble.com',
license='MIT',
packages=packages,
install_requires=requires,
entry_points={
'console_scripts': [
'pypkjs=runner.websocket:run_tool'
],
},
zip_safe=True)
|
__author__ = 'katharine'
import sys
from setuptools import setup, find_packages
requires = [
'backports.ssl-match-hostname==3.4.0.2',
'gevent>=1.1b5',
'gevent-websocket==0.9.3',
'greenlet==0.4.9',
'peewee==2.4.7',
'pygeoip==0.3.2',
'pypng==0.0.17',
'python-dateutil==2.4.1',
'requests==2.5.0',
'sh==1.09',
'six==1.9.0',
'websocket-client==0.31.0',
'wsgiref==0.1.2',
'libpebble2==0.0.12',
'netaddr==0.7.18'
]
setup(name='pypkjs',
version='3.6',
description='PebbleKit JS in Python!',
url='https://github.com/pebble/pypkjs',
author='Pebble Technology Corporation',
author_email='katharine@pebble.com',
license='MIT',
packages=find_packages(),
install_requires=requires,
entry_points={
'console_scripts': [
'pypkjs=runner.websocket:run_tool'
],
})
|
Address review comments, remove zip_safe=True because GeoIP
|
Address review comments, remove zip_safe=True because GeoIP
|
Python
|
mit
|
pebble/pypkjs
|
__author__ = 'katharine'
import sys
from setuptools import setup, find_packages
requires = [
'backports.ssl-match-hostname==3.4.0.2',
'gevent>=1.1b5',
'gevent-websocket==0.9.3',
'greenlet==0.4.9',
'peewee==2.4.7',
'pygeoip==0.3.2',
'pypng==0.0.17',
'python-dateutil==2.4.1',
'requests==2.5.0',
'sh==1.09',
'six==1.9.0',
'websocket-client==0.31.0',
'wsgiref==0.1.2',
'libpebble2==0.0.12',
'netaddr==0.7.18'
]
packages = find_packages()
print packages
setup(name='pypkjs',
version='3.6',
description='PebbleKit JS in Python!',
url='https://github.com/pebble/pypkjs',
author='Pebble Technology Corporation',
author_email='katharine@pebble.com',
license='MIT',
packages=packages,
install_requires=requires,
entry_points={
'console_scripts': [
'pypkjs=runner.websocket:run_tool'
],
},
zip_safe=True)
Address review comments, remove zip_safe=True because GeoIP
|
__author__ = 'katharine'
import sys
from setuptools import setup, find_packages
requires = [
'backports.ssl-match-hostname==3.4.0.2',
'gevent>=1.1b5',
'gevent-websocket==0.9.3',
'greenlet==0.4.9',
'peewee==2.4.7',
'pygeoip==0.3.2',
'pypng==0.0.17',
'python-dateutil==2.4.1',
'requests==2.5.0',
'sh==1.09',
'six==1.9.0',
'websocket-client==0.31.0',
'wsgiref==0.1.2',
'libpebble2==0.0.12',
'netaddr==0.7.18'
]
setup(name='pypkjs',
version='3.6',
description='PebbleKit JS in Python!',
url='https://github.com/pebble/pypkjs',
author='Pebble Technology Corporation',
author_email='katharine@pebble.com',
license='MIT',
packages=find_packages(),
install_requires=requires,
entry_points={
'console_scripts': [
'pypkjs=runner.websocket:run_tool'
],
})
|
<commit_before>__author__ = 'katharine'
import sys
from setuptools import setup, find_packages
requires = [
'backports.ssl-match-hostname==3.4.0.2',
'gevent>=1.1b5',
'gevent-websocket==0.9.3',
'greenlet==0.4.9',
'peewee==2.4.7',
'pygeoip==0.3.2',
'pypng==0.0.17',
'python-dateutil==2.4.1',
'requests==2.5.0',
'sh==1.09',
'six==1.9.0',
'websocket-client==0.31.0',
'wsgiref==0.1.2',
'libpebble2==0.0.12',
'netaddr==0.7.18'
]
packages = find_packages()
print packages
setup(name='pypkjs',
version='3.6',
description='PebbleKit JS in Python!',
url='https://github.com/pebble/pypkjs',
author='Pebble Technology Corporation',
author_email='katharine@pebble.com',
license='MIT',
packages=packages,
install_requires=requires,
entry_points={
'console_scripts': [
'pypkjs=runner.websocket:run_tool'
],
},
zip_safe=True)
<commit_msg>Address review comments, remove zip_safe=True because GeoIP<commit_after>
|
__author__ = 'katharine'
import sys
from setuptools import setup, find_packages
requires = [
'backports.ssl-match-hostname==3.4.0.2',
'gevent>=1.1b5',
'gevent-websocket==0.9.3',
'greenlet==0.4.9',
'peewee==2.4.7',
'pygeoip==0.3.2',
'pypng==0.0.17',
'python-dateutil==2.4.1',
'requests==2.5.0',
'sh==1.09',
'six==1.9.0',
'websocket-client==0.31.0',
'wsgiref==0.1.2',
'libpebble2==0.0.12',
'netaddr==0.7.18'
]
setup(name='pypkjs',
version='3.6',
description='PebbleKit JS in Python!',
url='https://github.com/pebble/pypkjs',
author='Pebble Technology Corporation',
author_email='katharine@pebble.com',
license='MIT',
packages=find_packages(),
install_requires=requires,
entry_points={
'console_scripts': [
'pypkjs=runner.websocket:run_tool'
],
})
|
__author__ = 'katharine'
import sys
from setuptools import setup, find_packages
requires = [
'backports.ssl-match-hostname==3.4.0.2',
'gevent>=1.1b5',
'gevent-websocket==0.9.3',
'greenlet==0.4.9',
'peewee==2.4.7',
'pygeoip==0.3.2',
'pypng==0.0.17',
'python-dateutil==2.4.1',
'requests==2.5.0',
'sh==1.09',
'six==1.9.0',
'websocket-client==0.31.0',
'wsgiref==0.1.2',
'libpebble2==0.0.12',
'netaddr==0.7.18'
]
packages = find_packages()
print packages
setup(name='pypkjs',
version='3.6',
description='PebbleKit JS in Python!',
url='https://github.com/pebble/pypkjs',
author='Pebble Technology Corporation',
author_email='katharine@pebble.com',
license='MIT',
packages=packages,
install_requires=requires,
entry_points={
'console_scripts': [
'pypkjs=runner.websocket:run_tool'
],
},
zip_safe=True)
Address review comments, remove zip_safe=True because GeoIP__author__ = 'katharine'
import sys
from setuptools import setup, find_packages
requires = [
'backports.ssl-match-hostname==3.4.0.2',
'gevent>=1.1b5',
'gevent-websocket==0.9.3',
'greenlet==0.4.9',
'peewee==2.4.7',
'pygeoip==0.3.2',
'pypng==0.0.17',
'python-dateutil==2.4.1',
'requests==2.5.0',
'sh==1.09',
'six==1.9.0',
'websocket-client==0.31.0',
'wsgiref==0.1.2',
'libpebble2==0.0.12',
'netaddr==0.7.18'
]
setup(name='pypkjs',
version='3.6',
description='PebbleKit JS in Python!',
url='https://github.com/pebble/pypkjs',
author='Pebble Technology Corporation',
author_email='katharine@pebble.com',
license='MIT',
packages=find_packages(),
install_requires=requires,
entry_points={
'console_scripts': [
'pypkjs=runner.websocket:run_tool'
],
})
|
<commit_before>__author__ = 'katharine'
import sys
from setuptools import setup, find_packages
requires = [
'backports.ssl-match-hostname==3.4.0.2',
'gevent>=1.1b5',
'gevent-websocket==0.9.3',
'greenlet==0.4.9',
'peewee==2.4.7',
'pygeoip==0.3.2',
'pypng==0.0.17',
'python-dateutil==2.4.1',
'requests==2.5.0',
'sh==1.09',
'six==1.9.0',
'websocket-client==0.31.0',
'wsgiref==0.1.2',
'libpebble2==0.0.12',
'netaddr==0.7.18'
]
packages = find_packages()
print packages
setup(name='pypkjs',
version='3.6',
description='PebbleKit JS in Python!',
url='https://github.com/pebble/pypkjs',
author='Pebble Technology Corporation',
author_email='katharine@pebble.com',
license='MIT',
packages=packages,
install_requires=requires,
entry_points={
'console_scripts': [
'pypkjs=runner.websocket:run_tool'
],
},
zip_safe=True)
<commit_msg>Address review comments, remove zip_safe=True because GeoIP<commit_after>__author__ = 'katharine'
import sys
from setuptools import setup, find_packages
requires = [
'backports.ssl-match-hostname==3.4.0.2',
'gevent>=1.1b5',
'gevent-websocket==0.9.3',
'greenlet==0.4.9',
'peewee==2.4.7',
'pygeoip==0.3.2',
'pypng==0.0.17',
'python-dateutil==2.4.1',
'requests==2.5.0',
'sh==1.09',
'six==1.9.0',
'websocket-client==0.31.0',
'wsgiref==0.1.2',
'libpebble2==0.0.12',
'netaddr==0.7.18'
]
setup(name='pypkjs',
version='3.6',
description='PebbleKit JS in Python!',
url='https://github.com/pebble/pypkjs',
author='Pebble Technology Corporation',
author_email='katharine@pebble.com',
license='MIT',
packages=find_packages(),
install_requires=requires,
entry_points={
'console_scripts': [
'pypkjs=runner.websocket:run_tool'
],
})
|
b1e5ed0fa032550395c18fbebab629144e5fed36
|
setup.py
|
setup.py
|
import sys
from setuptools import setup
from setuptools.extension import Extension
#XXX gettid only works on Linux, don't bother else
if 'linux' in sys.platform:
exts = [Extension('_libgettid', sources=['_libgettid.c'])]
else:
exts = []
setup(
name='mmstats',
version='0.1',
license='BSD',
author='Michael Schurter',
author_email='m@schmichael.com',
description='Stat publishing and consuming tools',
py_modules=['mmstats', 'slurpstats', 'mmash', 'mmash_settings'],
ext_modules=exts,
install_requires=['Flask'],
classifiers=['License :: OSI Approved :: BSD License'],
zip_safe=False,
)
|
import sys
from setuptools import setup
from setuptools.extension import Extension
#XXX gettid only works on Linux, don't bother else
if 'linux' in sys.platform:
exts = [Extension('_libgettid', sources=['_libgettid.c'])]
else:
exts = []
setup(
name='mmstats',
version='0.1',
license='BSD',
author='Michael Schurter',
author_email='m@schmichael.com',
description='Stat publishing and consuming tools',
py_modules=['libgettid',
'mmstats',
'slurpstats',
'mmash',
'mmash_settings'
],
ext_modules=exts,
install_requires=['Flask'],
classifiers=['License :: OSI Approved :: BSD License'],
zip_safe=False,
)
|
Add missing libgettid to py_modules
|
Add missing libgettid to py_modules
|
Python
|
bsd-3-clause
|
schmichael/mmstats,schmichael/mmstats,schmichael/mmstats,schmichael/mmstats
|
import sys
from setuptools import setup
from setuptools.extension import Extension
#XXX gettid only works on Linux, don't bother else
if 'linux' in sys.platform:
exts = [Extension('_libgettid', sources=['_libgettid.c'])]
else:
exts = []
setup(
name='mmstats',
version='0.1',
license='BSD',
author='Michael Schurter',
author_email='m@schmichael.com',
description='Stat publishing and consuming tools',
py_modules=['mmstats', 'slurpstats', 'mmash', 'mmash_settings'],
ext_modules=exts,
install_requires=['Flask'],
classifiers=['License :: OSI Approved :: BSD License'],
zip_safe=False,
)
Add missing libgettid to py_modules
|
import sys
from setuptools import setup
from setuptools.extension import Extension
#XXX gettid only works on Linux, don't bother else
if 'linux' in sys.platform:
exts = [Extension('_libgettid', sources=['_libgettid.c'])]
else:
exts = []
setup(
name='mmstats',
version='0.1',
license='BSD',
author='Michael Schurter',
author_email='m@schmichael.com',
description='Stat publishing and consuming tools',
py_modules=['libgettid',
'mmstats',
'slurpstats',
'mmash',
'mmash_settings'
],
ext_modules=exts,
install_requires=['Flask'],
classifiers=['License :: OSI Approved :: BSD License'],
zip_safe=False,
)
|
<commit_before>import sys
from setuptools import setup
from setuptools.extension import Extension
#XXX gettid only works on Linux, don't bother else
if 'linux' in sys.platform:
exts = [Extension('_libgettid', sources=['_libgettid.c'])]
else:
exts = []
setup(
name='mmstats',
version='0.1',
license='BSD',
author='Michael Schurter',
author_email='m@schmichael.com',
description='Stat publishing and consuming tools',
py_modules=['mmstats', 'slurpstats', 'mmash', 'mmash_settings'],
ext_modules=exts,
install_requires=['Flask'],
classifiers=['License :: OSI Approved :: BSD License'],
zip_safe=False,
)
<commit_msg>Add missing libgettid to py_modules<commit_after>
|
import sys
from setuptools import setup
from setuptools.extension import Extension
#XXX gettid only works on Linux, don't bother else
if 'linux' in sys.platform:
exts = [Extension('_libgettid', sources=['_libgettid.c'])]
else:
exts = []
setup(
name='mmstats',
version='0.1',
license='BSD',
author='Michael Schurter',
author_email='m@schmichael.com',
description='Stat publishing and consuming tools',
py_modules=['libgettid',
'mmstats',
'slurpstats',
'mmash',
'mmash_settings'
],
ext_modules=exts,
install_requires=['Flask'],
classifiers=['License :: OSI Approved :: BSD License'],
zip_safe=False,
)
|
import sys
from setuptools import setup
from setuptools.extension import Extension
#XXX gettid only works on Linux, don't bother else
if 'linux' in sys.platform:
exts = [Extension('_libgettid', sources=['_libgettid.c'])]
else:
exts = []
setup(
name='mmstats',
version='0.1',
license='BSD',
author='Michael Schurter',
author_email='m@schmichael.com',
description='Stat publishing and consuming tools',
py_modules=['mmstats', 'slurpstats', 'mmash', 'mmash_settings'],
ext_modules=exts,
install_requires=['Flask'],
classifiers=['License :: OSI Approved :: BSD License'],
zip_safe=False,
)
Add missing libgettid to py_modulesimport sys
from setuptools import setup
from setuptools.extension import Extension
#XXX gettid only works on Linux, don't bother else
if 'linux' in sys.platform:
exts = [Extension('_libgettid', sources=['_libgettid.c'])]
else:
exts = []
setup(
name='mmstats',
version='0.1',
license='BSD',
author='Michael Schurter',
author_email='m@schmichael.com',
description='Stat publishing and consuming tools',
py_modules=['libgettid',
'mmstats',
'slurpstats',
'mmash',
'mmash_settings'
],
ext_modules=exts,
install_requires=['Flask'],
classifiers=['License :: OSI Approved :: BSD License'],
zip_safe=False,
)
|
<commit_before>import sys
from setuptools import setup
from setuptools.extension import Extension
#XXX gettid only works on Linux, don't bother else
if 'linux' in sys.platform:
exts = [Extension('_libgettid', sources=['_libgettid.c'])]
else:
exts = []
setup(
name='mmstats',
version='0.1',
license='BSD',
author='Michael Schurter',
author_email='m@schmichael.com',
description='Stat publishing and consuming tools',
py_modules=['mmstats', 'slurpstats', 'mmash', 'mmash_settings'],
ext_modules=exts,
install_requires=['Flask'],
classifiers=['License :: OSI Approved :: BSD License'],
zip_safe=False,
)
<commit_msg>Add missing libgettid to py_modules<commit_after>import sys
from setuptools import setup
from setuptools.extension import Extension
#XXX gettid only works on Linux, don't bother else
if 'linux' in sys.platform:
exts = [Extension('_libgettid', sources=['_libgettid.c'])]
else:
exts = []
setup(
name='mmstats',
version='0.1',
license='BSD',
author='Michael Schurter',
author_email='m@schmichael.com',
description='Stat publishing and consuming tools',
py_modules=['libgettid',
'mmstats',
'slurpstats',
'mmash',
'mmash_settings'
],
ext_modules=exts,
install_requires=['Flask'],
classifiers=['License :: OSI Approved :: BSD License'],
zip_safe=False,
)
|
556eada791fca84432e36c3e1fcf722ecf0580ff
|
setup.py
|
setup.py
|
from __future__ import absolute_import
from setuptools import setup, find_packages
long_desc = """
Transform Excel spreadsheets
"""
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers for classifiers
conf = dict(
name='databaker',
version='1.2.1',
description="DataBaker, part of QuickCode for ONS",
long_description=long_desc,
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
"Operating System :: POSIX :: Linux",
"Operating System :: Microsoft :: Windows",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
keywords='',
author='The Sensible Code Company Ltd',
author_email='feedback@sensiblecode.io',
url='https://github.com/sensiblecodeio/databaker',
license='AGPL',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=['docopt', 'xypath>=1.1.0', 'xlutils', 'pyhamcrest'],
tests_require=[],
entry_points={},
)
if __name__ == '__main__':
setup(**conf)
|
from setuptools import setup, find_packages
long_desc = """
Transform Excel spreadsheets
"""
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers for classifiers
conf = dict(
name='databaker',
version='1.2.1',
description="DataBaker, part of QuickCode for ONS",
long_description=long_desc,
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
"Operating System :: POSIX :: Linux",
"Operating System :: Microsoft :: Windows",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
keywords='',
author='The Sensible Code Company Ltd',
author_email='feedback@sensiblecode.io',
url='https://github.com/sensiblecodeio/databaker',
license='AGPL',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=['docopt', 'xypath>=1.1.0', 'xlutils', 'pyhamcrest'],
tests_require=[],
entry_points={},
)
if __name__ == '__main__':
setup(**conf)
|
Remove an overlooked from __future__ import
|
Remove an overlooked from __future__ import
|
Python
|
agpl-3.0
|
scraperwiki/databaker,scraperwiki/databaker
|
from __future__ import absolute_import
from setuptools import setup, find_packages
long_desc = """
Transform Excel spreadsheets
"""
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers for classifiers
conf = dict(
name='databaker',
version='1.2.1',
description="DataBaker, part of QuickCode for ONS",
long_description=long_desc,
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
"Operating System :: POSIX :: Linux",
"Operating System :: Microsoft :: Windows",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
keywords='',
author='The Sensible Code Company Ltd',
author_email='feedback@sensiblecode.io',
url='https://github.com/sensiblecodeio/databaker',
license='AGPL',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=['docopt', 'xypath>=1.1.0', 'xlutils', 'pyhamcrest'],
tests_require=[],
entry_points={},
)
if __name__ == '__main__':
setup(**conf)
Remove an overlooked from __future__ import
|
from setuptools import setup, find_packages
long_desc = """
Transform Excel spreadsheets
"""
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers for classifiers
conf = dict(
name='databaker',
version='1.2.1',
description="DataBaker, part of QuickCode for ONS",
long_description=long_desc,
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
"Operating System :: POSIX :: Linux",
"Operating System :: Microsoft :: Windows",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
keywords='',
author='The Sensible Code Company Ltd',
author_email='feedback@sensiblecode.io',
url='https://github.com/sensiblecodeio/databaker',
license='AGPL',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=['docopt', 'xypath>=1.1.0', 'xlutils', 'pyhamcrest'],
tests_require=[],
entry_points={},
)
if __name__ == '__main__':
setup(**conf)
|
<commit_before>from __future__ import absolute_import
from setuptools import setup, find_packages
long_desc = """
Transform Excel spreadsheets
"""
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers for classifiers
conf = dict(
name='databaker',
version='1.2.1',
description="DataBaker, part of QuickCode for ONS",
long_description=long_desc,
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
"Operating System :: POSIX :: Linux",
"Operating System :: Microsoft :: Windows",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
keywords='',
author='The Sensible Code Company Ltd',
author_email='feedback@sensiblecode.io',
url='https://github.com/sensiblecodeio/databaker',
license='AGPL',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=['docopt', 'xypath>=1.1.0', 'xlutils', 'pyhamcrest'],
tests_require=[],
entry_points={},
)
if __name__ == '__main__':
setup(**conf)
<commit_msg>Remove an overlooked from __future__ import<commit_after>
|
from setuptools import setup, find_packages
long_desc = """
Transform Excel spreadsheets
"""
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers for classifiers
conf = dict(
name='databaker',
version='1.2.1',
description="DataBaker, part of QuickCode for ONS",
long_description=long_desc,
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
"Operating System :: POSIX :: Linux",
"Operating System :: Microsoft :: Windows",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
keywords='',
author='The Sensible Code Company Ltd',
author_email='feedback@sensiblecode.io',
url='https://github.com/sensiblecodeio/databaker',
license='AGPL',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=['docopt', 'xypath>=1.1.0', 'xlutils', 'pyhamcrest'],
tests_require=[],
entry_points={},
)
if __name__ == '__main__':
setup(**conf)
|
from __future__ import absolute_import
from setuptools import setup, find_packages
long_desc = """
Transform Excel spreadsheets
"""
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers for classifiers
conf = dict(
name='databaker',
version='1.2.1',
description="DataBaker, part of QuickCode for ONS",
long_description=long_desc,
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
"Operating System :: POSIX :: Linux",
"Operating System :: Microsoft :: Windows",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
keywords='',
author='The Sensible Code Company Ltd',
author_email='feedback@sensiblecode.io',
url='https://github.com/sensiblecodeio/databaker',
license='AGPL',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=['docopt', 'xypath>=1.1.0', 'xlutils', 'pyhamcrest'],
tests_require=[],
entry_points={},
)
if __name__ == '__main__':
setup(**conf)
Remove an overlooked from __future__ importfrom setuptools import setup, find_packages
long_desc = """
Transform Excel spreadsheets
"""
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers for classifiers
conf = dict(
name='databaker',
version='1.2.1',
description="DataBaker, part of QuickCode for ONS",
long_description=long_desc,
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
"Operating System :: POSIX :: Linux",
"Operating System :: Microsoft :: Windows",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
keywords='',
author='The Sensible Code Company Ltd',
author_email='feedback@sensiblecode.io',
url='https://github.com/sensiblecodeio/databaker',
license='AGPL',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=['docopt', 'xypath>=1.1.0', 'xlutils', 'pyhamcrest'],
tests_require=[],
entry_points={},
)
if __name__ == '__main__':
setup(**conf)
|
<commit_before>from __future__ import absolute_import
from setuptools import setup, find_packages
long_desc = """
Transform Excel spreadsheets
"""
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers for classifiers
conf = dict(
name='databaker',
version='1.2.1',
description="DataBaker, part of QuickCode for ONS",
long_description=long_desc,
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
"Operating System :: POSIX :: Linux",
"Operating System :: Microsoft :: Windows",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
keywords='',
author='The Sensible Code Company Ltd',
author_email='feedback@sensiblecode.io',
url='https://github.com/sensiblecodeio/databaker',
license='AGPL',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=['docopt', 'xypath>=1.1.0', 'xlutils', 'pyhamcrest'],
tests_require=[],
entry_points={},
)
if __name__ == '__main__':
setup(**conf)
<commit_msg>Remove an overlooked from __future__ import<commit_after>from setuptools import setup, find_packages
long_desc = """
Transform Excel spreadsheets
"""
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers for classifiers
conf = dict(
name='databaker',
version='1.2.1',
description="DataBaker, part of QuickCode for ONS",
long_description=long_desc,
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
"Operating System :: POSIX :: Linux",
"Operating System :: Microsoft :: Windows",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
keywords='',
author='The Sensible Code Company Ltd',
author_email='feedback@sensiblecode.io',
url='https://github.com/sensiblecodeio/databaker',
license='AGPL',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=[],
include_package_data=False,
zip_safe=False,
install_requires=['docopt', 'xypath>=1.1.0', 'xlutils', 'pyhamcrest'],
tests_require=[],
entry_points={},
)
if __name__ == '__main__':
setup(**conf)
|
510b52357e6266faec6338abd8fed46102acceca
|
setup.py
|
setup.py
|
# Copyright (c) 2015 CaT Concepts and Training GmbH
try:
from setuptools import setup, find_packages, Command
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages, Command
import platform
import os
import subprocess
import sys
if platform.python_implementation() == 'PyPy':
psycopg2_dependency = 'psycopg2cffi==2.7.1'
else:
psycopg2_dependency = 'psycopg2==2.6.1'
setup(
name='libtree',
version='0.0.2',
author='Fabian Kochem',
author_email='fabian.kochem@concepts-and-training.de',
description='Postgres-based library to handle and persist wide trees',
url='https://github.com/conceptsandtraining/libtree',
# Dependencies
install_requires=[
psycopg2_dependency
],
entry_points={},
packages=find_packages(),
zip_safe=False,
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Database',
'Topic :: Software Development :: Libraries'
],
)
|
# Copyright (c) 2015 CaT Concepts and Training GmbH
try:
from setuptools import setup, find_packages, Command
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages, Command
import platform
import os
import subprocess
import sys
if platform.python_implementation() == 'PyPy':
psycopg2_dependency = 'psycopg2cffi==2.7.1'
else:
psycopg2_dependency = 'psycopg2==2.6.1'
setup(
name='libtree',
version='0.0.3',
author='Fabian Kochem',
author_email='fabian.kochem@concepts-and-training.de',
description='Postgres-based library to handle and persist wide trees',
url='https://github.com/conceptsandtraining/libtree',
# Dependencies
install_requires=[
psycopg2_dependency
],
entry_points={},
packages=find_packages(),
zip_safe=False,
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Database',
'Topic :: Software Development :: Libraries'
],
)
|
Bump version: 0.0.2 -> 0.0.3
|
Bump version: 0.0.2 -> 0.0.3
PyPI won't let me delete the 0.0.2 release, so we have to issue a new one to add the project URL.
|
Python
|
mit
|
jameshy/libtree,conceptsandtraining/libtree
|
# Copyright (c) 2015 CaT Concepts and Training GmbH
try:
from setuptools import setup, find_packages, Command
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages, Command
import platform
import os
import subprocess
import sys
if platform.python_implementation() == 'PyPy':
psycopg2_dependency = 'psycopg2cffi==2.7.1'
else:
psycopg2_dependency = 'psycopg2==2.6.1'
setup(
name='libtree',
version='0.0.2',
author='Fabian Kochem',
author_email='fabian.kochem@concepts-and-training.de',
description='Postgres-based library to handle and persist wide trees',
url='https://github.com/conceptsandtraining/libtree',
# Dependencies
install_requires=[
psycopg2_dependency
],
entry_points={},
packages=find_packages(),
zip_safe=False,
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Database',
'Topic :: Software Development :: Libraries'
],
)
Bump version: 0.0.2 -> 0.0.3
PyPI won't let me delete the 0.0.2 release, so we have to issue a new one to add the project URL.
|
# Copyright (c) 2015 CaT Concepts and Training GmbH
try:
from setuptools import setup, find_packages, Command
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages, Command
import platform
import os
import subprocess
import sys
if platform.python_implementation() == 'PyPy':
psycopg2_dependency = 'psycopg2cffi==2.7.1'
else:
psycopg2_dependency = 'psycopg2==2.6.1'
setup(
name='libtree',
version='0.0.3',
author='Fabian Kochem',
author_email='fabian.kochem@concepts-and-training.de',
description='Postgres-based library to handle and persist wide trees',
url='https://github.com/conceptsandtraining/libtree',
# Dependencies
install_requires=[
psycopg2_dependency
],
entry_points={},
packages=find_packages(),
zip_safe=False,
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Database',
'Topic :: Software Development :: Libraries'
],
)
|
<commit_before># Copyright (c) 2015 CaT Concepts and Training GmbH
try:
from setuptools import setup, find_packages, Command
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages, Command
import platform
import os
import subprocess
import sys
if platform.python_implementation() == 'PyPy':
psycopg2_dependency = 'psycopg2cffi==2.7.1'
else:
psycopg2_dependency = 'psycopg2==2.6.1'
setup(
name='libtree',
version='0.0.2',
author='Fabian Kochem',
author_email='fabian.kochem@concepts-and-training.de',
description='Postgres-based library to handle and persist wide trees',
url='https://github.com/conceptsandtraining/libtree',
# Dependencies
install_requires=[
psycopg2_dependency
],
entry_points={},
packages=find_packages(),
zip_safe=False,
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Database',
'Topic :: Software Development :: Libraries'
],
)
<commit_msg>Bump version: 0.0.2 -> 0.0.3
PyPI won't let me delete the 0.0.2 release, so we have to issue a new one to add the project URL.<commit_after>
|
# Copyright (c) 2015 CaT Concepts and Training GmbH
try:
from setuptools import setup, find_packages, Command
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages, Command
import platform
import os
import subprocess
import sys
if platform.python_implementation() == 'PyPy':
psycopg2_dependency = 'psycopg2cffi==2.7.1'
else:
psycopg2_dependency = 'psycopg2==2.6.1'
setup(
name='libtree',
version='0.0.3',
author='Fabian Kochem',
author_email='fabian.kochem@concepts-and-training.de',
description='Postgres-based library to handle and persist wide trees',
url='https://github.com/conceptsandtraining/libtree',
# Dependencies
install_requires=[
psycopg2_dependency
],
entry_points={},
packages=find_packages(),
zip_safe=False,
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Database',
'Topic :: Software Development :: Libraries'
],
)
|
# Copyright (c) 2015 CaT Concepts and Training GmbH
try:
from setuptools import setup, find_packages, Command
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages, Command
import platform
import os
import subprocess
import sys
if platform.python_implementation() == 'PyPy':
psycopg2_dependency = 'psycopg2cffi==2.7.1'
else:
psycopg2_dependency = 'psycopg2==2.6.1'
setup(
name='libtree',
version='0.0.2',
author='Fabian Kochem',
author_email='fabian.kochem@concepts-and-training.de',
description='Postgres-based library to handle and persist wide trees',
url='https://github.com/conceptsandtraining/libtree',
# Dependencies
install_requires=[
psycopg2_dependency
],
entry_points={},
packages=find_packages(),
zip_safe=False,
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Database',
'Topic :: Software Development :: Libraries'
],
)
Bump version: 0.0.2 -> 0.0.3
PyPI won't let me delete the 0.0.2 release, so we have to issue a new one to add the project URL.# Copyright (c) 2015 CaT Concepts and Training GmbH
try:
from setuptools import setup, find_packages, Command
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages, Command
import platform
import os
import subprocess
import sys
if platform.python_implementation() == 'PyPy':
psycopg2_dependency = 'psycopg2cffi==2.7.1'
else:
psycopg2_dependency = 'psycopg2==2.6.1'
setup(
name='libtree',
version='0.0.3',
author='Fabian Kochem',
author_email='fabian.kochem@concepts-and-training.de',
description='Postgres-based library to handle and persist wide trees',
url='https://github.com/conceptsandtraining/libtree',
# Dependencies
install_requires=[
psycopg2_dependency
],
entry_points={},
packages=find_packages(),
zip_safe=False,
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Database',
'Topic :: Software Development :: Libraries'
],
)
|
<commit_before># Copyright (c) 2015 CaT Concepts and Training GmbH
try:
from setuptools import setup, find_packages, Command
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages, Command
import platform
import os
import subprocess
import sys
if platform.python_implementation() == 'PyPy':
psycopg2_dependency = 'psycopg2cffi==2.7.1'
else:
psycopg2_dependency = 'psycopg2==2.6.1'
setup(
name='libtree',
version='0.0.2',
author='Fabian Kochem',
author_email='fabian.kochem@concepts-and-training.de',
description='Postgres-based library to handle and persist wide trees',
url='https://github.com/conceptsandtraining/libtree',
# Dependencies
install_requires=[
psycopg2_dependency
],
entry_points={},
packages=find_packages(),
zip_safe=False,
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Database',
'Topic :: Software Development :: Libraries'
],
)
<commit_msg>Bump version: 0.0.2 -> 0.0.3
PyPI won't let me delete the 0.0.2 release, so we have to issue a new one to add the project URL.<commit_after># Copyright (c) 2015 CaT Concepts and Training GmbH
try:
from setuptools import setup, find_packages, Command
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages, Command
import platform
import os
import subprocess
import sys
if platform.python_implementation() == 'PyPy':
psycopg2_dependency = 'psycopg2cffi==2.7.1'
else:
psycopg2_dependency = 'psycopg2==2.6.1'
setup(
name='libtree',
version='0.0.3',
author='Fabian Kochem',
author_email='fabian.kochem@concepts-and-training.de',
description='Postgres-based library to handle and persist wide trees',
url='https://github.com/conceptsandtraining/libtree',
# Dependencies
install_requires=[
psycopg2_dependency
],
entry_points={},
packages=find_packages(),
zip_safe=False,
include_package_data=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Database',
'Topic :: Software Development :: Libraries'
],
)
|
5d57e9a6a456a6919bb6c39bd34dad76a2a6356f
|
setup.py
|
setup.py
|
#!/usr/bin/env python
"""
setup.py file for helium-client-python
"""
from distutils.core import setup, Extension
sourcefiles = ['src/helium_client.c',
'src/helium-serial.c',
'src/helium-client/helium-client.c',
'src/helium-client/cauterize/atom_api.c',
'src/helium-client/cauterize/atom_api_message.c',
'src/helium-client/cauterize/cauterize.c']
extensions = [Extension('helium_client',
include_dirs=['src/helium-client'],
sources=sourcefiles)]
setup(name='helium-client',
version='0.1',
author="Helium Client",
description="""Python interface to the Helium Atom""",
ext_modules=extensions)
|
#!/usr/bin/env python
"""
setup.py file for helium-client-python
"""
from distutils.core import setup, Extension
sourcefiles = ['src/helium_client.c',
'src/helium-serial.c',
'src/helium-client/helium-client.c',
'src/helium-client/cauterize/atom_api.c',
'src/helium-client/cauterize/atom_api_message.c',
'src/helium-client/cauterize/cauterize.c']
extensions = [Extension('helium_client',
include_dirs=['src/helium-client'],
extra_compile_args=['-std=gnu99'],
sources=sourcefiles)]
setup(name='helium-client',
version='0.1',
author="Helium Client",
description="""Python interface to the Helium Atom""",
ext_modules=extensions)
|
Add gnu99 build flag for linuxy builds
|
Add gnu99 build flag for linuxy builds
|
Python
|
bsd-3-clause
|
helium/helium-client-python,helium/helium-client-python
|
#!/usr/bin/env python
"""
setup.py file for helium-client-python
"""
from distutils.core import setup, Extension
sourcefiles = ['src/helium_client.c',
'src/helium-serial.c',
'src/helium-client/helium-client.c',
'src/helium-client/cauterize/atom_api.c',
'src/helium-client/cauterize/atom_api_message.c',
'src/helium-client/cauterize/cauterize.c']
extensions = [Extension('helium_client',
include_dirs=['src/helium-client'],
sources=sourcefiles)]
setup(name='helium-client',
version='0.1',
author="Helium Client",
description="""Python interface to the Helium Atom""",
ext_modules=extensions)
Add gnu99 build flag for linuxy builds
|
#!/usr/bin/env python
"""
setup.py file for helium-client-python
"""
from distutils.core import setup, Extension
sourcefiles = ['src/helium_client.c',
'src/helium-serial.c',
'src/helium-client/helium-client.c',
'src/helium-client/cauterize/atom_api.c',
'src/helium-client/cauterize/atom_api_message.c',
'src/helium-client/cauterize/cauterize.c']
extensions = [Extension('helium_client',
include_dirs=['src/helium-client'],
extra_compile_args=['-std=gnu99'],
sources=sourcefiles)]
setup(name='helium-client',
version='0.1',
author="Helium Client",
description="""Python interface to the Helium Atom""",
ext_modules=extensions)
|
<commit_before>#!/usr/bin/env python
"""
setup.py file for helium-client-python
"""
from distutils.core import setup, Extension
sourcefiles = ['src/helium_client.c',
'src/helium-serial.c',
'src/helium-client/helium-client.c',
'src/helium-client/cauterize/atom_api.c',
'src/helium-client/cauterize/atom_api_message.c',
'src/helium-client/cauterize/cauterize.c']
extensions = [Extension('helium_client',
include_dirs=['src/helium-client'],
sources=sourcefiles)]
setup(name='helium-client',
version='0.1',
author="Helium Client",
description="""Python interface to the Helium Atom""",
ext_modules=extensions)
<commit_msg>Add gnu99 build flag for linuxy builds<commit_after>
|
#!/usr/bin/env python
"""
setup.py file for helium-client-python
"""
from distutils.core import setup, Extension
sourcefiles = ['src/helium_client.c',
'src/helium-serial.c',
'src/helium-client/helium-client.c',
'src/helium-client/cauterize/atom_api.c',
'src/helium-client/cauterize/atom_api_message.c',
'src/helium-client/cauterize/cauterize.c']
extensions = [Extension('helium_client',
include_dirs=['src/helium-client'],
extra_compile_args=['-std=gnu99'],
sources=sourcefiles)]
setup(name='helium-client',
version='0.1',
author="Helium Client",
description="""Python interface to the Helium Atom""",
ext_modules=extensions)
|
#!/usr/bin/env python
"""
setup.py file for helium-client-python
"""
from distutils.core import setup, Extension
sourcefiles = ['src/helium_client.c',
'src/helium-serial.c',
'src/helium-client/helium-client.c',
'src/helium-client/cauterize/atom_api.c',
'src/helium-client/cauterize/atom_api_message.c',
'src/helium-client/cauterize/cauterize.c']
extensions = [Extension('helium_client',
include_dirs=['src/helium-client'],
sources=sourcefiles)]
setup(name='helium-client',
version='0.1',
author="Helium Client",
description="""Python interface to the Helium Atom""",
ext_modules=extensions)
Add gnu99 build flag for linuxy builds#!/usr/bin/env python
"""
setup.py file for helium-client-python
"""
from distutils.core import setup, Extension
sourcefiles = ['src/helium_client.c',
'src/helium-serial.c',
'src/helium-client/helium-client.c',
'src/helium-client/cauterize/atom_api.c',
'src/helium-client/cauterize/atom_api_message.c',
'src/helium-client/cauterize/cauterize.c']
extensions = [Extension('helium_client',
include_dirs=['src/helium-client'],
extra_compile_args=['-std=gnu99'],
sources=sourcefiles)]
setup(name='helium-client',
version='0.1',
author="Helium Client",
description="""Python interface to the Helium Atom""",
ext_modules=extensions)
|
<commit_before>#!/usr/bin/env python
"""
setup.py file for helium-client-python
"""
from distutils.core import setup, Extension
sourcefiles = ['src/helium_client.c',
'src/helium-serial.c',
'src/helium-client/helium-client.c',
'src/helium-client/cauterize/atom_api.c',
'src/helium-client/cauterize/atom_api_message.c',
'src/helium-client/cauterize/cauterize.c']
extensions = [Extension('helium_client',
include_dirs=['src/helium-client'],
sources=sourcefiles)]
setup(name='helium-client',
version='0.1',
author="Helium Client",
description="""Python interface to the Helium Atom""",
ext_modules=extensions)
<commit_msg>Add gnu99 build flag for linuxy builds<commit_after>#!/usr/bin/env python
"""
setup.py file for helium-client-python
"""
from distutils.core import setup, Extension
sourcefiles = ['src/helium_client.c',
'src/helium-serial.c',
'src/helium-client/helium-client.c',
'src/helium-client/cauterize/atom_api.c',
'src/helium-client/cauterize/atom_api_message.c',
'src/helium-client/cauterize/cauterize.c']
extensions = [Extension('helium_client',
include_dirs=['src/helium-client'],
extra_compile_args=['-std=gnu99'],
sources=sourcefiles)]
setup(name='helium-client',
version='0.1',
author="Helium Client",
description="""Python interface to the Helium Atom""",
ext_modules=extensions)
|
cb7d601f6e983df6a9633804f5483470f7dce27f
|
setup.py
|
setup.py
|
__author__ = 'mbach'
import multiprocessing # avoid crash on teardown
from setuptools import setup, find_packages
setup(
name = 'Brandon',
version = '0.1-dev',
packages = find_packages(exclude=['tests']),
author='Matthias Bach',
author_email='matthias.bach@blue-yonder.com',
description='Fill in index with wheels from an requirements.txt-like specification file.',
license='Proprietary',
install_requires=[
'setuptools',
'devpi-client'
],
setup_requires=[
'nose>=1.0'
],
tests_require=[
'nose>=1.0',
'devpi-server'
],
test_suite='nose.collector',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: Other/Proprietary License',
'Topic :: System :: Archiving :: Packaging'
],
entry_points={
'console_scripts': [
'nbsystem = brandon.cli:main',
],
},
)
|
__author__ = 'mbach'
import multiprocessing # avoid crash on teardown
from setuptools import setup, find_packages
setup(
name = 'Brandon',
version = '0.1-dev',
packages = find_packages(exclude=['tests']),
author='Matthias Bach',
author_email='matthias.bach@blue-yonder.com',
description='Fill in index with wheels from an requirements.txt-like specification file.',
license='Proprietary',
install_requires=[
'setuptools',
'devpi-client',
'wheel'
],
setup_requires=[
'nose>=1.0'
],
tests_require=[
'nose>=1.0',
'devpi-server'
],
test_suite='nose.collector',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: Other/Proprietary License',
'Topic :: System :: Archiving :: Packaging'
],
entry_points={
'console_scripts': [
'nbsystem = brandon.cli:main',
],
},
)
|
Make sure wheel is installed.
|
Make sure wheel is installed.
|
Python
|
bsd-3-clause
|
tylerdave/devpi-builder
|
__author__ = 'mbach'
import multiprocessing # avoid crash on teardown
from setuptools import setup, find_packages
setup(
name = 'Brandon',
version = '0.1-dev',
packages = find_packages(exclude=['tests']),
author='Matthias Bach',
author_email='matthias.bach@blue-yonder.com',
description='Fill in index with wheels from an requirements.txt-like specification file.',
license='Proprietary',
install_requires=[
'setuptools',
'devpi-client'
],
setup_requires=[
'nose>=1.0'
],
tests_require=[
'nose>=1.0',
'devpi-server'
],
test_suite='nose.collector',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: Other/Proprietary License',
'Topic :: System :: Archiving :: Packaging'
],
entry_points={
'console_scripts': [
'nbsystem = brandon.cli:main',
],
},
)Make sure wheel is installed.
|
__author__ = 'mbach'
import multiprocessing # avoid crash on teardown
from setuptools import setup, find_packages
setup(
name = 'Brandon',
version = '0.1-dev',
packages = find_packages(exclude=['tests']),
author='Matthias Bach',
author_email='matthias.bach@blue-yonder.com',
description='Fill in index with wheels from an requirements.txt-like specification file.',
license='Proprietary',
install_requires=[
'setuptools',
'devpi-client',
'wheel'
],
setup_requires=[
'nose>=1.0'
],
tests_require=[
'nose>=1.0',
'devpi-server'
],
test_suite='nose.collector',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: Other/Proprietary License',
'Topic :: System :: Archiving :: Packaging'
],
entry_points={
'console_scripts': [
'nbsystem = brandon.cli:main',
],
},
)
|
<commit_before>__author__ = 'mbach'
import multiprocessing # avoid crash on teardown
from setuptools import setup, find_packages
setup(
name = 'Brandon',
version = '0.1-dev',
packages = find_packages(exclude=['tests']),
author='Matthias Bach',
author_email='matthias.bach@blue-yonder.com',
description='Fill in index with wheels from an requirements.txt-like specification file.',
license='Proprietary',
install_requires=[
'setuptools',
'devpi-client'
],
setup_requires=[
'nose>=1.0'
],
tests_require=[
'nose>=1.0',
'devpi-server'
],
test_suite='nose.collector',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: Other/Proprietary License',
'Topic :: System :: Archiving :: Packaging'
],
entry_points={
'console_scripts': [
'nbsystem = brandon.cli:main',
],
},
)<commit_msg>Make sure wheel is installed.<commit_after>
|
__author__ = 'mbach'
import multiprocessing # avoid crash on teardown
from setuptools import setup, find_packages
setup(
name = 'Brandon',
version = '0.1-dev',
packages = find_packages(exclude=['tests']),
author='Matthias Bach',
author_email='matthias.bach@blue-yonder.com',
description='Fill in index with wheels from an requirements.txt-like specification file.',
license='Proprietary',
install_requires=[
'setuptools',
'devpi-client',
'wheel'
],
setup_requires=[
'nose>=1.0'
],
tests_require=[
'nose>=1.0',
'devpi-server'
],
test_suite='nose.collector',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: Other/Proprietary License',
'Topic :: System :: Archiving :: Packaging'
],
entry_points={
'console_scripts': [
'nbsystem = brandon.cli:main',
],
},
)
|
__author__ = 'mbach'
import multiprocessing # avoid crash on teardown
from setuptools import setup, find_packages
setup(
name = 'Brandon',
version = '0.1-dev',
packages = find_packages(exclude=['tests']),
author='Matthias Bach',
author_email='matthias.bach@blue-yonder.com',
description='Fill in index with wheels from an requirements.txt-like specification file.',
license='Proprietary',
install_requires=[
'setuptools',
'devpi-client'
],
setup_requires=[
'nose>=1.0'
],
tests_require=[
'nose>=1.0',
'devpi-server'
],
test_suite='nose.collector',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: Other/Proprietary License',
'Topic :: System :: Archiving :: Packaging'
],
entry_points={
'console_scripts': [
'nbsystem = brandon.cli:main',
],
},
)Make sure wheel is installed.__author__ = 'mbach'
import multiprocessing # avoid crash on teardown
from setuptools import setup, find_packages
setup(
name = 'Brandon',
version = '0.1-dev',
packages = find_packages(exclude=['tests']),
author='Matthias Bach',
author_email='matthias.bach@blue-yonder.com',
description='Fill in index with wheels from an requirements.txt-like specification file.',
license='Proprietary',
install_requires=[
'setuptools',
'devpi-client',
'wheel'
],
setup_requires=[
'nose>=1.0'
],
tests_require=[
'nose>=1.0',
'devpi-server'
],
test_suite='nose.collector',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: Other/Proprietary License',
'Topic :: System :: Archiving :: Packaging'
],
entry_points={
'console_scripts': [
'nbsystem = brandon.cli:main',
],
},
)
|
<commit_before>__author__ = 'mbach'
import multiprocessing # avoid crash on teardown
from setuptools import setup, find_packages
setup(
name = 'Brandon',
version = '0.1-dev',
packages = find_packages(exclude=['tests']),
author='Matthias Bach',
author_email='matthias.bach@blue-yonder.com',
description='Fill in index with wheels from an requirements.txt-like specification file.',
license='Proprietary',
install_requires=[
'setuptools',
'devpi-client'
],
setup_requires=[
'nose>=1.0'
],
tests_require=[
'nose>=1.0',
'devpi-server'
],
test_suite='nose.collector',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: Other/Proprietary License',
'Topic :: System :: Archiving :: Packaging'
],
entry_points={
'console_scripts': [
'nbsystem = brandon.cli:main',
],
},
)<commit_msg>Make sure wheel is installed.<commit_after>__author__ = 'mbach'
import multiprocessing # avoid crash on teardown
from setuptools import setup, find_packages
setup(
name = 'Brandon',
version = '0.1-dev',
packages = find_packages(exclude=['tests']),
author='Matthias Bach',
author_email='matthias.bach@blue-yonder.com',
description='Fill in index with wheels from an requirements.txt-like specification file.',
license='Proprietary',
install_requires=[
'setuptools',
'devpi-client',
'wheel'
],
setup_requires=[
'nose>=1.0'
],
tests_require=[
'nose>=1.0',
'devpi-server'
],
test_suite='nose.collector',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: Other/Proprietary License',
'Topic :: System :: Archiving :: Packaging'
],
entry_points={
'console_scripts': [
'nbsystem = brandon.cli:main',
],
},
)
|
c3c48bdeeb772a43491f73690215aede6d171516
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# Prefer setuptools (enables the test suite and package-data support);
# fall back to plain distutils when setuptools is unavailable.
try:
    from setuptools import setup
    extra = dict(test_suite="tests.test.suite", include_package_data=True)
except ImportError:
    from distutils.core import setup
    extra = {}

# Long description shown on the package index page.
long_description = \
'''
qav is a Python library for console-based question and answering, with the
ability to validate input.
'''

# Single-source the version from the package itself.
from qav import __version__

setup(
    name='qav',
    version=__version__,
    author='Derek Yarnell',
    author_email='derek@umiacs.umd.edu',
    packages=['qav'],
    url='https://github.com/UMIACS/qav',
    license='MIT',
    description='Question Answer Validation',
    long_description=long_description,
    install_requires=[
        'netaddr',
    ],
    **extra
)
|
#!/usr/bin/env python
try:
from setuptools import setup
extra = dict(test_suite="tests.test.suite", include_package_data=True)
except ImportError:
from distutils.core import setup
extra = {}
long_description = \
'''
qav is a Python library for console-based question and answering, with the
ability to validate input.
'''
from qav import __version__
setup(
name='qav',
version=__version__,
author='Derek Yarnell',
author_email='derek@umiacs.umd.edu',
packages=['qav'],
url='https://github.com/UMIACS/qav',
license='MIT',
description='Question Answer Validation',
long_description=long_description,
**extra
)
|
Revert "Added netaddr as an install-time requirement."
|
Revert "Added netaddr as an install-time requirement."
This reverts commit a0bf258c63ad71b7301b51dcc8e470d43b4a9c82.
|
Python
|
lgpl-2.1
|
raushan802/qav,UMIACS/qav
|
#!/usr/bin/env python
try:
from setuptools import setup
extra = dict(test_suite="tests.test.suite", include_package_data=True)
except ImportError:
from distutils.core import setup
extra = {}
long_description = \
'''
qav is a Python library for console-based question and answering, with the
ability to validate input.
'''
from qav import __version__
setup(
name='qav',
version=__version__,
author='Derek Yarnell',
author_email='derek@umiacs.umd.edu',
packages=['qav'],
url='https://github.com/UMIACS/qav',
license='MIT',
description='Question Answer Validation',
long_description=long_description,
install_requires=[
'netaddr',
],
**extra
)
Revert "Added netaddr as an install-time requirement."
This reverts commit a0bf258c63ad71b7301b51dcc8e470d43b4a9c82.
|
#!/usr/bin/env python
try:
from setuptools import setup
extra = dict(test_suite="tests.test.suite", include_package_data=True)
except ImportError:
from distutils.core import setup
extra = {}
long_description = \
'''
qav is a Python library for console-based question and answering, with the
ability to validate input.
'''
from qav import __version__
setup(
name='qav',
version=__version__,
author='Derek Yarnell',
author_email='derek@umiacs.umd.edu',
packages=['qav'],
url='https://github.com/UMIACS/qav',
license='MIT',
description='Question Answer Validation',
long_description=long_description,
**extra
)
|
<commit_before>#!/usr/bin/env python
try:
from setuptools import setup
extra = dict(test_suite="tests.test.suite", include_package_data=True)
except ImportError:
from distutils.core import setup
extra = {}
long_description = \
'''
qav is a Python library for console-based question and answering, with the
ability to validate input.
'''
from qav import __version__
setup(
name='qav',
version=__version__,
author='Derek Yarnell',
author_email='derek@umiacs.umd.edu',
packages=['qav'],
url='https://github.com/UMIACS/qav',
license='MIT',
description='Question Answer Validation',
long_description=long_description,
install_requires=[
'netaddr',
],
**extra
)
<commit_msg>Revert "Added netaddr as an install-time requirement."
This reverts commit a0bf258c63ad71b7301b51dcc8e470d43b4a9c82.<commit_after>
|
#!/usr/bin/env python
try:
from setuptools import setup
extra = dict(test_suite="tests.test.suite", include_package_data=True)
except ImportError:
from distutils.core import setup
extra = {}
long_description = \
'''
qav is a Python library for console-based question and answering, with the
ability to validate input.
'''
from qav import __version__
setup(
name='qav',
version=__version__,
author='Derek Yarnell',
author_email='derek@umiacs.umd.edu',
packages=['qav'],
url='https://github.com/UMIACS/qav',
license='MIT',
description='Question Answer Validation',
long_description=long_description,
**extra
)
|
#!/usr/bin/env python
try:
from setuptools import setup
extra = dict(test_suite="tests.test.suite", include_package_data=True)
except ImportError:
from distutils.core import setup
extra = {}
long_description = \
'''
qav is a Python library for console-based question and answering, with the
ability to validate input.
'''
from qav import __version__
setup(
name='qav',
version=__version__,
author='Derek Yarnell',
author_email='derek@umiacs.umd.edu',
packages=['qav'],
url='https://github.com/UMIACS/qav',
license='MIT',
description='Question Answer Validation',
long_description=long_description,
install_requires=[
'netaddr',
],
**extra
)
Revert "Added netaddr as an install-time requirement."
This reverts commit a0bf258c63ad71b7301b51dcc8e470d43b4a9c82.#!/usr/bin/env python
try:
from setuptools import setup
extra = dict(test_suite="tests.test.suite", include_package_data=True)
except ImportError:
from distutils.core import setup
extra = {}
long_description = \
'''
qav is a Python library for console-based question and answering, with the
ability to validate input.
'''
from qav import __version__
setup(
name='qav',
version=__version__,
author='Derek Yarnell',
author_email='derek@umiacs.umd.edu',
packages=['qav'],
url='https://github.com/UMIACS/qav',
license='MIT',
description='Question Answer Validation',
long_description=long_description,
**extra
)
|
<commit_before>#!/usr/bin/env python
try:
from setuptools import setup
extra = dict(test_suite="tests.test.suite", include_package_data=True)
except ImportError:
from distutils.core import setup
extra = {}
long_description = \
'''
qav is a Python library for console-based question and answering, with the
ability to validate input.
'''
from qav import __version__
setup(
name='qav',
version=__version__,
author='Derek Yarnell',
author_email='derek@umiacs.umd.edu',
packages=['qav'],
url='https://github.com/UMIACS/qav',
license='MIT',
description='Question Answer Validation',
long_description=long_description,
install_requires=[
'netaddr',
],
**extra
)
<commit_msg>Revert "Added netaddr as an install-time requirement."
This reverts commit a0bf258c63ad71b7301b51dcc8e470d43b4a9c82.<commit_after>#!/usr/bin/env python
try:
from setuptools import setup
extra = dict(test_suite="tests.test.suite", include_package_data=True)
except ImportError:
from distutils.core import setup
extra = {}
long_description = \
'''
qav is a Python library for console-based question and answering, with the
ability to validate input.
'''
from qav import __version__
setup(
name='qav',
version=__version__,
author='Derek Yarnell',
author_email='derek@umiacs.umd.edu',
packages=['qav'],
url='https://github.com/UMIACS/qav',
license='MIT',
description='Question Answer Validation',
long_description=long_description,
**extra
)
|
a9cd9a95f387fd62058b9fbb5597ef64e1e8c422
|
setup.py
|
setup.py
|
from setuptools import setup
from os import path
import codecs

here = path.abspath(path.dirname(__file__))

# Use the README (read as UTF-8) as the long description.
with codecs.open(path.join(here, 'README.rst'), encoding='utf-8') as f:
    long_description = f.read()

setup(
    name='data-tracking',
    version='1.5.11',
    zip_safe=False,
    url='https://github.com/LREN-CHUV/data-tracking',
    description='Extract meta-data from DICOM and NIFTI files',
    long_description=long_description,
    author='Mirco Nasuti',
    author_email='mirco.nasuti@chuv.ch',
    license='Apache 2.0',
    packages=['data_tracking'],
    keywords='mri dicom nifti',
    install_requires=[
        'airflow>=1.7.0',
        'pydicom>=0.9.9',
        'SQLAlchemy>=1.1.6',
        'python-magic>=0.4.12',
        'nibabel>=2.1.0',
        'psycopg2>=2.7.1'],
    classifiers=(
        'Intended Audience :: Developers',
        'Intended Audience :: Science/Research',
        'Operating System :: Unix',
        'License :: OSI Approved :: Apache Software License',
        'Topic :: Scientific/Engineering :: Bio-Informatics',
        'Programming Language :: Python :: 3 :: Only',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
    )
)
|
from setuptools import setup
from os import path
import codecs
here = path.abspath(path.dirname(__file__))
with codecs.open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='data-tracking',
version='1.5.11',
zip_safe=False,
url='https://github.com/LREN-CHUV/data-tracking',
description='Extract meta-data from DICOM and NIFTI files',
long_description=long_description,
author='Mirco Nasuti',
author_email='mirco.nasuti@chuv.ch',
license='Apache 2.0',
packages=['data_tracking'],
keywords='mri dicom nifti',
install_requires=[
'apache-airflow>=1.8.1',
'pydicom>=0.9.9',
'SQLAlchemy>=1.1.6',
'python-magic>=0.4.12',
'nibabel>=2.1.0',
'psycopg2>=2.7.1'],
classifiers=(
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Operating System :: Unix',
'License :: OSI Approved :: Apache Software License',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
)
)
|
Update to Apache Airflow 1.8.1
|
Update to Apache Airflow 1.8.1
|
Python
|
apache-2.0
|
LREN-CHUV/mri-meta-extract,LREN-CHUV/mri-meta-extract
|
from setuptools import setup
from os import path
import codecs
here = path.abspath(path.dirname(__file__))
with codecs.open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='data-tracking',
version='1.5.11',
zip_safe=False,
url='https://github.com/LREN-CHUV/data-tracking',
description='Extract meta-data from DICOM and NIFTI files',
long_description=long_description,
author='Mirco Nasuti',
author_email='mirco.nasuti@chuv.ch',
license='Apache 2.0',
packages=['data_tracking'],
keywords='mri dicom nifti',
install_requires=[
'airflow>=1.7.0',
'pydicom>=0.9.9',
'SQLAlchemy>=1.1.6',
'python-magic>=0.4.12',
'nibabel>=2.1.0',
'psycopg2>=2.7.1'],
classifiers=(
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Operating System :: Unix',
'License :: OSI Approved :: Apache Software License',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
)
)
Update to Apache Airflow 1.8.1
|
from setuptools import setup
from os import path
import codecs
here = path.abspath(path.dirname(__file__))
with codecs.open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='data-tracking',
version='1.5.11',
zip_safe=False,
url='https://github.com/LREN-CHUV/data-tracking',
description='Extract meta-data from DICOM and NIFTI files',
long_description=long_description,
author='Mirco Nasuti',
author_email='mirco.nasuti@chuv.ch',
license='Apache 2.0',
packages=['data_tracking'],
keywords='mri dicom nifti',
install_requires=[
'apache-airflow>=1.8.1',
'pydicom>=0.9.9',
'SQLAlchemy>=1.1.6',
'python-magic>=0.4.12',
'nibabel>=2.1.0',
'psycopg2>=2.7.1'],
classifiers=(
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Operating System :: Unix',
'License :: OSI Approved :: Apache Software License',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
)
)
|
<commit_before>from setuptools import setup
from os import path
import codecs
here = path.abspath(path.dirname(__file__))
with codecs.open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='data-tracking',
version='1.5.11',
zip_safe=False,
url='https://github.com/LREN-CHUV/data-tracking',
description='Extract meta-data from DICOM and NIFTI files',
long_description=long_description,
author='Mirco Nasuti',
author_email='mirco.nasuti@chuv.ch',
license='Apache 2.0',
packages=['data_tracking'],
keywords='mri dicom nifti',
install_requires=[
'airflow>=1.7.0',
'pydicom>=0.9.9',
'SQLAlchemy>=1.1.6',
'python-magic>=0.4.12',
'nibabel>=2.1.0',
'psycopg2>=2.7.1'],
classifiers=(
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Operating System :: Unix',
'License :: OSI Approved :: Apache Software License',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
)
)
<commit_msg>Update to Apache Airflow 1.8.1<commit_after>
|
from setuptools import setup
from os import path
import codecs
here = path.abspath(path.dirname(__file__))
with codecs.open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='data-tracking',
version='1.5.11',
zip_safe=False,
url='https://github.com/LREN-CHUV/data-tracking',
description='Extract meta-data from DICOM and NIFTI files',
long_description=long_description,
author='Mirco Nasuti',
author_email='mirco.nasuti@chuv.ch',
license='Apache 2.0',
packages=['data_tracking'],
keywords='mri dicom nifti',
install_requires=[
'apache-airflow>=1.8.1',
'pydicom>=0.9.9',
'SQLAlchemy>=1.1.6',
'python-magic>=0.4.12',
'nibabel>=2.1.0',
'psycopg2>=2.7.1'],
classifiers=(
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Operating System :: Unix',
'License :: OSI Approved :: Apache Software License',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
)
)
|
from setuptools import setup
from os import path
import codecs
here = path.abspath(path.dirname(__file__))
with codecs.open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='data-tracking',
version='1.5.11',
zip_safe=False,
url='https://github.com/LREN-CHUV/data-tracking',
description='Extract meta-data from DICOM and NIFTI files',
long_description=long_description,
author='Mirco Nasuti',
author_email='mirco.nasuti@chuv.ch',
license='Apache 2.0',
packages=['data_tracking'],
keywords='mri dicom nifti',
install_requires=[
'airflow>=1.7.0',
'pydicom>=0.9.9',
'SQLAlchemy>=1.1.6',
'python-magic>=0.4.12',
'nibabel>=2.1.0',
'psycopg2>=2.7.1'],
classifiers=(
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Operating System :: Unix',
'License :: OSI Approved :: Apache Software License',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
)
)
Update to Apache Airflow 1.8.1from setuptools import setup
from os import path
import codecs
here = path.abspath(path.dirname(__file__))
with codecs.open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='data-tracking',
version='1.5.11',
zip_safe=False,
url='https://github.com/LREN-CHUV/data-tracking',
description='Extract meta-data from DICOM and NIFTI files',
long_description=long_description,
author='Mirco Nasuti',
author_email='mirco.nasuti@chuv.ch',
license='Apache 2.0',
packages=['data_tracking'],
keywords='mri dicom nifti',
install_requires=[
'apache-airflow>=1.8.1',
'pydicom>=0.9.9',
'SQLAlchemy>=1.1.6',
'python-magic>=0.4.12',
'nibabel>=2.1.0',
'psycopg2>=2.7.1'],
classifiers=(
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Operating System :: Unix',
'License :: OSI Approved :: Apache Software License',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
)
)
|
<commit_before>from setuptools import setup
from os import path
import codecs
here = path.abspath(path.dirname(__file__))
with codecs.open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='data-tracking',
version='1.5.11',
zip_safe=False,
url='https://github.com/LREN-CHUV/data-tracking',
description='Extract meta-data from DICOM and NIFTI files',
long_description=long_description,
author='Mirco Nasuti',
author_email='mirco.nasuti@chuv.ch',
license='Apache 2.0',
packages=['data_tracking'],
keywords='mri dicom nifti',
install_requires=[
'airflow>=1.7.0',
'pydicom>=0.9.9',
'SQLAlchemy>=1.1.6',
'python-magic>=0.4.12',
'nibabel>=2.1.0',
'psycopg2>=2.7.1'],
classifiers=(
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Operating System :: Unix',
'License :: OSI Approved :: Apache Software License',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
)
)
<commit_msg>Update to Apache Airflow 1.8.1<commit_after>from setuptools import setup
from os import path
import codecs
here = path.abspath(path.dirname(__file__))
with codecs.open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='data-tracking',
version='1.5.11',
zip_safe=False,
url='https://github.com/LREN-CHUV/data-tracking',
description='Extract meta-data from DICOM and NIFTI files',
long_description=long_description,
author='Mirco Nasuti',
author_email='mirco.nasuti@chuv.ch',
license='Apache 2.0',
packages=['data_tracking'],
keywords='mri dicom nifti',
install_requires=[
'apache-airflow>=1.8.1',
'pydicom>=0.9.9',
'SQLAlchemy>=1.1.6',
'python-magic>=0.4.12',
'nibabel>=2.1.0',
'psycopg2>=2.7.1'],
classifiers=(
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Operating System :: Unix',
'License :: OSI Approved :: Apache Software License',
'Topic :: Scientific/Engineering :: Bio-Informatics',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
)
)
|
8954f0d63dd45e9eec1a7f935870ac7c7d2d0bf2
|
setup.py
|
setup.py
|
from setuptools import setup

setup(
    name="pytest-xdist",
    # Version is derived from the git tag by setuptools_scm and written to
    # xdist/_version.py at build time.
    use_scm_version={'write_to': 'xdist/_version.py'},
    description='py.test xdist plugin for distributed testing'
    ' and loop-on-failing modes',
    long_description=open('README.rst').read(),
    license='MIT',
    author='holger krekel and contributors',
    author_email='pytest-dev@python.org,holger@merlinux.eu',
    url='https://github.com/pytest-dev/pytest-xdist',
    platforms=['linux', 'osx', 'win32'],
    packages=['xdist'],
    # Register the plugin modules under pytest's "pytest11" entry-point
    # group so pytest discovers them automatically.
    entry_points={
        'pytest11': [
            'xdist = xdist.plugin',
            'xdist.looponfail = xdist.looponfail',
            'xdist.boxed = xdist.boxed',
        ],
    },
    zip_safe=False,
    install_requires=['execnet>=1.1', 'pytest>=2.4.2', 'py>=1.4.22'],
    # setuptools_scm must be importable at build time for use_scm_version.
    setup_requires=['setuptools_scm'],
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: POSIX',
        'Operating System :: Microsoft :: Windows',
        'Operating System :: MacOS :: MacOS X',
        'Topic :: Software Development :: Testing',
        'Topic :: Software Development :: Quality Assurance',
        'Topic :: Utilities',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
    ],
)
|
from setuptools import setup
setup(
name="pytest-xdist",
use_scm_version={'write_to': 'xdist/_version.py'},
description='py.test xdist plugin for distributed testing'
' and loop-on-failing modes',
long_description=open('README.rst').read(),
license='MIT',
author='holger krekel and contributors',
author_email='pytest-dev@python.org,holger@merlinux.eu',
url='https://github.com/pytest-dev/pytest-xdist',
platforms=['linux', 'osx', 'win32'],
packages=['xdist'],
entry_points={
'pytest11': [
'xdist = xdist.plugin',
'xdist.looponfail = xdist.looponfail',
'xdist.boxed = xdist.boxed',
],
},
zip_safe=False,
install_requires=['execnet>=1.1', 'pytest>=2.4.2', 'py>=1.4.22'],
setup_requires=['setuptools_scm'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Framework :: Pytest',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Utilities',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
)
|
Add Framework::Pytest to list of classifiers
|
Add Framework::Pytest to list of classifiers
|
Python
|
mit
|
pytest-dev/pytest-xdist,nicoddemus/pytest-xdist,RonnyPfannschmidt/pytest-xdist
|
from setuptools import setup
setup(
name="pytest-xdist",
use_scm_version={'write_to': 'xdist/_version.py'},
description='py.test xdist plugin for distributed testing'
' and loop-on-failing modes',
long_description=open('README.rst').read(),
license='MIT',
author='holger krekel and contributors',
author_email='pytest-dev@python.org,holger@merlinux.eu',
url='https://github.com/pytest-dev/pytest-xdist',
platforms=['linux', 'osx', 'win32'],
packages=['xdist'],
entry_points={
'pytest11': [
'xdist = xdist.plugin',
'xdist.looponfail = xdist.looponfail',
'xdist.boxed = xdist.boxed',
],
},
zip_safe=False,
install_requires=['execnet>=1.1', 'pytest>=2.4.2', 'py>=1.4.22'],
setup_requires=['setuptools_scm'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Utilities',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
)
Add Framework::Pytest to list of classifiers
|
from setuptools import setup
setup(
name="pytest-xdist",
use_scm_version={'write_to': 'xdist/_version.py'},
description='py.test xdist plugin for distributed testing'
' and loop-on-failing modes',
long_description=open('README.rst').read(),
license='MIT',
author='holger krekel and contributors',
author_email='pytest-dev@python.org,holger@merlinux.eu',
url='https://github.com/pytest-dev/pytest-xdist',
platforms=['linux', 'osx', 'win32'],
packages=['xdist'],
entry_points={
'pytest11': [
'xdist = xdist.plugin',
'xdist.looponfail = xdist.looponfail',
'xdist.boxed = xdist.boxed',
],
},
zip_safe=False,
install_requires=['execnet>=1.1', 'pytest>=2.4.2', 'py>=1.4.22'],
setup_requires=['setuptools_scm'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Framework :: Pytest',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Utilities',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
)
|
<commit_before>from setuptools import setup
setup(
name="pytest-xdist",
use_scm_version={'write_to': 'xdist/_version.py'},
description='py.test xdist plugin for distributed testing'
' and loop-on-failing modes',
long_description=open('README.rst').read(),
license='MIT',
author='holger krekel and contributors',
author_email='pytest-dev@python.org,holger@merlinux.eu',
url='https://github.com/pytest-dev/pytest-xdist',
platforms=['linux', 'osx', 'win32'],
packages=['xdist'],
entry_points={
'pytest11': [
'xdist = xdist.plugin',
'xdist.looponfail = xdist.looponfail',
'xdist.boxed = xdist.boxed',
],
},
zip_safe=False,
install_requires=['execnet>=1.1', 'pytest>=2.4.2', 'py>=1.4.22'],
setup_requires=['setuptools_scm'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Utilities',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
)
<commit_msg>Add Framework::Pytest to list of classifiers<commit_after>
|
from setuptools import setup
setup(
name="pytest-xdist",
use_scm_version={'write_to': 'xdist/_version.py'},
description='py.test xdist plugin for distributed testing'
' and loop-on-failing modes',
long_description=open('README.rst').read(),
license='MIT',
author='holger krekel and contributors',
author_email='pytest-dev@python.org,holger@merlinux.eu',
url='https://github.com/pytest-dev/pytest-xdist',
platforms=['linux', 'osx', 'win32'],
packages=['xdist'],
entry_points={
'pytest11': [
'xdist = xdist.plugin',
'xdist.looponfail = xdist.looponfail',
'xdist.boxed = xdist.boxed',
],
},
zip_safe=False,
install_requires=['execnet>=1.1', 'pytest>=2.4.2', 'py>=1.4.22'],
setup_requires=['setuptools_scm'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Framework :: Pytest',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Utilities',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
)
|
from setuptools import setup
setup(
name="pytest-xdist",
use_scm_version={'write_to': 'xdist/_version.py'},
description='py.test xdist plugin for distributed testing'
' and loop-on-failing modes',
long_description=open('README.rst').read(),
license='MIT',
author='holger krekel and contributors',
author_email='pytest-dev@python.org,holger@merlinux.eu',
url='https://github.com/pytest-dev/pytest-xdist',
platforms=['linux', 'osx', 'win32'],
packages=['xdist'],
entry_points={
'pytest11': [
'xdist = xdist.plugin',
'xdist.looponfail = xdist.looponfail',
'xdist.boxed = xdist.boxed',
],
},
zip_safe=False,
install_requires=['execnet>=1.1', 'pytest>=2.4.2', 'py>=1.4.22'],
setup_requires=['setuptools_scm'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Utilities',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
)
Add Framework::Pytest to list of classifiersfrom setuptools import setup
setup(
name="pytest-xdist",
use_scm_version={'write_to': 'xdist/_version.py'},
description='py.test xdist plugin for distributed testing'
' and loop-on-failing modes',
long_description=open('README.rst').read(),
license='MIT',
author='holger krekel and contributors',
author_email='pytest-dev@python.org,holger@merlinux.eu',
url='https://github.com/pytest-dev/pytest-xdist',
platforms=['linux', 'osx', 'win32'],
packages=['xdist'],
entry_points={
'pytest11': [
'xdist = xdist.plugin',
'xdist.looponfail = xdist.looponfail',
'xdist.boxed = xdist.boxed',
],
},
zip_safe=False,
install_requires=['execnet>=1.1', 'pytest>=2.4.2', 'py>=1.4.22'],
setup_requires=['setuptools_scm'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Framework :: Pytest',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Utilities',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
)
|
<commit_before>from setuptools import setup
setup(
name="pytest-xdist",
use_scm_version={'write_to': 'xdist/_version.py'},
description='py.test xdist plugin for distributed testing'
' and loop-on-failing modes',
long_description=open('README.rst').read(),
license='MIT',
author='holger krekel and contributors',
author_email='pytest-dev@python.org,holger@merlinux.eu',
url='https://github.com/pytest-dev/pytest-xdist',
platforms=['linux', 'osx', 'win32'],
packages=['xdist'],
entry_points={
'pytest11': [
'xdist = xdist.plugin',
'xdist.looponfail = xdist.looponfail',
'xdist.boxed = xdist.boxed',
],
},
zip_safe=False,
install_requires=['execnet>=1.1', 'pytest>=2.4.2', 'py>=1.4.22'],
setup_requires=['setuptools_scm'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Utilities',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
)
<commit_msg>Add Framework::Pytest to list of classifiers<commit_after>from setuptools import setup
setup(
name="pytest-xdist",
use_scm_version={'write_to': 'xdist/_version.py'},
description='py.test xdist plugin for distributed testing'
' and loop-on-failing modes',
long_description=open('README.rst').read(),
license='MIT',
author='holger krekel and contributors',
author_email='pytest-dev@python.org,holger@merlinux.eu',
url='https://github.com/pytest-dev/pytest-xdist',
platforms=['linux', 'osx', 'win32'],
packages=['xdist'],
entry_points={
'pytest11': [
'xdist = xdist.plugin',
'xdist.looponfail = xdist.looponfail',
'xdist.boxed = xdist.boxed',
],
},
zip_safe=False,
install_requires=['execnet>=1.1', 'pytest>=2.4.2', 'py>=1.4.22'],
setup_requires=['setuptools_scm'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Framework :: Pytest',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Utilities',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
],
)
|
8b41a38b50b1676f500aeacf9e4d0ee93a92b2d1
|
sometimes/decorators.py
|
sometimes/decorators.py
|
import random
in_percentage = lambda x: random.randint(1,100) <= x
"""
They've done studies, you know. 50% of the time,
it works every time.
"""
def sometimes(fn):
def wrapped(*args, **kwargs):
wrapped.x += 1
if wrapped.x % 2 == 1:
return fn(*args, **kwargs)
return
wrapped.x = 0
return wrapped
"""
Has a 50/50 chance of calling a function
"""
def sometimesish(fn):
def wrapped(*args, **kwargs):
if random.randint(1,2) == 1:
return fn(*args, **kwargs)
return
return wrapped
"""
Function has a X percentage chance of running
"""
def percent_of_the_time(p):
def decorator(fn):
def wrapped(*args, **kwargs):
if in_percentage(p):
fn(*args, **kwargs)
return
return wrapped
return decorator
"""
Do something a random amount of times
between x & y
"""
def times(x,y):
def decorator(fn):
def wrapped(*args, **kwargs):
while wrapped.min <= wrapped.max:
wrapped.min += 1
fn(*args, **kwargs)
return
wrapped.min = x
wrapped.max = random.randint(x,y)
return wrapped
return decorator
|
import random
in_percentage = lambda x: random.randint(1,100) <= x
"""
They've done studies, you know. 50% of the time,
it works every time.
"""
def sometimes(fn):
def wrapped(*args, **kwargs):
wrapped.x += 1
if wrapped.x % 2 == 1:
return fn(*args, **kwargs)
wrapped.x = 0
return wrapped
half_the_time = sometimes
"""
Has a 50/50 chance of calling a function
"""
def sometimesish(fn):
def wrapped(*args, **kwargs):
if random.randint(1,2) == 1:
return fn(*args, **kwargs)
return wrapped
"""
Function has a X percentage chance of running
"""
def percent_of_the_time(p):
def decorator(fn):
def wrapped(*args, **kwargs):
if in_percentage(p):
fn(*args, **kwargs)
return wrapped
return decorator
"""
Only 5% chance of happening
"""
def rarely(fn):
def wrapped(*args, **kwargs):
if in_percentage(5):
fn(*args, **kwargs)
return wrapped
"""
95% chance of happening
"""
def mostly(fn):
def wrapped(*args, **kwargs):
if in_percentage(95):
fn(*args, **kwargs)
return wrapped
"""
Do something a random amount of times
between x & y
"""
def times(x,y):
def decorator(fn):
def wrapped(*args, **kwargs):
while wrapped.min <= wrapped.max:
wrapped.min += 1
fn(*args, **kwargs)
wrapped.min = x
wrapped.max = random.randint(x,y)
return wrapped
return decorator
|
Add rarely, mostly and other alias
|
Add rarely, mostly and other alias
|
Python
|
mit
|
aaronbassett/sometimes
|
import random
in_percentage = lambda x: random.randint(1,100) <= x
"""
They've done studies, you know. 50% of the time,
it works every time.
"""
def sometimes(fn):
def wrapped(*args, **kwargs):
wrapped.x += 1
if wrapped.x % 2 == 1:
return fn(*args, **kwargs)
return
wrapped.x = 0
return wrapped
"""
Has a 50/50 chance of calling a function
"""
def sometimesish(fn):
def wrapped(*args, **kwargs):
if random.randint(1,2) == 1:
return fn(*args, **kwargs)
return
return wrapped
"""
Function has a X percentage chance of running
"""
def percent_of_the_time(p):
def decorator(fn):
def wrapped(*args, **kwargs):
if in_percentage(p):
fn(*args, **kwargs)
return
return wrapped
return decorator
"""
Do something a random amount of times
between x & y
"""
def times(x,y):
def decorator(fn):
def wrapped(*args, **kwargs):
while wrapped.min <= wrapped.max:
wrapped.min += 1
fn(*args, **kwargs)
return
wrapped.min = x
wrapped.max = random.randint(x,y)
return wrapped
return decoratorAdd rarely, mostly and other alias
|
import random
in_percentage = lambda x: random.randint(1,100) <= x
"""
They've done studies, you know. 50% of the time,
it works every time.
"""
def sometimes(fn):
def wrapped(*args, **kwargs):
wrapped.x += 1
if wrapped.x % 2 == 1:
return fn(*args, **kwargs)
wrapped.x = 0
return wrapped
half_the_time = sometimes
"""
Has a 50/50 chance of calling a function
"""
def sometimesish(fn):
def wrapped(*args, **kwargs):
if random.randint(1,2) == 1:
return fn(*args, **kwargs)
return wrapped
"""
Function has a X percentage chance of running
"""
def percent_of_the_time(p):
def decorator(fn):
def wrapped(*args, **kwargs):
if in_percentage(p):
fn(*args, **kwargs)
return wrapped
return decorator
"""
Only 5% chance of happening
"""
def rarely(fn):
def wrapped(*args, **kwargs):
if in_percentage(5):
fn(*args, **kwargs)
return wrapped
"""
95% chance of happening
"""
def mostly(fn):
def wrapped(*args, **kwargs):
if in_percentage(95):
fn(*args, **kwargs)
return wrapped
"""
Do something a random amount of times
between x & y
"""
def times(x,y):
def decorator(fn):
def wrapped(*args, **kwargs):
while wrapped.min <= wrapped.max:
wrapped.min += 1
fn(*args, **kwargs)
wrapped.min = x
wrapped.max = random.randint(x,y)
return wrapped
return decorator
|
<commit_before>import random
in_percentage = lambda x: random.randint(1,100) <= x
"""
They've done studies, you know. 50% of the time,
it works every time.
"""
def sometimes(fn):
def wrapped(*args, **kwargs):
wrapped.x += 1
if wrapped.x % 2 == 1:
return fn(*args, **kwargs)
return
wrapped.x = 0
return wrapped
"""
Has a 50/50 chance of calling a function
"""
def sometimesish(fn):
def wrapped(*args, **kwargs):
if random.randint(1,2) == 1:
return fn(*args, **kwargs)
return
return wrapped
"""
Function has a X percentage chance of running
"""
def percent_of_the_time(p):
def decorator(fn):
def wrapped(*args, **kwargs):
if in_percentage(p):
fn(*args, **kwargs)
return
return wrapped
return decorator
"""
Do something a random amount of times
between x & y
"""
def times(x,y):
def decorator(fn):
def wrapped(*args, **kwargs):
while wrapped.min <= wrapped.max:
wrapped.min += 1
fn(*args, **kwargs)
return
wrapped.min = x
wrapped.max = random.randint(x,y)
return wrapped
return decorator<commit_msg>Add rarely, mostly and other alias<commit_after>
|
import random
in_percentage = lambda x: random.randint(1,100) <= x
"""
They've done studies, you know. 50% of the time,
it works every time.
"""
def sometimes(fn):
def wrapped(*args, **kwargs):
wrapped.x += 1
if wrapped.x % 2 == 1:
return fn(*args, **kwargs)
wrapped.x = 0
return wrapped
half_the_time = sometimes
"""
Has a 50/50 chance of calling a function
"""
def sometimesish(fn):
def wrapped(*args, **kwargs):
if random.randint(1,2) == 1:
return fn(*args, **kwargs)
return wrapped
"""
Function has a X percentage chance of running
"""
def percent_of_the_time(p):
def decorator(fn):
def wrapped(*args, **kwargs):
if in_percentage(p):
fn(*args, **kwargs)
return wrapped
return decorator
"""
Only 5% chance of happening
"""
def rarely(fn):
def wrapped(*args, **kwargs):
if in_percentage(5):
fn(*args, **kwargs)
return wrapped
"""
95% chance of happening
"""
def mostly(fn):
def wrapped(*args, **kwargs):
if in_percentage(95):
fn(*args, **kwargs)
return wrapped
"""
Do something a random amount of times
between x & y
"""
def times(x,y):
def decorator(fn):
def wrapped(*args, **kwargs):
while wrapped.min <= wrapped.max:
wrapped.min += 1
fn(*args, **kwargs)
wrapped.min = x
wrapped.max = random.randint(x,y)
return wrapped
return decorator
|
import random
in_percentage = lambda x: random.randint(1,100) <= x
"""
They've done studies, you know. 50% of the time,
it works every time.
"""
def sometimes(fn):
def wrapped(*args, **kwargs):
wrapped.x += 1
if wrapped.x % 2 == 1:
return fn(*args, **kwargs)
return
wrapped.x = 0
return wrapped
"""
Has a 50/50 chance of calling a function
"""
def sometimesish(fn):
def wrapped(*args, **kwargs):
if random.randint(1,2) == 1:
return fn(*args, **kwargs)
return
return wrapped
"""
Function has a X percentage chance of running
"""
def percent_of_the_time(p):
def decorator(fn):
def wrapped(*args, **kwargs):
if in_percentage(p):
fn(*args, **kwargs)
return
return wrapped
return decorator
"""
Do something a random amount of times
between x & y
"""
def times(x,y):
def decorator(fn):
def wrapped(*args, **kwargs):
while wrapped.min <= wrapped.max:
wrapped.min += 1
fn(*args, **kwargs)
return
wrapped.min = x
wrapped.max = random.randint(x,y)
return wrapped
return decoratorAdd rarely, mostly and other aliasimport random
in_percentage = lambda x: random.randint(1,100) <= x
"""
They've done studies, you know. 50% of the time,
it works every time.
"""
def sometimes(fn):
def wrapped(*args, **kwargs):
wrapped.x += 1
if wrapped.x % 2 == 1:
return fn(*args, **kwargs)
wrapped.x = 0
return wrapped
half_the_time = sometimes
"""
Has a 50/50 chance of calling a function
"""
def sometimesish(fn):
def wrapped(*args, **kwargs):
if random.randint(1,2) == 1:
return fn(*args, **kwargs)
return wrapped
"""
Function has a X percentage chance of running
"""
def percent_of_the_time(p):
def decorator(fn):
def wrapped(*args, **kwargs):
if in_percentage(p):
fn(*args, **kwargs)
return wrapped
return decorator
"""
Only 5% chance of happening
"""
def rarely(fn):
def wrapped(*args, **kwargs):
if in_percentage(5):
fn(*args, **kwargs)
return wrapped
"""
95% chance of happening
"""
def mostly(fn):
def wrapped(*args, **kwargs):
if in_percentage(95):
fn(*args, **kwargs)
return wrapped
"""
Do something a random amount of times
between x & y
"""
def times(x,y):
def decorator(fn):
def wrapped(*args, **kwargs):
while wrapped.min <= wrapped.max:
wrapped.min += 1
fn(*args, **kwargs)
wrapped.min = x
wrapped.max = random.randint(x,y)
return wrapped
return decorator
|
<commit_before>import random
in_percentage = lambda x: random.randint(1,100) <= x
"""
They've done studies, you know. 50% of the time,
it works every time.
"""
def sometimes(fn):
def wrapped(*args, **kwargs):
wrapped.x += 1
if wrapped.x % 2 == 1:
return fn(*args, **kwargs)
return
wrapped.x = 0
return wrapped
"""
Has a 50/50 chance of calling a function
"""
def sometimesish(fn):
def wrapped(*args, **kwargs):
if random.randint(1,2) == 1:
return fn(*args, **kwargs)
return
return wrapped
"""
Function has a X percentage chance of running
"""
def percent_of_the_time(p):
def decorator(fn):
def wrapped(*args, **kwargs):
if in_percentage(p):
fn(*args, **kwargs)
return
return wrapped
return decorator
"""
Do something a random amount of times
between x & y
"""
def times(x,y):
def decorator(fn):
def wrapped(*args, **kwargs):
while wrapped.min <= wrapped.max:
wrapped.min += 1
fn(*args, **kwargs)
return
wrapped.min = x
wrapped.max = random.randint(x,y)
return wrapped
return decorator<commit_msg>Add rarely, mostly and other alias<commit_after>import random
in_percentage = lambda x: random.randint(1,100) <= x
"""
They've done studies, you know. 50% of the time,
it works every time.
"""
def sometimes(fn):
def wrapped(*args, **kwargs):
wrapped.x += 1
if wrapped.x % 2 == 1:
return fn(*args, **kwargs)
wrapped.x = 0
return wrapped
half_the_time = sometimes
"""
Has a 50/50 chance of calling a function
"""
def sometimesish(fn):
def wrapped(*args, **kwargs):
if random.randint(1,2) == 1:
return fn(*args, **kwargs)
return wrapped
"""
Function has a X percentage chance of running
"""
def percent_of_the_time(p):
def decorator(fn):
def wrapped(*args, **kwargs):
if in_percentage(p):
fn(*args, **kwargs)
return wrapped
return decorator
"""
Only 5% chance of happening
"""
def rarely(fn):
def wrapped(*args, **kwargs):
if in_percentage(5):
fn(*args, **kwargs)
return wrapped
"""
95% chance of happening
"""
def mostly(fn):
def wrapped(*args, **kwargs):
if in_percentage(95):
fn(*args, **kwargs)
return wrapped
"""
Do something a random amount of times
between x & y
"""
def times(x,y):
def decorator(fn):
def wrapped(*args, **kwargs):
while wrapped.min <= wrapped.max:
wrapped.min += 1
fn(*args, **kwargs)
wrapped.min = x
wrapped.max = random.randint(x,y)
return wrapped
return decorator
|
51d6e5f994d0a081b8f381f7c4fbd2b54b78bb02
|
xos/xos/apps.py
|
xos/xos/apps.py
|
from suit.apps import DjangoSuitConfig
class MyDjangoSuitConfig(DjangoSuitConfig):
admin_name = 'XOS'
menu_position = 'vertical'
menu_open_first_child = False
menu = (
{'label': 'Deployments', 'icon':'icon-deployment', 'url': '/admin/core/deployment/'},
{'label': 'Sites', 'icon':'icon-site', 'url': '/admin/core/site/'},
{'label': 'Slices', 'icon':'icon-slice', 'url': '/admin/core/slice/'},
{'label': 'Users', 'icon':'icon-user', 'url': '/admin/core/user/'},
{'label': 'Services', 'icon':'icon-cog', 'url': '/serviceGrid/'},
)
|
from suit.apps import DjangoSuitConfig
class MyDjangoSuitConfig(DjangoSuitConfig):
admin_name = 'XOS'
menu_position = 'vertical'
menu_open_first_child = False
menu = (
{'label': 'Deployments', 'icon':'icon-deployment', 'url': '/admin/core/deployment/'},
{'label': 'Sites', 'icon':'icon-site', 'url': '/admin/core/site/'},
{'label': 'Slices', 'icon':'icon-slice', 'url': '/admin/core/slice/'},
{'label': 'Users', 'icon':'icon-user', 'url': '/admin/core/user/'},
{'label': 'Services', 'icon':'icon-cog', 'url': '/admin/core/service'},
)
|
Revert service grid to tabular view
|
Revert service grid to tabular view
|
Python
|
apache-2.0
|
cboling/xos,cboling/xos,cboling/xos,cboling/xos,cboling/xos
|
from suit.apps import DjangoSuitConfig
class MyDjangoSuitConfig(DjangoSuitConfig):
admin_name = 'XOS'
menu_position = 'vertical'
menu_open_first_child = False
menu = (
{'label': 'Deployments', 'icon':'icon-deployment', 'url': '/admin/core/deployment/'},
{'label': 'Sites', 'icon':'icon-site', 'url': '/admin/core/site/'},
{'label': 'Slices', 'icon':'icon-slice', 'url': '/admin/core/slice/'},
{'label': 'Users', 'icon':'icon-user', 'url': '/admin/core/user/'},
{'label': 'Services', 'icon':'icon-cog', 'url': '/serviceGrid/'},
)Revert service grid to tabular view
|
from suit.apps import DjangoSuitConfig
class MyDjangoSuitConfig(DjangoSuitConfig):
admin_name = 'XOS'
menu_position = 'vertical'
menu_open_first_child = False
menu = (
{'label': 'Deployments', 'icon':'icon-deployment', 'url': '/admin/core/deployment/'},
{'label': 'Sites', 'icon':'icon-site', 'url': '/admin/core/site/'},
{'label': 'Slices', 'icon':'icon-slice', 'url': '/admin/core/slice/'},
{'label': 'Users', 'icon':'icon-user', 'url': '/admin/core/user/'},
{'label': 'Services', 'icon':'icon-cog', 'url': '/admin/core/service'},
)
|
<commit_before>from suit.apps import DjangoSuitConfig
class MyDjangoSuitConfig(DjangoSuitConfig):
admin_name = 'XOS'
menu_position = 'vertical'
menu_open_first_child = False
menu = (
{'label': 'Deployments', 'icon':'icon-deployment', 'url': '/admin/core/deployment/'},
{'label': 'Sites', 'icon':'icon-site', 'url': '/admin/core/site/'},
{'label': 'Slices', 'icon':'icon-slice', 'url': '/admin/core/slice/'},
{'label': 'Users', 'icon':'icon-user', 'url': '/admin/core/user/'},
{'label': 'Services', 'icon':'icon-cog', 'url': '/serviceGrid/'},
)<commit_msg>Revert service grid to tabular view<commit_after>
|
from suit.apps import DjangoSuitConfig
class MyDjangoSuitConfig(DjangoSuitConfig):
admin_name = 'XOS'
menu_position = 'vertical'
menu_open_first_child = False
menu = (
{'label': 'Deployments', 'icon':'icon-deployment', 'url': '/admin/core/deployment/'},
{'label': 'Sites', 'icon':'icon-site', 'url': '/admin/core/site/'},
{'label': 'Slices', 'icon':'icon-slice', 'url': '/admin/core/slice/'},
{'label': 'Users', 'icon':'icon-user', 'url': '/admin/core/user/'},
{'label': 'Services', 'icon':'icon-cog', 'url': '/admin/core/service'},
)
|
from suit.apps import DjangoSuitConfig
class MyDjangoSuitConfig(DjangoSuitConfig):
admin_name = 'XOS'
menu_position = 'vertical'
menu_open_first_child = False
menu = (
{'label': 'Deployments', 'icon':'icon-deployment', 'url': '/admin/core/deployment/'},
{'label': 'Sites', 'icon':'icon-site', 'url': '/admin/core/site/'},
{'label': 'Slices', 'icon':'icon-slice', 'url': '/admin/core/slice/'},
{'label': 'Users', 'icon':'icon-user', 'url': '/admin/core/user/'},
{'label': 'Services', 'icon':'icon-cog', 'url': '/serviceGrid/'},
)Revert service grid to tabular viewfrom suit.apps import DjangoSuitConfig
class MyDjangoSuitConfig(DjangoSuitConfig):
admin_name = 'XOS'
menu_position = 'vertical'
menu_open_first_child = False
menu = (
{'label': 'Deployments', 'icon':'icon-deployment', 'url': '/admin/core/deployment/'},
{'label': 'Sites', 'icon':'icon-site', 'url': '/admin/core/site/'},
{'label': 'Slices', 'icon':'icon-slice', 'url': '/admin/core/slice/'},
{'label': 'Users', 'icon':'icon-user', 'url': '/admin/core/user/'},
{'label': 'Services', 'icon':'icon-cog', 'url': '/admin/core/service'},
)
|
<commit_before>from suit.apps import DjangoSuitConfig
class MyDjangoSuitConfig(DjangoSuitConfig):
admin_name = 'XOS'
menu_position = 'vertical'
menu_open_first_child = False
menu = (
{'label': 'Deployments', 'icon':'icon-deployment', 'url': '/admin/core/deployment/'},
{'label': 'Sites', 'icon':'icon-site', 'url': '/admin/core/site/'},
{'label': 'Slices', 'icon':'icon-slice', 'url': '/admin/core/slice/'},
{'label': 'Users', 'icon':'icon-user', 'url': '/admin/core/user/'},
{'label': 'Services', 'icon':'icon-cog', 'url': '/serviceGrid/'},
)<commit_msg>Revert service grid to tabular view<commit_after>from suit.apps import DjangoSuitConfig
class MyDjangoSuitConfig(DjangoSuitConfig):
admin_name = 'XOS'
menu_position = 'vertical'
menu_open_first_child = False
menu = (
{'label': 'Deployments', 'icon':'icon-deployment', 'url': '/admin/core/deployment/'},
{'label': 'Sites', 'icon':'icon-site', 'url': '/admin/core/site/'},
{'label': 'Slices', 'icon':'icon-slice', 'url': '/admin/core/slice/'},
{'label': 'Users', 'icon':'icon-user', 'url': '/admin/core/user/'},
{'label': 'Services', 'icon':'icon-cog', 'url': '/admin/core/service'},
)
|
4d212e2f796bc6e473292dab7a56ce74d7c96e41
|
moksha/api/widgets/containers/dashboardcontainer.py
|
moksha/api/widgets/containers/dashboardcontainer.py
|
from moksha.api.widgets.layout.layout import layout_js, layout_css, ui_core_js, ui_draggable_js, ui_droppable_js, ui_sortable_js
from tw.api import Widget
from tw.jquery import jquery_js
from moksha.lib.helpers import eval_app_config, ConfigWrapper
from tg import config
class AppListWidget(Widget):
template = 'mako:moksha.api.widgets.containers.templates.layout_applist'
properties = ['category']
def update_params(self, d):
super(AppListWidget, self).update_params(d)
# we want to error out if there is no category
c = d['category']
if isinstance(c, basestring):
for cat in d['layout']:
if cat['label'] == c:
d['category'] = cat
break
applist_widget = AppListWidget('applist');
class DashboardContainer(Widget):
template = 'mako:moksha.api.widgets.containers.templates.dashboardcontainer'
config_key = None
layout = []
def update_params(self, d):
super(DashboardContainer, self).update_params(d)
layout = eval_app_config(config.get(self.config_key, "None"))
if not layout:
if isinstance(self.layout, basestring):
layout = eval_app_config(self.layout)
else:
layout = self.layout
# Filter out any None's in the layout which signify apps which are
# not allowed to run with the current session's authorization level
l = ConfigWrapper.process_wrappers(layout, d)
d['layout'] = l
d['applist_widget'] = applist_widget
return d
|
from tg import config
from tw.api import Widget
from moksha.lib.helpers import eval_app_config, ConfigWrapper
class AppListWidget(Widget):
template = 'mako:moksha.api.widgets.containers.templates.layout_applist'
properties = ['category']
def update_params(self, d):
super(AppListWidget, self).update_params(d)
# we want to error out if there is no category
c = d['category']
if isinstance(c, basestring):
for cat in d['layout']:
if cat['label'] == c:
d['category'] = cat
break
applist_widget = AppListWidget('applist');
class DashboardContainer(Widget):
template = 'mako:moksha.api.widgets.containers.templates.dashboardcontainer'
config_key = None
layout = []
def update_params(self, d):
super(DashboardContainer, self).update_params(d)
layout = eval_app_config(config.get(self.config_key, "None"))
if not layout:
if isinstance(self.layout, basestring):
layout = eval_app_config(self.layout)
else:
layout = self.layout
# Filter out any None's in the layout which signify apps which are
# not allowed to run with the current session's authorization level
l = ConfigWrapper.process_wrappers(layout, d)
d['layout'] = l
d['applist_widget'] = applist_widget
return d
|
Clean up some of our dashboard container imports
|
Clean up some of our dashboard container imports
|
Python
|
apache-2.0
|
lmacken/moksha,ralphbean/moksha,pombredanne/moksha,pombredanne/moksha,ralphbean/moksha,pombredanne/moksha,lmacken/moksha,mokshaproject/moksha,ralphbean/moksha,pombredanne/moksha,mokshaproject/moksha,mokshaproject/moksha,lmacken/moksha,mokshaproject/moksha
|
from moksha.api.widgets.layout.layout import layout_js, layout_css, ui_core_js, ui_draggable_js, ui_droppable_js, ui_sortable_js
from tw.api import Widget
from tw.jquery import jquery_js
from moksha.lib.helpers import eval_app_config, ConfigWrapper
from tg import config
class AppListWidget(Widget):
template = 'mako:moksha.api.widgets.containers.templates.layout_applist'
properties = ['category']
def update_params(self, d):
super(AppListWidget, self).update_params(d)
# we want to error out if there is no category
c = d['category']
if isinstance(c, basestring):
for cat in d['layout']:
if cat['label'] == c:
d['category'] = cat
break
applist_widget = AppListWidget('applist');
class DashboardContainer(Widget):
template = 'mako:moksha.api.widgets.containers.templates.dashboardcontainer'
config_key = None
layout = []
def update_params(self, d):
super(DashboardContainer, self).update_params(d)
layout = eval_app_config(config.get(self.config_key, "None"))
if not layout:
if isinstance(self.layout, basestring):
layout = eval_app_config(self.layout)
else:
layout = self.layout
# Filter out any None's in the layout which signify apps which are
# not allowed to run with the current session's authorization level
l = ConfigWrapper.process_wrappers(layout, d)
d['layout'] = l
d['applist_widget'] = applist_widget
return d
Clean up some of our dashboard container imports
|
from tg import config
from tw.api import Widget
from moksha.lib.helpers import eval_app_config, ConfigWrapper
class AppListWidget(Widget):
template = 'mako:moksha.api.widgets.containers.templates.layout_applist'
properties = ['category']
def update_params(self, d):
super(AppListWidget, self).update_params(d)
# we want to error out if there is no category
c = d['category']
if isinstance(c, basestring):
for cat in d['layout']:
if cat['label'] == c:
d['category'] = cat
break
applist_widget = AppListWidget('applist');
class DashboardContainer(Widget):
template = 'mako:moksha.api.widgets.containers.templates.dashboardcontainer'
config_key = None
layout = []
def update_params(self, d):
super(DashboardContainer, self).update_params(d)
layout = eval_app_config(config.get(self.config_key, "None"))
if not layout:
if isinstance(self.layout, basestring):
layout = eval_app_config(self.layout)
else:
layout = self.layout
# Filter out any None's in the layout which signify apps which are
# not allowed to run with the current session's authorization level
l = ConfigWrapper.process_wrappers(layout, d)
d['layout'] = l
d['applist_widget'] = applist_widget
return d
|
<commit_before>from moksha.api.widgets.layout.layout import layout_js, layout_css, ui_core_js, ui_draggable_js, ui_droppable_js, ui_sortable_js
from tw.api import Widget
from tw.jquery import jquery_js
from moksha.lib.helpers import eval_app_config, ConfigWrapper
from tg import config
class AppListWidget(Widget):
template = 'mako:moksha.api.widgets.containers.templates.layout_applist'
properties = ['category']
def update_params(self, d):
super(AppListWidget, self).update_params(d)
# we want to error out if there is no category
c = d['category']
if isinstance(c, basestring):
for cat in d['layout']:
if cat['label'] == c:
d['category'] = cat
break
applist_widget = AppListWidget('applist');
class DashboardContainer(Widget):
template = 'mako:moksha.api.widgets.containers.templates.dashboardcontainer'
config_key = None
layout = []
def update_params(self, d):
super(DashboardContainer, self).update_params(d)
layout = eval_app_config(config.get(self.config_key, "None"))
if not layout:
if isinstance(self.layout, basestring):
layout = eval_app_config(self.layout)
else:
layout = self.layout
# Filter out any None's in the layout which signify apps which are
# not allowed to run with the current session's authorization level
l = ConfigWrapper.process_wrappers(layout, d)
d['layout'] = l
d['applist_widget'] = applist_widget
return d
<commit_msg>Clean up some of our dashboard container imports<commit_after>
|
from tg import config
from tw.api import Widget
from moksha.lib.helpers import eval_app_config, ConfigWrapper
class AppListWidget(Widget):
template = 'mako:moksha.api.widgets.containers.templates.layout_applist'
properties = ['category']
def update_params(self, d):
super(AppListWidget, self).update_params(d)
# we want to error out if there is no category
c = d['category']
if isinstance(c, basestring):
for cat in d['layout']:
if cat['label'] == c:
d['category'] = cat
break
applist_widget = AppListWidget('applist');
class DashboardContainer(Widget):
template = 'mako:moksha.api.widgets.containers.templates.dashboardcontainer'
config_key = None
layout = []
def update_params(self, d):
super(DashboardContainer, self).update_params(d)
layout = eval_app_config(config.get(self.config_key, "None"))
if not layout:
if isinstance(self.layout, basestring):
layout = eval_app_config(self.layout)
else:
layout = self.layout
# Filter out any None's in the layout which signify apps which are
# not allowed to run with the current session's authorization level
l = ConfigWrapper.process_wrappers(layout, d)
d['layout'] = l
d['applist_widget'] = applist_widget
return d
|
from moksha.api.widgets.layout.layout import layout_js, layout_css, ui_core_js, ui_draggable_js, ui_droppable_js, ui_sortable_js
from tw.api import Widget
from tw.jquery import jquery_js
from moksha.lib.helpers import eval_app_config, ConfigWrapper
from tg import config
class AppListWidget(Widget):
template = 'mako:moksha.api.widgets.containers.templates.layout_applist'
properties = ['category']
def update_params(self, d):
super(AppListWidget, self).update_params(d)
# we want to error out if there is no category
c = d['category']
if isinstance(c, basestring):
for cat in d['layout']:
if cat['label'] == c:
d['category'] = cat
break
applist_widget = AppListWidget('applist');
class DashboardContainer(Widget):
template = 'mako:moksha.api.widgets.containers.templates.dashboardcontainer'
config_key = None
layout = []
def update_params(self, d):
super(DashboardContainer, self).update_params(d)
layout = eval_app_config(config.get(self.config_key, "None"))
if not layout:
if isinstance(self.layout, basestring):
layout = eval_app_config(self.layout)
else:
layout = self.layout
# Filter out any None's in the layout which signify apps which are
# not allowed to run with the current session's authorization level
l = ConfigWrapper.process_wrappers(layout, d)
d['layout'] = l
d['applist_widget'] = applist_widget
return d
Clean up some of our dashboard container importsfrom tg import config
from tw.api import Widget
from moksha.lib.helpers import eval_app_config, ConfigWrapper
class AppListWidget(Widget):
template = 'mako:moksha.api.widgets.containers.templates.layout_applist'
properties = ['category']
def update_params(self, d):
super(AppListWidget, self).update_params(d)
# we want to error out if there is no category
c = d['category']
if isinstance(c, basestring):
for cat in d['layout']:
if cat['label'] == c:
d['category'] = cat
break
applist_widget = AppListWidget('applist');
class DashboardContainer(Widget):
template = 'mako:moksha.api.widgets.containers.templates.dashboardcontainer'
config_key = None
layout = []
def update_params(self, d):
super(DashboardContainer, self).update_params(d)
layout = eval_app_config(config.get(self.config_key, "None"))
if not layout:
if isinstance(self.layout, basestring):
layout = eval_app_config(self.layout)
else:
layout = self.layout
# Filter out any None's in the layout which signify apps which are
# not allowed to run with the current session's authorization level
l = ConfigWrapper.process_wrappers(layout, d)
d['layout'] = l
d['applist_widget'] = applist_widget
return d
|
<commit_before>from moksha.api.widgets.layout.layout import layout_js, layout_css, ui_core_js, ui_draggable_js, ui_droppable_js, ui_sortable_js
from tw.api import Widget
from tw.jquery import jquery_js
from moksha.lib.helpers import eval_app_config, ConfigWrapper
from tg import config
class AppListWidget(Widget):
template = 'mako:moksha.api.widgets.containers.templates.layout_applist'
properties = ['category']
def update_params(self, d):
super(AppListWidget, self).update_params(d)
# we want to error out if there is no category
c = d['category']
if isinstance(c, basestring):
for cat in d['layout']:
if cat['label'] == c:
d['category'] = cat
break
applist_widget = AppListWidget('applist');
class DashboardContainer(Widget):
template = 'mako:moksha.api.widgets.containers.templates.dashboardcontainer'
config_key = None
layout = []
def update_params(self, d):
super(DashboardContainer, self).update_params(d)
layout = eval_app_config(config.get(self.config_key, "None"))
if not layout:
if isinstance(self.layout, basestring):
layout = eval_app_config(self.layout)
else:
layout = self.layout
# Filter out any None's in the layout which signify apps which are
# not allowed to run with the current session's authorization level
l = ConfigWrapper.process_wrappers(layout, d)
d['layout'] = l
d['applist_widget'] = applist_widget
return d
<commit_msg>Clean up some of our dashboard container imports<commit_after>from tg import config
from tw.api import Widget
from moksha.lib.helpers import eval_app_config, ConfigWrapper
class AppListWidget(Widget):
template = 'mako:moksha.api.widgets.containers.templates.layout_applist'
properties = ['category']
def update_params(self, d):
super(AppListWidget, self).update_params(d)
# we want to error out if there is no category
c = d['category']
if isinstance(c, basestring):
for cat in d['layout']:
if cat['label'] == c:
d['category'] = cat
break
applist_widget = AppListWidget('applist');
class DashboardContainer(Widget):
template = 'mako:moksha.api.widgets.containers.templates.dashboardcontainer'
config_key = None
layout = []
def update_params(self, d):
super(DashboardContainer, self).update_params(d)
layout = eval_app_config(config.get(self.config_key, "None"))
if not layout:
if isinstance(self.layout, basestring):
layout = eval_app_config(self.layout)
else:
layout = self.layout
# Filter out any None's in the layout which signify apps which are
# not allowed to run with the current session's authorization level
l = ConfigWrapper.process_wrappers(layout, d)
d['layout'] = l
d['applist_widget'] = applist_widget
return d
|
545e573c9542d33e80dfe5e7e6bb17487835e053
|
profile_collection/startup/00-startup.py
|
profile_collection/startup/00-startup.py
|
import sys
import logging
import matplotlib.pyplot as plt
import numpy as np
plt.ion()
handler = logging.StreamHandler(sys.stderr)
fmt = logging.Formatter("%(asctime)-15s [%(name)5s:%(levelname)s] %(message)s")
handler.setFormatter(fmt)
logging.getLogger('hxntools').addHandler(handler)
logging.getLogger('hxnfly').addHandler(handler)
logging.getLogger('ppmac').addHandler(handler)
logging.getLogger('hxnfly').setLevel(logging.INFO)
logging.getLogger('hxntools').setLevel(logging.INFO)
logging.getLogger('ppmac').setLevel(logging.INFO)
import pandas as pd
# Flyscan results are shown using pandas. Maximum rows/columns to use when
# printing the table:
pd.options.display.width = 180
pd.options.display.max_rows = None
pd.options.display.max_columns = 10
|
import sys
import logging
import matplotlib.pyplot as plt
import numpy as np
plt.ion()
handler = logging.StreamHandler(sys.stderr)
fmt = logging.Formatter("%(asctime)-15s [%(name)5s:%(levelname)s] %(message)s")
handler.setFormatter(fmt)
handler.setLevel(logging.INFO)
logging.getLogger('hxntools').addHandler(handler)
logging.getLogger('hxnfly').addHandler(handler)
logging.getLogger('ppmac').addHandler(handler)
logging.getLogger('hxnfly').setLevel(logging.DEBUG)
logging.getLogger('hxntools').setLevel(logging.DEBUG)
logging.getLogger('ppmac').setLevel(logging.INFO)
import pandas as pd
# Flyscan results are shown using pandas. Maximum rows/columns to use when
# printing the table:
pd.options.display.width = 180
pd.options.display.max_rows = None
pd.options.display.max_columns = 10
|
Tweak logging configuration - record debug info, display info level
|
Tweak logging configuration - record debug info, display info level
|
Python
|
bsd-2-clause
|
NSLS-II-HXN/ipython_ophyd,NSLS-II-HXN/ipython_ophyd
|
import sys
import logging
import matplotlib.pyplot as plt
import numpy as np
plt.ion()
handler = logging.StreamHandler(sys.stderr)
fmt = logging.Formatter("%(asctime)-15s [%(name)5s:%(levelname)s] %(message)s")
handler.setFormatter(fmt)
logging.getLogger('hxntools').addHandler(handler)
logging.getLogger('hxnfly').addHandler(handler)
logging.getLogger('ppmac').addHandler(handler)
logging.getLogger('hxnfly').setLevel(logging.INFO)
logging.getLogger('hxntools').setLevel(logging.INFO)
logging.getLogger('ppmac').setLevel(logging.INFO)
import pandas as pd
# Flyscan results are shown using pandas. Maximum rows/columns to use when
# printing the table:
pd.options.display.width = 180
pd.options.display.max_rows = None
pd.options.display.max_columns = 10
Tweak logging configuration - record debug info, display info level
|
import sys
import logging
import matplotlib.pyplot as plt
import numpy as np
plt.ion()
handler = logging.StreamHandler(sys.stderr)
fmt = logging.Formatter("%(asctime)-15s [%(name)5s:%(levelname)s] %(message)s")
handler.setFormatter(fmt)
handler.setLevel(logging.INFO)
logging.getLogger('hxntools').addHandler(handler)
logging.getLogger('hxnfly').addHandler(handler)
logging.getLogger('ppmac').addHandler(handler)
logging.getLogger('hxnfly').setLevel(logging.DEBUG)
logging.getLogger('hxntools').setLevel(logging.DEBUG)
logging.getLogger('ppmac').setLevel(logging.INFO)
import pandas as pd
# Flyscan results are shown using pandas. Maximum rows/columns to use when
# printing the table:
pd.options.display.width = 180
pd.options.display.max_rows = None
pd.options.display.max_columns = 10
|
<commit_before>import sys
import logging
import matplotlib.pyplot as plt
import numpy as np
plt.ion()
handler = logging.StreamHandler(sys.stderr)
fmt = logging.Formatter("%(asctime)-15s [%(name)5s:%(levelname)s] %(message)s")
handler.setFormatter(fmt)
logging.getLogger('hxntools').addHandler(handler)
logging.getLogger('hxnfly').addHandler(handler)
logging.getLogger('ppmac').addHandler(handler)
logging.getLogger('hxnfly').setLevel(logging.INFO)
logging.getLogger('hxntools').setLevel(logging.INFO)
logging.getLogger('ppmac').setLevel(logging.INFO)
import pandas as pd
# Flyscan results are shown using pandas. Maximum rows/columns to use when
# printing the table:
pd.options.display.width = 180
pd.options.display.max_rows = None
pd.options.display.max_columns = 10
<commit_msg>Tweak logging configuration - record debug info, display info level<commit_after>
|
import sys
import logging
import matplotlib.pyplot as plt
import numpy as np
plt.ion()
handler = logging.StreamHandler(sys.stderr)
fmt = logging.Formatter("%(asctime)-15s [%(name)5s:%(levelname)s] %(message)s")
handler.setFormatter(fmt)
handler.setLevel(logging.INFO)
logging.getLogger('hxntools').addHandler(handler)
logging.getLogger('hxnfly').addHandler(handler)
logging.getLogger('ppmac').addHandler(handler)
logging.getLogger('hxnfly').setLevel(logging.DEBUG)
logging.getLogger('hxntools').setLevel(logging.DEBUG)
logging.getLogger('ppmac').setLevel(logging.INFO)
import pandas as pd
# Flyscan results are shown using pandas. Maximum rows/columns to use when
# printing the table:
pd.options.display.width = 180
pd.options.display.max_rows = None
pd.options.display.max_columns = 10
|
import sys
import logging
import matplotlib.pyplot as plt
import numpy as np
plt.ion()
handler = logging.StreamHandler(sys.stderr)
fmt = logging.Formatter("%(asctime)-15s [%(name)5s:%(levelname)s] %(message)s")
handler.setFormatter(fmt)
logging.getLogger('hxntools').addHandler(handler)
logging.getLogger('hxnfly').addHandler(handler)
logging.getLogger('ppmac').addHandler(handler)
logging.getLogger('hxnfly').setLevel(logging.INFO)
logging.getLogger('hxntools').setLevel(logging.INFO)
logging.getLogger('ppmac').setLevel(logging.INFO)
import pandas as pd
# Flyscan results are shown using pandas. Maximum rows/columns to use when
# printing the table:
pd.options.display.width = 180
pd.options.display.max_rows = None
pd.options.display.max_columns = 10
Tweak logging configuration - record debug info, display info levelimport sys
import logging
import matplotlib.pyplot as plt
import numpy as np
plt.ion()
handler = logging.StreamHandler(sys.stderr)
fmt = logging.Formatter("%(asctime)-15s [%(name)5s:%(levelname)s] %(message)s")
handler.setFormatter(fmt)
handler.setLevel(logging.INFO)
logging.getLogger('hxntools').addHandler(handler)
logging.getLogger('hxnfly').addHandler(handler)
logging.getLogger('ppmac').addHandler(handler)
logging.getLogger('hxnfly').setLevel(logging.DEBUG)
logging.getLogger('hxntools').setLevel(logging.DEBUG)
logging.getLogger('ppmac').setLevel(logging.INFO)
import pandas as pd
# Flyscan results are shown using pandas. Maximum rows/columns to use when
# printing the table:
pd.options.display.width = 180
pd.options.display.max_rows = None
pd.options.display.max_columns = 10
|
<commit_before>import sys
import logging
import matplotlib.pyplot as plt
import numpy as np
plt.ion()
handler = logging.StreamHandler(sys.stderr)
fmt = logging.Formatter("%(asctime)-15s [%(name)5s:%(levelname)s] %(message)s")
handler.setFormatter(fmt)
logging.getLogger('hxntools').addHandler(handler)
logging.getLogger('hxnfly').addHandler(handler)
logging.getLogger('ppmac').addHandler(handler)
logging.getLogger('hxnfly').setLevel(logging.INFO)
logging.getLogger('hxntools').setLevel(logging.INFO)
logging.getLogger('ppmac').setLevel(logging.INFO)
import pandas as pd
# Flyscan results are shown using pandas. Maximum rows/columns to use when
# printing the table:
pd.options.display.width = 180
pd.options.display.max_rows = None
pd.options.display.max_columns = 10
<commit_msg>Tweak logging configuration - record debug info, display info level<commit_after>import sys
import logging
import matplotlib.pyplot as plt
import numpy as np
plt.ion()
handler = logging.StreamHandler(sys.stderr)
fmt = logging.Formatter("%(asctime)-15s [%(name)5s:%(levelname)s] %(message)s")
handler.setFormatter(fmt)
handler.setLevel(logging.INFO)
logging.getLogger('hxntools').addHandler(handler)
logging.getLogger('hxnfly').addHandler(handler)
logging.getLogger('ppmac').addHandler(handler)
logging.getLogger('hxnfly').setLevel(logging.DEBUG)
logging.getLogger('hxntools').setLevel(logging.DEBUG)
logging.getLogger('ppmac').setLevel(logging.INFO)
import pandas as pd
# Flyscan results are shown using pandas. Maximum rows/columns to use when
# printing the table:
pd.options.display.width = 180
pd.options.display.max_rows = None
pd.options.display.max_columns = 10
|
4bc8d4016954e82fb566d7cf43ec21825a0e89de
|
indra/tests/test_tsv_assembler.py
|
indra/tests/test_tsv_assembler.py
|
import os
from indra.assemblers.tsv_assembler import TsvAssembler
from indra.sources.signor import SignorProcessor
# Get some statements from Signor
sp = SignorProcessor()
stmts = sp.statements
def test_tsv_init():
ta = TsvAssembler(stmts)
ta.make_model('tsv_test')
def test_tsv_add_stmts():
ta = TsvAssembler()
ta.add_statements(stmts)
assert len(ta.statements) == len(stmts)
def test_make_model():
ta = TsvAssembler(stmts)
ta.make_model('tsv_test.tsv')
assert os.path.exists('tsv_test.tsv')
|
import os
from indra.sources import signor
from indra.assemblers.tsv_assembler import TsvAssembler
# Get some statements from Signor
from .test_signor import test_data_file, test_complexes_file
sp = signor.process_from_file(test_data_file, test_complexes_file)
stmts = sp.statements
def test_tsv_init():
ta = TsvAssembler(stmts)
ta.make_model('tsv_test')
def test_tsv_add_stmts():
ta = TsvAssembler()
ta.add_statements(stmts)
assert len(ta.statements) == len(stmts)
def test_make_model():
ta = TsvAssembler(stmts)
ta.make_model('tsv_test.tsv')
assert os.path.exists('tsv_test.tsv')
|
Fix TSV Assembler reference to Signor files
|
Fix TSV Assembler reference to Signor files
|
Python
|
bsd-2-clause
|
sorgerlab/belpy,sorgerlab/belpy,johnbachman/indra,pvtodorov/indra,pvtodorov/indra,pvtodorov/indra,johnbachman/indra,johnbachman/belpy,johnbachman/belpy,sorgerlab/indra,sorgerlab/indra,pvtodorov/indra,johnbachman/belpy,sorgerlab/belpy,johnbachman/indra,sorgerlab/indra,bgyori/indra,bgyori/indra,bgyori/indra
|
import os
from indra.assemblers.tsv_assembler import TsvAssembler
from indra.sources.signor import SignorProcessor
# Get some statements from Signor
sp = SignorProcessor()
stmts = sp.statements
def test_tsv_init():
ta = TsvAssembler(stmts)
ta.make_model('tsv_test')
def test_tsv_add_stmts():
ta = TsvAssembler()
ta.add_statements(stmts)
assert len(ta.statements) == len(stmts)
def test_make_model():
ta = TsvAssembler(stmts)
ta.make_model('tsv_test.tsv')
assert os.path.exists('tsv_test.tsv')
Fix TSV Assembler reference to Signor files
|
import os
from indra.sources import signor
from indra.assemblers.tsv_assembler import TsvAssembler
# Get some statements from Signor
from .test_signor import test_data_file, test_complexes_file
sp = signor.process_from_file(test_data_file, test_complexes_file)
stmts = sp.statements
def test_tsv_init():
ta = TsvAssembler(stmts)
ta.make_model('tsv_test')
def test_tsv_add_stmts():
ta = TsvAssembler()
ta.add_statements(stmts)
assert len(ta.statements) == len(stmts)
def test_make_model():
ta = TsvAssembler(stmts)
ta.make_model('tsv_test.tsv')
assert os.path.exists('tsv_test.tsv')
|
<commit_before>import os
from indra.assemblers.tsv_assembler import TsvAssembler
from indra.sources.signor import SignorProcessor
# Get some statements from Signor
sp = SignorProcessor()
stmts = sp.statements
def test_tsv_init():
ta = TsvAssembler(stmts)
ta.make_model('tsv_test')
def test_tsv_add_stmts():
ta = TsvAssembler()
ta.add_statements(stmts)
assert len(ta.statements) == len(stmts)
def test_make_model():
ta = TsvAssembler(stmts)
ta.make_model('tsv_test.tsv')
assert os.path.exists('tsv_test.tsv')
<commit_msg>Fix TSV Assembler reference to Signor files<commit_after>
|
import os
from indra.sources import signor
from indra.assemblers.tsv_assembler import TsvAssembler
# Get some statements from Signor
from .test_signor import test_data_file, test_complexes_file
sp = signor.process_from_file(test_data_file, test_complexes_file)
stmts = sp.statements
def test_tsv_init():
ta = TsvAssembler(stmts)
ta.make_model('tsv_test')
def test_tsv_add_stmts():
ta = TsvAssembler()
ta.add_statements(stmts)
assert len(ta.statements) == len(stmts)
def test_make_model():
ta = TsvAssembler(stmts)
ta.make_model('tsv_test.tsv')
assert os.path.exists('tsv_test.tsv')
|
import os
from indra.assemblers.tsv_assembler import TsvAssembler
from indra.sources.signor import SignorProcessor
# Get some statements from Signor
sp = SignorProcessor()
stmts = sp.statements
def test_tsv_init():
ta = TsvAssembler(stmts)
ta.make_model('tsv_test')
def test_tsv_add_stmts():
ta = TsvAssembler()
ta.add_statements(stmts)
assert len(ta.statements) == len(stmts)
def test_make_model():
ta = TsvAssembler(stmts)
ta.make_model('tsv_test.tsv')
assert os.path.exists('tsv_test.tsv')
Fix TSV Assembler reference to Signor filesimport os
from indra.sources import signor
from indra.assemblers.tsv_assembler import TsvAssembler
# Get some statements from Signor
from .test_signor import test_data_file, test_complexes_file
sp = signor.process_from_file(test_data_file, test_complexes_file)
stmts = sp.statements
def test_tsv_init():
ta = TsvAssembler(stmts)
ta.make_model('tsv_test')
def test_tsv_add_stmts():
ta = TsvAssembler()
ta.add_statements(stmts)
assert len(ta.statements) == len(stmts)
def test_make_model():
ta = TsvAssembler(stmts)
ta.make_model('tsv_test.tsv')
assert os.path.exists('tsv_test.tsv')
|
<commit_before>import os
from indra.assemblers.tsv_assembler import TsvAssembler
from indra.sources.signor import SignorProcessor
# Get some statements from Signor
sp = SignorProcessor()
stmts = sp.statements
def test_tsv_init():
ta = TsvAssembler(stmts)
ta.make_model('tsv_test')
def test_tsv_add_stmts():
ta = TsvAssembler()
ta.add_statements(stmts)
assert len(ta.statements) == len(stmts)
def test_make_model():
ta = TsvAssembler(stmts)
ta.make_model('tsv_test.tsv')
assert os.path.exists('tsv_test.tsv')
<commit_msg>Fix TSV Assembler reference to Signor files<commit_after>import os
from indra.sources import signor
from indra.assemblers.tsv_assembler import TsvAssembler
# Get some statements from Signor
from .test_signor import test_data_file, test_complexes_file
sp = signor.process_from_file(test_data_file, test_complexes_file)
stmts = sp.statements
def test_tsv_init():
ta = TsvAssembler(stmts)
ta.make_model('tsv_test')
def test_tsv_add_stmts():
ta = TsvAssembler()
ta.add_statements(stmts)
assert len(ta.statements) == len(stmts)
def test_make_model():
ta = TsvAssembler(stmts)
ta.make_model('tsv_test.tsv')
assert os.path.exists('tsv_test.tsv')
|
37715104dec586ea67b253e4e7ed35795cb5ea8c
|
track.py
|
track.py
|
# from google_measurement_protocol import Event, report
import uuid
GENDERS = {
'female': 'Gender Female',
'male': 'Gender Male'
}
def log_fetch(count, gender):
label = GENDERS.get(gender, 'Gender Neutral')
client_id = uuid.uuid4()
# event = Event('API', 'Fetch', label=label, value=count)
# report('UA-68765997-3', client_id, event)
|
from google_measurement_protocol import event, report
import uuid
GENDERS = {
'female': 'Gender Female',
'male': 'Gender Male'
}
def log_fetch(count, gender):
label = GENDERS.get(gender, 'Gender Neutral')
client_id = uuid.uuid4()
data = event('API', 'Fetch', label=label, value=count)
report('UA-68765997-3', client_id, data)
|
Add google measurement protocol back
|
Add google measurement protocol back
|
Python
|
mit
|
reneepadgham/diverseui,reneepadgham/diverseui,reneepadgham/diverseui
|
# from google_measurement_protocol import Event, report
import uuid
GENDERS = {
'female': 'Gender Female',
'male': 'Gender Male'
}
def log_fetch(count, gender):
label = GENDERS.get(gender, 'Gender Neutral')
client_id = uuid.uuid4()
# event = Event('API', 'Fetch', label=label, value=count)
# report('UA-68765997-3', client_id, event)
Add google measurement protocol back
|
from google_measurement_protocol import event, report
import uuid
GENDERS = {
'female': 'Gender Female',
'male': 'Gender Male'
}
def log_fetch(count, gender):
label = GENDERS.get(gender, 'Gender Neutral')
client_id = uuid.uuid4()
data = event('API', 'Fetch', label=label, value=count)
report('UA-68765997-3', client_id, data)
|
<commit_before># from google_measurement_protocol import Event, report
import uuid
GENDERS = {
'female': 'Gender Female',
'male': 'Gender Male'
}
def log_fetch(count, gender):
label = GENDERS.get(gender, 'Gender Neutral')
client_id = uuid.uuid4()
# event = Event('API', 'Fetch', label=label, value=count)
# report('UA-68765997-3', client_id, event)
<commit_msg>Add google measurement protocol back<commit_after>
|
from google_measurement_protocol import event, report
import uuid
GENDERS = {
'female': 'Gender Female',
'male': 'Gender Male'
}
def log_fetch(count, gender):
label = GENDERS.get(gender, 'Gender Neutral')
client_id = uuid.uuid4()
data = event('API', 'Fetch', label=label, value=count)
report('UA-68765997-3', client_id, data)
|
# from google_measurement_protocol import Event, report
import uuid
GENDERS = {
'female': 'Gender Female',
'male': 'Gender Male'
}
def log_fetch(count, gender):
label = GENDERS.get(gender, 'Gender Neutral')
client_id = uuid.uuid4()
# event = Event('API', 'Fetch', label=label, value=count)
# report('UA-68765997-3', client_id, event)
Add google measurement protocol backfrom google_measurement_protocol import event, report
import uuid
GENDERS = {
'female': 'Gender Female',
'male': 'Gender Male'
}
def log_fetch(count, gender):
label = GENDERS.get(gender, 'Gender Neutral')
client_id = uuid.uuid4()
data = event('API', 'Fetch', label=label, value=count)
report('UA-68765997-3', client_id, data)
|
<commit_before># from google_measurement_protocol import Event, report
import uuid
GENDERS = {
'female': 'Gender Female',
'male': 'Gender Male'
}
def log_fetch(count, gender):
label = GENDERS.get(gender, 'Gender Neutral')
client_id = uuid.uuid4()
# event = Event('API', 'Fetch', label=label, value=count)
# report('UA-68765997-3', client_id, event)
<commit_msg>Add google measurement protocol back<commit_after>from google_measurement_protocol import event, report
import uuid
GENDERS = {
'female': 'Gender Female',
'male': 'Gender Male'
}
def log_fetch(count, gender):
label = GENDERS.get(gender, 'Gender Neutral')
client_id = uuid.uuid4()
data = event('API', 'Fetch', label=label, value=count)
report('UA-68765997-3', client_id, data)
|
30fae197ff6561a58df33868b3379a41d6a9d9dd
|
settings_test.py
|
settings_test.py
|
SQLALCHEMY_DATABASE_TEST_URI = 'postgresql://postgres:@localhost/pybossa'
GOOGLE_CLIENT_ID = ''
GOOGLE_CLIENT_SECRET = ''
TWITTER_CONSUMER_KEY=''
TWITTER_CONSUMER_SECRET=''
FACEBOOK_APP_ID=''
FACEBOOK_APP_SECRET=''
TERMSOFUSE = 'http://okfn.org/terms-of-use/'
DATAUSE = 'http://opendatacommons.org/licenses/by/'
ITSDANGEORUSKEY = 'its-dangerous-key'
LOGO = 'logo.png'
MAIL_SERVER = 'localhost'
MAIL_USERNAME = None
MAIL_PASSWORD = None
MAIL_PORT = 25
MAIL_FAIL_SILENTLY = False
MAIL_DEFAULT_SENDER = 'PyBossa Support <info@pybossa.com>'
ANNOUNCEMENT = {'admin': 'Root Message', 'user': 'User Message', 'owner': 'Owner Message'}
LOCALES = ['en', 'es']
|
SQLALCHEMY_DATABASE_TEST_URI = 'postgresql://postgres:@localhost/pybossa'
GOOGLE_CLIENT_ID = ''
GOOGLE_CLIENT_SECRET = ''
TWITTER_CONSUMER_KEY=''
TWITTER_CONSUMER_SECRET=''
FACEBOOK_APP_ID=''
FACEBOOK_APP_SECRET=''
TERMSOFUSE = 'http://okfn.org/terms-of-use/'
DATAUSE = 'http://opendatacommons.org/licenses/by/'
ITSDANGEORUSKEY = 'its-dangerous-key'
LOGO = 'logo.png'
MAIL_SERVER = 'localhost'
MAIL_USERNAME = None
MAIL_PASSWORD = None
MAIL_PORT = 25
MAIL_FAIL_SILENTLY = False
MAIL_DEFAULT_SENDER = 'PyBossa Support <info@pybossa.com>'
ANNOUNCEMENT = {'admin': 'Root Message', 'user': 'User Message', 'owner': 'Owner Message'}
LOCALES = ['en', 'es']
ENFORCE_PRIVACY = False
|
Add ENFORCE_PRIVACY to Travis testing settings.
|
Add ENFORCE_PRIVACY to Travis testing settings.
|
Python
|
agpl-3.0
|
geotagx/geotagx-pybossa-archive,inteligencia-coletiva-lsd/pybossa,Scifabric/pybossa,geotagx/geotagx-pybossa-archive,jean/pybossa,CulturePlex/pybossa,PyBossa/pybossa,geotagx/geotagx-pybossa-archive,CulturePlex/pybossa,inteligencia-coletiva-lsd/pybossa,OpenNewsLabs/pybossa,PyBossa/pybossa,OpenNewsLabs/pybossa,proyectos-analizo-info/pybossa-analizo-info,geotagx/pybossa,jean/pybossa,proyectos-analizo-info/pybossa-analizo-info,stefanhahmann/pybossa,CulturePlex/pybossa,geotagx/geotagx-pybossa-archive,harihpr/tweetclickers,stefanhahmann/pybossa,proyectos-analizo-info/pybossa-analizo-info,harihpr/tweetclickers,geotagx/geotagx-pybossa-archive,geotagx/pybossa,Scifabric/pybossa
|
SQLALCHEMY_DATABASE_TEST_URI = 'postgresql://postgres:@localhost/pybossa'
GOOGLE_CLIENT_ID = ''
GOOGLE_CLIENT_SECRET = ''
TWITTER_CONSUMER_KEY=''
TWITTER_CONSUMER_SECRET=''
FACEBOOK_APP_ID=''
FACEBOOK_APP_SECRET=''
TERMSOFUSE = 'http://okfn.org/terms-of-use/'
DATAUSE = 'http://opendatacommons.org/licenses/by/'
ITSDANGEORUSKEY = 'its-dangerous-key'
LOGO = 'logo.png'
MAIL_SERVER = 'localhost'
MAIL_USERNAME = None
MAIL_PASSWORD = None
MAIL_PORT = 25
MAIL_FAIL_SILENTLY = False
MAIL_DEFAULT_SENDER = 'PyBossa Support <info@pybossa.com>'
ANNOUNCEMENT = {'admin': 'Root Message', 'user': 'User Message', 'owner': 'Owner Message'}
LOCALES = ['en', 'es']
Add ENFORCE_PRIVACY to Travis testing settings.
|
SQLALCHEMY_DATABASE_TEST_URI = 'postgresql://postgres:@localhost/pybossa'
GOOGLE_CLIENT_ID = ''
GOOGLE_CLIENT_SECRET = ''
TWITTER_CONSUMER_KEY=''
TWITTER_CONSUMER_SECRET=''
FACEBOOK_APP_ID=''
FACEBOOK_APP_SECRET=''
TERMSOFUSE = 'http://okfn.org/terms-of-use/'
DATAUSE = 'http://opendatacommons.org/licenses/by/'
ITSDANGEORUSKEY = 'its-dangerous-key'
LOGO = 'logo.png'
MAIL_SERVER = 'localhost'
MAIL_USERNAME = None
MAIL_PASSWORD = None
MAIL_PORT = 25
MAIL_FAIL_SILENTLY = False
MAIL_DEFAULT_SENDER = 'PyBossa Support <info@pybossa.com>'
ANNOUNCEMENT = {'admin': 'Root Message', 'user': 'User Message', 'owner': 'Owner Message'}
LOCALES = ['en', 'es']
ENFORCE_PRIVACY = False
|
<commit_before>SQLALCHEMY_DATABASE_TEST_URI = 'postgresql://postgres:@localhost/pybossa'
GOOGLE_CLIENT_ID = ''
GOOGLE_CLIENT_SECRET = ''
TWITTER_CONSUMER_KEY=''
TWITTER_CONSUMER_SECRET=''
FACEBOOK_APP_ID=''
FACEBOOK_APP_SECRET=''
TERMSOFUSE = 'http://okfn.org/terms-of-use/'
DATAUSE = 'http://opendatacommons.org/licenses/by/'
ITSDANGEORUSKEY = 'its-dangerous-key'
LOGO = 'logo.png'
MAIL_SERVER = 'localhost'
MAIL_USERNAME = None
MAIL_PASSWORD = None
MAIL_PORT = 25
MAIL_FAIL_SILENTLY = False
MAIL_DEFAULT_SENDER = 'PyBossa Support <info@pybossa.com>'
ANNOUNCEMENT = {'admin': 'Root Message', 'user': 'User Message', 'owner': 'Owner Message'}
LOCALES = ['en', 'es']
<commit_msg>Add ENFORCE_PRIVACY to Travis testing settings.<commit_after>
|
SQLALCHEMY_DATABASE_TEST_URI = 'postgresql://postgres:@localhost/pybossa'
GOOGLE_CLIENT_ID = ''
GOOGLE_CLIENT_SECRET = ''
TWITTER_CONSUMER_KEY=''
TWITTER_CONSUMER_SECRET=''
FACEBOOK_APP_ID=''
FACEBOOK_APP_SECRET=''
TERMSOFUSE = 'http://okfn.org/terms-of-use/'
DATAUSE = 'http://opendatacommons.org/licenses/by/'
ITSDANGEORUSKEY = 'its-dangerous-key'
LOGO = 'logo.png'
MAIL_SERVER = 'localhost'
MAIL_USERNAME = None
MAIL_PASSWORD = None
MAIL_PORT = 25
MAIL_FAIL_SILENTLY = False
MAIL_DEFAULT_SENDER = 'PyBossa Support <info@pybossa.com>'
ANNOUNCEMENT = {'admin': 'Root Message', 'user': 'User Message', 'owner': 'Owner Message'}
LOCALES = ['en', 'es']
ENFORCE_PRIVACY = False
|
SQLALCHEMY_DATABASE_TEST_URI = 'postgresql://postgres:@localhost/pybossa'
GOOGLE_CLIENT_ID = ''
GOOGLE_CLIENT_SECRET = ''
TWITTER_CONSUMER_KEY=''
TWITTER_CONSUMER_SECRET=''
FACEBOOK_APP_ID=''
FACEBOOK_APP_SECRET=''
TERMSOFUSE = 'http://okfn.org/terms-of-use/'
DATAUSE = 'http://opendatacommons.org/licenses/by/'
ITSDANGEORUSKEY = 'its-dangerous-key'
LOGO = 'logo.png'
MAIL_SERVER = 'localhost'
MAIL_USERNAME = None
MAIL_PASSWORD = None
MAIL_PORT = 25
MAIL_FAIL_SILENTLY = False
MAIL_DEFAULT_SENDER = 'PyBossa Support <info@pybossa.com>'
ANNOUNCEMENT = {'admin': 'Root Message', 'user': 'User Message', 'owner': 'Owner Message'}
LOCALES = ['en', 'es']
Add ENFORCE_PRIVACY to Travis testing settings.SQLALCHEMY_DATABASE_TEST_URI = 'postgresql://postgres:@localhost/pybossa'
GOOGLE_CLIENT_ID = ''
GOOGLE_CLIENT_SECRET = ''
TWITTER_CONSUMER_KEY=''
TWITTER_CONSUMER_SECRET=''
FACEBOOK_APP_ID=''
FACEBOOK_APP_SECRET=''
TERMSOFUSE = 'http://okfn.org/terms-of-use/'
DATAUSE = 'http://opendatacommons.org/licenses/by/'
ITSDANGEORUSKEY = 'its-dangerous-key'
LOGO = 'logo.png'
MAIL_SERVER = 'localhost'
MAIL_USERNAME = None
MAIL_PASSWORD = None
MAIL_PORT = 25
MAIL_FAIL_SILENTLY = False
MAIL_DEFAULT_SENDER = 'PyBossa Support <info@pybossa.com>'
ANNOUNCEMENT = {'admin': 'Root Message', 'user': 'User Message', 'owner': 'Owner Message'}
LOCALES = ['en', 'es']
ENFORCE_PRIVACY = False
|
<commit_before>SQLALCHEMY_DATABASE_TEST_URI = 'postgresql://postgres:@localhost/pybossa'
GOOGLE_CLIENT_ID = ''
GOOGLE_CLIENT_SECRET = ''
TWITTER_CONSUMER_KEY=''
TWITTER_CONSUMER_SECRET=''
FACEBOOK_APP_ID=''
FACEBOOK_APP_SECRET=''
TERMSOFUSE = 'http://okfn.org/terms-of-use/'
DATAUSE = 'http://opendatacommons.org/licenses/by/'
ITSDANGEORUSKEY = 'its-dangerous-key'
LOGO = 'logo.png'
MAIL_SERVER = 'localhost'
MAIL_USERNAME = None
MAIL_PASSWORD = None
MAIL_PORT = 25
MAIL_FAIL_SILENTLY = False
MAIL_DEFAULT_SENDER = 'PyBossa Support <info@pybossa.com>'
ANNOUNCEMENT = {'admin': 'Root Message', 'user': 'User Message', 'owner': 'Owner Message'}
LOCALES = ['en', 'es']
<commit_msg>Add ENFORCE_PRIVACY to Travis testing settings.<commit_after>SQLALCHEMY_DATABASE_TEST_URI = 'postgresql://postgres:@localhost/pybossa'
GOOGLE_CLIENT_ID = ''
GOOGLE_CLIENT_SECRET = ''
TWITTER_CONSUMER_KEY=''
TWITTER_CONSUMER_SECRET=''
FACEBOOK_APP_ID=''
FACEBOOK_APP_SECRET=''
TERMSOFUSE = 'http://okfn.org/terms-of-use/'
DATAUSE = 'http://opendatacommons.org/licenses/by/'
ITSDANGEORUSKEY = 'its-dangerous-key'
LOGO = 'logo.png'
MAIL_SERVER = 'localhost'
MAIL_USERNAME = None
MAIL_PASSWORD = None
MAIL_PORT = 25
MAIL_FAIL_SILENTLY = False
MAIL_DEFAULT_SENDER = 'PyBossa Support <info@pybossa.com>'
ANNOUNCEMENT = {'admin': 'Root Message', 'user': 'User Message', 'owner': 'Owner Message'}
LOCALES = ['en', 'es']
ENFORCE_PRIVACY = False
|
d1e64b8cf97f8a89d61ecd5d5bd7f9ba6f5ff6b8
|
extruct/jsonld.py
|
extruct/jsonld.py
|
# -*- coding: utf-8 -*-
"""
JSON-LD extractor
"""
import json
import re
import lxml.etree
import lxml.html
HTML_OR_JS_COMMENTLINE = re.compile('^\s*(//.*|<!--.*-->)')
class JsonLdExtractor(object):
_xp_jsonld = lxml.etree.XPath('descendant-or-self::script[@type="application/ld+json"]')
def extract(self, htmlstring, base_url=None, encoding="UTF-8"):
parser = lxml.html.HTMLParser(encoding=encoding)
lxmldoc = lxml.html.fromstring(htmlstring, parser=parser)
return self.extract_items(lxmldoc, base_url=base_url)
def extract_items(self, document, base_url=None):
return [item for items in map(self._extract_items,
self._xp_jsonld(document))
for item in items
if item]
def _extract_items(self, node):
script = node.xpath('string()')
try:
data = json.loads(script)
except ValueError:
# sometimes JSON-decoding errors are due to leading HTML or JavaScript comments
try:
data = json.loads(HTML_OR_JS_COMMENTLINE.sub('', script))
except ValueError: # ValueError again because json.JSONDecodeError(bases from ValueError) appears since Python 3.5
# some pages have JSON-LD data with control characters, json.loads should use strict=False
data = json.loads(script, strict=False)
if isinstance(data, list):
return data
elif isinstance(data, dict):
return [data]
|
# -*- coding: utf-8 -*-
"""
JSON-LD extractor
"""
import json
import re
import lxml.etree
import lxml.html
HTML_OR_JS_COMMENTLINE = re.compile('^\s*(//.*|<!--.*-->)')
class JsonLdExtractor(object):
_xp_jsonld = lxml.etree.XPath('descendant-or-self::script[@type="application/ld+json"]')
def extract(self, htmlstring, base_url=None, encoding="UTF-8"):
parser = lxml.html.HTMLParser(encoding=encoding)
lxmldoc = lxml.html.fromstring(htmlstring, parser=parser)
return self.extract_items(lxmldoc, base_url=base_url)
def extract_items(self, document, base_url=None):
return [item for items in map(self._extract_items,
self._xp_jsonld(document))
for item in items
if item]
def _extract_items(self, node):
script = node.xpath('string()')
# now do remove possible leading HTML/JavaScript comment first, allow control characters to be loaded
# TODO: `strict=False` can be configurable if needed
data = json.loads(HTML_OR_JS_COMMENTLINE.sub('', script), strict=False)
if isinstance(data, list):
return data
elif isinstance(data, dict):
return [data]
|
Remove leading comments and allow control characters directly.
|
Mod: Remove leading comments and allow control characters directly.
|
Python
|
bsd-3-clause
|
scrapinghub/extruct
|
# -*- coding: utf-8 -*-
"""
JSON-LD extractor
"""
import json
import re
import lxml.etree
import lxml.html
HTML_OR_JS_COMMENTLINE = re.compile('^\s*(//.*|<!--.*-->)')
class JsonLdExtractor(object):
_xp_jsonld = lxml.etree.XPath('descendant-or-self::script[@type="application/ld+json"]')
def extract(self, htmlstring, base_url=None, encoding="UTF-8"):
parser = lxml.html.HTMLParser(encoding=encoding)
lxmldoc = lxml.html.fromstring(htmlstring, parser=parser)
return self.extract_items(lxmldoc, base_url=base_url)
def extract_items(self, document, base_url=None):
return [item for items in map(self._extract_items,
self._xp_jsonld(document))
for item in items
if item]
def _extract_items(self, node):
script = node.xpath('string()')
try:
data = json.loads(script)
except ValueError:
# sometimes JSON-decoding errors are due to leading HTML or JavaScript comments
try:
data = json.loads(HTML_OR_JS_COMMENTLINE.sub('', script))
except ValueError: # ValueError again because json.JSONDecodeError(bases from ValueError) appears since Python 3.5
# some pages have JSON-LD data with control characters, json.loads should use strict=False
data = json.loads(script, strict=False)
if isinstance(data, list):
return data
elif isinstance(data, dict):
return [data]
Mod: Remove leading comments and allow control characters directly.
|
# -*- coding: utf-8 -*-
"""
JSON-LD extractor
"""
import json
import re
import lxml.etree
import lxml.html
HTML_OR_JS_COMMENTLINE = re.compile('^\s*(//.*|<!--.*-->)')
class JsonLdExtractor(object):
_xp_jsonld = lxml.etree.XPath('descendant-or-self::script[@type="application/ld+json"]')
def extract(self, htmlstring, base_url=None, encoding="UTF-8"):
parser = lxml.html.HTMLParser(encoding=encoding)
lxmldoc = lxml.html.fromstring(htmlstring, parser=parser)
return self.extract_items(lxmldoc, base_url=base_url)
def extract_items(self, document, base_url=None):
return [item for items in map(self._extract_items,
self._xp_jsonld(document))
for item in items
if item]
def _extract_items(self, node):
script = node.xpath('string()')
# now do remove possible leading HTML/JavaScript comment first, allow control characters to be loaded
# TODO: `strict=False` can be configurable if needed
data = json.loads(HTML_OR_JS_COMMENTLINE.sub('', script), strict=False)
if isinstance(data, list):
return data
elif isinstance(data, dict):
return [data]
|
<commit_before># -*- coding: utf-8 -*-
"""
JSON-LD extractor
"""
import json
import re
import lxml.etree
import lxml.html
HTML_OR_JS_COMMENTLINE = re.compile('^\s*(//.*|<!--.*-->)')
class JsonLdExtractor(object):
_xp_jsonld = lxml.etree.XPath('descendant-or-self::script[@type="application/ld+json"]')
def extract(self, htmlstring, base_url=None, encoding="UTF-8"):
parser = lxml.html.HTMLParser(encoding=encoding)
lxmldoc = lxml.html.fromstring(htmlstring, parser=parser)
return self.extract_items(lxmldoc, base_url=base_url)
def extract_items(self, document, base_url=None):
return [item for items in map(self._extract_items,
self._xp_jsonld(document))
for item in items
if item]
def _extract_items(self, node):
script = node.xpath('string()')
try:
data = json.loads(script)
except ValueError:
# sometimes JSON-decoding errors are due to leading HTML or JavaScript comments
try:
data = json.loads(HTML_OR_JS_COMMENTLINE.sub('', script))
except ValueError: # ValueError again because json.JSONDecodeError(bases from ValueError) appears since Python 3.5
# some pages have JSON-LD data with control characters, json.loads should use strict=False
data = json.loads(script, strict=False)
if isinstance(data, list):
return data
elif isinstance(data, dict):
return [data]
<commit_msg>Mod: Remove leading comments and allow control characters directly.<commit_after>
|
# -*- coding: utf-8 -*-
"""
JSON-LD extractor
"""
import json
import re
import lxml.etree
import lxml.html
HTML_OR_JS_COMMENTLINE = re.compile('^\s*(//.*|<!--.*-->)')
class JsonLdExtractor(object):
    """Extract JSON-LD items from <script type="application/ld+json"> tags.

    ``extract`` takes raw HTML, ``extract_items`` takes an already-parsed
    lxml document; both return a flat list of dicts.
    """

    # XPath matching every JSON-LD <script> element in (or below) the node.
    _xp_jsonld = lxml.etree.XPath('descendant-or-self::script[@type="application/ld+json"]')

    def extract(self, htmlstring, base_url=None, encoding="UTF-8"):
        """Parse *htmlstring* (decoded with *encoding*) and return its JSON-LD items."""
        parser = lxml.html.HTMLParser(encoding=encoding)
        lxmldoc = lxml.html.fromstring(htmlstring, parser=parser)
        return self.extract_items(lxmldoc, base_url=base_url)

    def extract_items(self, document, base_url=None):
        """Return all JSON-LD items found in *document* as a flat list of dicts.

        NOTE(review): ``base_url`` is accepted but unused in this class --
        presumably kept for interface parity with other extractors; confirm.
        """
        # Each matching <script> may hold a single dict or a list of dicts;
        # flatten them and drop falsy entries.
        return [item for items in map(self._extract_items,
                                      self._xp_jsonld(document))
                for item in items
                if item]

    def _extract_items(self, node):
        """Decode one <script> element's text into a list of dicts.

        NOTE(review): returns None when the payload is neither a dict nor a
        list (e.g. a bare string), which would make extract_items iterate
        over None -- confirm whether such pages occur in practice.
        """
        script = node.xpath('string()')
        # now do remove possible leading HTML/JavaScript comment first, allow control characters to be loaded
        # TODO: `strict=False` can be configurable if needed
        data = json.loads(HTML_OR_JS_COMMENTLINE.sub('', script), strict=False)
        if isinstance(data, list):
            return data
        elif isinstance(data, dict):
            return [data]
|
# -*- coding: utf-8 -*-
"""
JSON-LD extractor
"""
import json
import re
import lxml.etree
import lxml.html
HTML_OR_JS_COMMENTLINE = re.compile('^\s*(//.*|<!--.*-->)')
class JsonLdExtractor(object):
_xp_jsonld = lxml.etree.XPath('descendant-or-self::script[@type="application/ld+json"]')
def extract(self, htmlstring, base_url=None, encoding="UTF-8"):
parser = lxml.html.HTMLParser(encoding=encoding)
lxmldoc = lxml.html.fromstring(htmlstring, parser=parser)
return self.extract_items(lxmldoc, base_url=base_url)
def extract_items(self, document, base_url=None):
return [item for items in map(self._extract_items,
self._xp_jsonld(document))
for item in items
if item]
def _extract_items(self, node):
script = node.xpath('string()')
try:
data = json.loads(script)
except ValueError:
# sometimes JSON-decoding errors are due to leading HTML or JavaScript comments
try:
data = json.loads(HTML_OR_JS_COMMENTLINE.sub('', script))
except ValueError: # ValueError again because json.JSONDecodeError(bases from ValueError) appears since Python 3.5
# some pages have JSON-LD data with control characters, json.loads should use strict=False
data = json.loads(script, strict=False)
if isinstance(data, list):
return data
elif isinstance(data, dict):
return [data]
Mod: Remove leading comments and allow control characters directly.# -*- coding: utf-8 -*-
"""
JSON-LD extractor
"""
import json
import re
import lxml.etree
import lxml.html
HTML_OR_JS_COMMENTLINE = re.compile('^\s*(//.*|<!--.*-->)')
class JsonLdExtractor(object):
_xp_jsonld = lxml.etree.XPath('descendant-or-self::script[@type="application/ld+json"]')
def extract(self, htmlstring, base_url=None, encoding="UTF-8"):
parser = lxml.html.HTMLParser(encoding=encoding)
lxmldoc = lxml.html.fromstring(htmlstring, parser=parser)
return self.extract_items(lxmldoc, base_url=base_url)
def extract_items(self, document, base_url=None):
return [item for items in map(self._extract_items,
self._xp_jsonld(document))
for item in items
if item]
def _extract_items(self, node):
script = node.xpath('string()')
# now do remove possible leading HTML/JavaScript comment first, allow control characters to be loaded
# TODO: `strict=False` can be configurable if needed
data = json.loads(HTML_OR_JS_COMMENTLINE.sub('', script), strict=False)
if isinstance(data, list):
return data
elif isinstance(data, dict):
return [data]
|
<commit_before># -*- coding: utf-8 -*-
"""
JSON-LD extractor
"""
import json
import re
import lxml.etree
import lxml.html
HTML_OR_JS_COMMENTLINE = re.compile('^\s*(//.*|<!--.*-->)')
class JsonLdExtractor(object):
_xp_jsonld = lxml.etree.XPath('descendant-or-self::script[@type="application/ld+json"]')
def extract(self, htmlstring, base_url=None, encoding="UTF-8"):
parser = lxml.html.HTMLParser(encoding=encoding)
lxmldoc = lxml.html.fromstring(htmlstring, parser=parser)
return self.extract_items(lxmldoc, base_url=base_url)
def extract_items(self, document, base_url=None):
return [item for items in map(self._extract_items,
self._xp_jsonld(document))
for item in items
if item]
def _extract_items(self, node):
script = node.xpath('string()')
try:
data = json.loads(script)
except ValueError:
# sometimes JSON-decoding errors are due to leading HTML or JavaScript comments
try:
data = json.loads(HTML_OR_JS_COMMENTLINE.sub('', script))
except ValueError: # ValueError again because json.JSONDecodeError(bases from ValueError) appears since Python 3.5
# some pages have JSON-LD data with control characters, json.loads should use strict=False
data = json.loads(script, strict=False)
if isinstance(data, list):
return data
elif isinstance(data, dict):
return [data]
<commit_msg>Mod: Remove leading comments and allow control characters directly.<commit_after># -*- coding: utf-8 -*-
"""
JSON-LD extractor
"""
import json
import re
import lxml.etree
import lxml.html
HTML_OR_JS_COMMENTLINE = re.compile('^\s*(//.*|<!--.*-->)')
class JsonLdExtractor(object):
_xp_jsonld = lxml.etree.XPath('descendant-or-self::script[@type="application/ld+json"]')
def extract(self, htmlstring, base_url=None, encoding="UTF-8"):
parser = lxml.html.HTMLParser(encoding=encoding)
lxmldoc = lxml.html.fromstring(htmlstring, parser=parser)
return self.extract_items(lxmldoc, base_url=base_url)
def extract_items(self, document, base_url=None):
return [item for items in map(self._extract_items,
self._xp_jsonld(document))
for item in items
if item]
def _extract_items(self, node):
script = node.xpath('string()')
# now do remove possible leading HTML/JavaScript comment first, allow control characters to be loaded
# TODO: `strict=False` can be configurable if needed
data = json.loads(HTML_OR_JS_COMMENTLINE.sub('', script), strict=False)
if isinstance(data, list):
return data
elif isinstance(data, dict):
return [data]
|
9f02929673389884d4dd261964b7b1be6c959caa
|
vault.py
|
vault.py
|
import os
import urllib2
import json
import sys
from urlparse import urljoin
from ansible import utils, errors
from ansible.utils import template
class LookupModule(object):
def __init__(self, basedir=None, **kwargs):
self.basedir = basedir
def run(self, terms, inject=None, **kwargs):
try:
terms = template.template(self.basedir, terms, inject)
except Exception, e:
pass
url = os.getenv('VAULT_ADDR')
if not url:
raise errors.AnsibleError('VAULT_ADDR environment variable is missing')
token = os.getenv('VAULT_TOKEN')
if not token:
raise errors.AnsibleError('VAULT_TOKEN environment variable is missing')
request_url = urljoin(url, "v1/%s" % (terms))
try:
headers = { 'X-Vault-Token' : token }
req = urllib2.Request(request_url, None, headers)
response = urllib2.urlopen(req)
except urllib2.HTTPError as e:
raise errors.AnsibleError('Unable to read %s from vault: %s' % (terms, e))
except:
raise errors.AnsibleError('Unable to read %s from vault' % terms)
result = json.loads(response.read())
return [result['data']['value']]
|
import os
import urllib2
import json
import sys
from urlparse import urljoin
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
key = terms[0]
url = os.getenv('VAULT_ADDR')
if not url:
raise AnsibleError('VAULT_ADDR environment variable is missing')
token = os.getenv('VAULT_TOKEN')
if not token:
raise AnsibleError('VAULT_TOKEN environment variable is missing')
request_url = urljoin(url, "v1/%s" % (key))
try:
headers = { 'X-Vault-Token' : token }
req = urllib2.Request(request_url, None, headers)
response = urllib2.urlopen(req)
except urllib2.HTTPError as e:
raise AnsibleError('Unable to read %s from vault: %s' % (key, e))
except:
raise AnsibleError('Unable to read %s from vault' % key)
result = json.loads(response.read())
return [result['data']['value']]
|
Update plugin for ansible 2.0
|
Update plugin for ansible 2.0
|
Python
|
bsd-3-clause
|
jhaals/ansible-vault,jhaals/ansible-vault,locationlabs/ansible-vault
|
import os
import urllib2
import json
import sys
from urlparse import urljoin
from ansible import utils, errors
from ansible.utils import template
class LookupModule(object):
def __init__(self, basedir=None, **kwargs):
self.basedir = basedir
def run(self, terms, inject=None, **kwargs):
try:
terms = template.template(self.basedir, terms, inject)
except Exception, e:
pass
url = os.getenv('VAULT_ADDR')
if not url:
raise errors.AnsibleError('VAULT_ADDR environment variable is missing')
token = os.getenv('VAULT_TOKEN')
if not token:
raise errors.AnsibleError('VAULT_TOKEN environment variable is missing')
request_url = urljoin(url, "v1/%s" % (terms))
try:
headers = { 'X-Vault-Token' : token }
req = urllib2.Request(request_url, None, headers)
response = urllib2.urlopen(req)
except urllib2.HTTPError as e:
raise errors.AnsibleError('Unable to read %s from vault: %s' % (terms, e))
except:
raise errors.AnsibleError('Unable to read %s from vault' % terms)
result = json.loads(response.read())
return [result['data']['value']]
Update plugin for ansible 2.0
|
import os
import urllib2
import json
import sys
from urlparse import urljoin
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
key = terms[0]
url = os.getenv('VAULT_ADDR')
if not url:
raise AnsibleError('VAULT_ADDR environment variable is missing')
token = os.getenv('VAULT_TOKEN')
if not token:
raise AnsibleError('VAULT_TOKEN environment variable is missing')
request_url = urljoin(url, "v1/%s" % (key))
try:
headers = { 'X-Vault-Token' : token }
req = urllib2.Request(request_url, None, headers)
response = urllib2.urlopen(req)
except urllib2.HTTPError as e:
raise AnsibleError('Unable to read %s from vault: %s' % (key, e))
except:
raise AnsibleError('Unable to read %s from vault' % key)
result = json.loads(response.read())
return [result['data']['value']]
|
<commit_before>import os
import urllib2
import json
import sys
from urlparse import urljoin
from ansible import utils, errors
from ansible.utils import template
class LookupModule(object):
def __init__(self, basedir=None, **kwargs):
self.basedir = basedir
def run(self, terms, inject=None, **kwargs):
try:
terms = template.template(self.basedir, terms, inject)
except Exception, e:
pass
url = os.getenv('VAULT_ADDR')
if not url:
raise errors.AnsibleError('VAULT_ADDR environment variable is missing')
token = os.getenv('VAULT_TOKEN')
if not token:
raise errors.AnsibleError('VAULT_TOKEN environment variable is missing')
request_url = urljoin(url, "v1/%s" % (terms))
try:
headers = { 'X-Vault-Token' : token }
req = urllib2.Request(request_url, None, headers)
response = urllib2.urlopen(req)
except urllib2.HTTPError as e:
raise errors.AnsibleError('Unable to read %s from vault: %s' % (terms, e))
except:
raise errors.AnsibleError('Unable to read %s from vault' % terms)
result = json.loads(response.read())
return [result['data']['value']]
<commit_msg>Update plugin for ansible 2.0<commit_after>
|
import os
import urllib2
import json
import sys
from urlparse import urljoin
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
class LookupModule(LookupBase):
    """Ansible 2.0 lookup plugin that reads a secret from HashiCorp Vault.

    Reads the Vault address and token from the ``VAULT_ADDR`` and
    ``VAULT_TOKEN`` environment variables, fetches ``v1/<path>`` over the
    Vault HTTP API, and returns the secret's ``value`` field.
    """

    def run(self, terms, variables, **kwargs):
        """Return ``[value]`` for the secret path given as the first term.

        Raises AnsibleError when configuration is missing or the read fails.
        """
        key = terms[0]
        url = os.getenv('VAULT_ADDR')
        if not url:
            raise AnsibleError('VAULT_ADDR environment variable is missing')
        token = os.getenv('VAULT_TOKEN')
        if not token:
            raise AnsibleError('VAULT_TOKEN environment variable is missing')
        # "v1/" is the Vault HTTP API prefix for reading a secret path.
        request_url = urljoin(url, "v1/%s" % (key))
        try:
            headers = {'X-Vault-Token': token}
            req = urllib2.Request(request_url, None, headers)
            response = urllib2.urlopen(req)
        except urllib2.HTTPError as e:
            # HTTP-level failure: include the status in the message.
            raise AnsibleError('Unable to read %s from vault: %s' % (key, e))
        except Exception:
            # Was a bare ``except:``, which also swallowed SystemExit and
            # KeyboardInterrupt; catch only real errors (URLError, socket
            # errors, ...) from urlopen.
            raise AnsibleError('Unable to read %s from vault' % key)
        result = json.loads(response.read())
        return [result['data']['value']]
|
import os
import urllib2
import json
import sys
from urlparse import urljoin
from ansible import utils, errors
from ansible.utils import template
class LookupModule(object):
def __init__(self, basedir=None, **kwargs):
self.basedir = basedir
def run(self, terms, inject=None, **kwargs):
try:
terms = template.template(self.basedir, terms, inject)
except Exception, e:
pass
url = os.getenv('VAULT_ADDR')
if not url:
raise errors.AnsibleError('VAULT_ADDR environment variable is missing')
token = os.getenv('VAULT_TOKEN')
if not token:
raise errors.AnsibleError('VAULT_TOKEN environment variable is missing')
request_url = urljoin(url, "v1/%s" % (terms))
try:
headers = { 'X-Vault-Token' : token }
req = urllib2.Request(request_url, None, headers)
response = urllib2.urlopen(req)
except urllib2.HTTPError as e:
raise errors.AnsibleError('Unable to read %s from vault: %s' % (terms, e))
except:
raise errors.AnsibleError('Unable to read %s from vault' % terms)
result = json.loads(response.read())
return [result['data']['value']]
Update plugin for ansible 2.0import os
import urllib2
import json
import sys
from urlparse import urljoin
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
key = terms[0]
url = os.getenv('VAULT_ADDR')
if not url:
raise AnsibleError('VAULT_ADDR environment variable is missing')
token = os.getenv('VAULT_TOKEN')
if not token:
raise AnsibleError('VAULT_TOKEN environment variable is missing')
request_url = urljoin(url, "v1/%s" % (key))
try:
headers = { 'X-Vault-Token' : token }
req = urllib2.Request(request_url, None, headers)
response = urllib2.urlopen(req)
except urllib2.HTTPError as e:
raise AnsibleError('Unable to read %s from vault: %s' % (key, e))
except:
raise AnsibleError('Unable to read %s from vault' % key)
result = json.loads(response.read())
return [result['data']['value']]
|
<commit_before>import os
import urllib2
import json
import sys
from urlparse import urljoin
from ansible import utils, errors
from ansible.utils import template
class LookupModule(object):
def __init__(self, basedir=None, **kwargs):
self.basedir = basedir
def run(self, terms, inject=None, **kwargs):
try:
terms = template.template(self.basedir, terms, inject)
except Exception, e:
pass
url = os.getenv('VAULT_ADDR')
if not url:
raise errors.AnsibleError('VAULT_ADDR environment variable is missing')
token = os.getenv('VAULT_TOKEN')
if not token:
raise errors.AnsibleError('VAULT_TOKEN environment variable is missing')
request_url = urljoin(url, "v1/%s" % (terms))
try:
headers = { 'X-Vault-Token' : token }
req = urllib2.Request(request_url, None, headers)
response = urllib2.urlopen(req)
except urllib2.HTTPError as e:
raise errors.AnsibleError('Unable to read %s from vault: %s' % (terms, e))
except:
raise errors.AnsibleError('Unable to read %s from vault' % terms)
result = json.loads(response.read())
return [result['data']['value']]
<commit_msg>Update plugin for ansible 2.0<commit_after>import os
import urllib2
import json
import sys
from urlparse import urljoin
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
key = terms[0]
url = os.getenv('VAULT_ADDR')
if not url:
raise AnsibleError('VAULT_ADDR environment variable is missing')
token = os.getenv('VAULT_TOKEN')
if not token:
raise AnsibleError('VAULT_TOKEN environment variable is missing')
request_url = urljoin(url, "v1/%s" % (key))
try:
headers = { 'X-Vault-Token' : token }
req = urllib2.Request(request_url, None, headers)
response = urllib2.urlopen(req)
except urllib2.HTTPError as e:
raise AnsibleError('Unable to read %s from vault: %s' % (key, e))
except:
raise AnsibleError('Unable to read %s from vault' % key)
result = json.loads(response.read())
return [result['data']['value']]
|
5f8d59646875d4e4aa75ec22a2ddc666c1802a23
|
readthedocs/core/utils/tasks/__init__.py
|
readthedocs/core/utils/tasks/__init__.py
|
from .permission_checks import user_id_matches
from .public import permission_check
from .public import get_public_task_data
from .retrieve import TaskNotFound
from .retrieve import get_task_data
|
from .permission_checks import user_id_matches
from .public import PublicTask
from .public import TaskNoPermission
from .public import permission_check
from .public import get_public_task_data
from .retrieve import TaskNotFound
from .retrieve import get_task_data
|
Revert previous commit by adding missing imports
|
Revert previous commit by adding missing imports
|
Python
|
mit
|
rtfd/readthedocs.org,tddv/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,davidfischer/readthedocs.org,davidfischer/readthedocs.org,pombredanne/readthedocs.org,pombredanne/readthedocs.org,pombredanne/readthedocs.org,safwanrahman/readthedocs.org,tddv/readthedocs.org,davidfischer/readthedocs.org,tddv/readthedocs.org,davidfischer/readthedocs.org,safwanrahman/readthedocs.org,safwanrahman/readthedocs.org,rtfd/readthedocs.org,safwanrahman/readthedocs.org
|
from .permission_checks import user_id_matches
from .public import permission_check
from .public import get_public_task_data
from .retrieve import TaskNotFound
from .retrieve import get_task_data
Revert previous commit by adding missing imports
|
from .permission_checks import user_id_matches
from .public import PublicTask
from .public import TaskNoPermission
from .public import permission_check
from .public import get_public_task_data
from .retrieve import TaskNotFound
from .retrieve import get_task_data
|
<commit_before>from .permission_checks import user_id_matches
from .public import permission_check
from .public import get_public_task_data
from .retrieve import TaskNotFound
from .retrieve import get_task_data
<commit_msg>Revert previous commit by adding missing imports<commit_after>
|
from .permission_checks import user_id_matches
from .public import PublicTask
from .public import TaskNoPermission
from .public import permission_check
from .public import get_public_task_data
from .retrieve import TaskNotFound
from .retrieve import get_task_data
|
from .permission_checks import user_id_matches
from .public import permission_check
from .public import get_public_task_data
from .retrieve import TaskNotFound
from .retrieve import get_task_data
Revert previous commit by adding missing importsfrom .permission_checks import user_id_matches
from .public import PublicTask
from .public import TaskNoPermission
from .public import permission_check
from .public import get_public_task_data
from .retrieve import TaskNotFound
from .retrieve import get_task_data
|
<commit_before>from .permission_checks import user_id_matches
from .public import permission_check
from .public import get_public_task_data
from .retrieve import TaskNotFound
from .retrieve import get_task_data
<commit_msg>Revert previous commit by adding missing imports<commit_after>from .permission_checks import user_id_matches
from .public import PublicTask
from .public import TaskNoPermission
from .public import permission_check
from .public import get_public_task_data
from .retrieve import TaskNotFound
from .retrieve import get_task_data
|
1706472a183d50fb11513345b4c82ac201a00306
|
me_api/configs.py
|
me_api/configs.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import json
from os import path
class Config(object):
cwd = path.abspath(path.dirname(__file__))
with open(path.join(cwd, 'me.json')) as me:
me = json.load(me)
with open(path.join(cwd, 'modules.json')) as modules:
modules = json.load(modules)
class DevelopConfig(Config):
DEBUG = True
CACHE_TYPE = 'simple'
class ProductionConfig(Config):
DEBUG = False
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import json
from os import path
class Config(object):
CACHE_TYPE = 'simple'
cwd = path.abspath(path.dirname(__file__))
with open(path.join(cwd, 'me.json')) as me:
me = json.load(me)
with open(path.join(cwd, 'modules.json')) as modules:
modules = json.load(modules)
class DevelopConfig(Config):
DEBUG = True
class ProductionConfig(Config):
DEBUG = False
class TestingConfig(Config):
TESTING = True
|
Move CACHE_TYPE to Config; add TestingConfig
|
Move CACHE_TYPE to Config; add TestingConfig
|
Python
|
mit
|
lord63/me-api
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import json
from os import path
class Config(object):
cwd = path.abspath(path.dirname(__file__))
with open(path.join(cwd, 'me.json')) as me:
me = json.load(me)
with open(path.join(cwd, 'modules.json')) as modules:
modules = json.load(modules)
class DevelopConfig(Config):
DEBUG = True
CACHE_TYPE = 'simple'
class ProductionConfig(Config):
DEBUG = False
Move CACHE_TYPE to Config; add TestingConfig
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import json
from os import path
class Config(object):
CACHE_TYPE = 'simple'
cwd = path.abspath(path.dirname(__file__))
with open(path.join(cwd, 'me.json')) as me:
me = json.load(me)
with open(path.join(cwd, 'modules.json')) as modules:
modules = json.load(modules)
class DevelopConfig(Config):
DEBUG = True
class ProductionConfig(Config):
DEBUG = False
class TestingConfig(Config):
TESTING = True
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import json
from os import path
class Config(object):
cwd = path.abspath(path.dirname(__file__))
with open(path.join(cwd, 'me.json')) as me:
me = json.load(me)
with open(path.join(cwd, 'modules.json')) as modules:
modules = json.load(modules)
class DevelopConfig(Config):
DEBUG = True
CACHE_TYPE = 'simple'
class ProductionConfig(Config):
DEBUG = False
<commit_msg>Move CACHE_TYPE to Config; add TestingConfig<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import json
from os import path
class Config(object):
    """Base configuration shared by all environments.

    Loads ``me.json`` and ``modules.json`` from the directory of this module
    at import time and exposes the parsed data as the class attributes
    ``me`` and ``modules``.
    """
    # Cache backend name -- looks like Flask-Cache's CACHE_TYPE setting;
    # TODO confirm against the app factory.
    CACHE_TYPE = 'simple'
    cwd = path.abspath(path.dirname(__file__))
    with open(path.join(cwd, 'me.json')) as me:
        me = json.load(me)
    with open(path.join(cwd, 'modules.json')) as modules:
        modules = json.load(modules)
class DevelopConfig(Config):
    """Configuration for local development (debug mode on)."""
    DEBUG = True
class ProductionConfig(Config):
    """Configuration for production deployments (debug mode off)."""
    DEBUG = False
class TestingConfig(Config):
    """Configuration used by the test suite (TESTING flag on)."""
    TESTING = True
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import json
from os import path
class Config(object):
cwd = path.abspath(path.dirname(__file__))
with open(path.join(cwd, 'me.json')) as me:
me = json.load(me)
with open(path.join(cwd, 'modules.json')) as modules:
modules = json.load(modules)
class DevelopConfig(Config):
DEBUG = True
CACHE_TYPE = 'simple'
class ProductionConfig(Config):
DEBUG = False
Move CACHE_TYPE to Config; add TestingConfig#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import json
from os import path
class Config(object):
CACHE_TYPE = 'simple'
cwd = path.abspath(path.dirname(__file__))
with open(path.join(cwd, 'me.json')) as me:
me = json.load(me)
with open(path.join(cwd, 'modules.json')) as modules:
modules = json.load(modules)
class DevelopConfig(Config):
DEBUG = True
class ProductionConfig(Config):
DEBUG = False
class TestingConfig(Config):
TESTING = True
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import json
from os import path
class Config(object):
cwd = path.abspath(path.dirname(__file__))
with open(path.join(cwd, 'me.json')) as me:
me = json.load(me)
with open(path.join(cwd, 'modules.json')) as modules:
modules = json.load(modules)
class DevelopConfig(Config):
DEBUG = True
CACHE_TYPE = 'simple'
class ProductionConfig(Config):
DEBUG = False
<commit_msg>Move CACHE_TYPE to Config; add TestingConfig<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import json
from os import path
class Config(object):
CACHE_TYPE = 'simple'
cwd = path.abspath(path.dirname(__file__))
with open(path.join(cwd, 'me.json')) as me:
me = json.load(me)
with open(path.join(cwd, 'modules.json')) as modules:
modules = json.load(modules)
class DevelopConfig(Config):
DEBUG = True
class ProductionConfig(Config):
DEBUG = False
class TestingConfig(Config):
TESTING = True
|
9467cfc4fa3f0bd2c269f3d7b61460ddc6851f9f
|
tests/test_dfw_uncomparables.py
|
tests/test_dfw_uncomparables.py
|
"""Test dfw.uncomparables."""
from check import Check
from proselint.checks.wallace import uncomparables as chk
class TestCheck(Check):
"""The test class for dfw.uncomparables."""
__test__ = True
@property
def this_check(self):
"""Bolierplate."""
return chk
def test_sample_phrases(self):
"""Find 'very unique'."""
assert not self.passes("""This sentence is very unique.""")
def test_linebreaks(self):
"""Handle linebreaks correctly."""
assert not self.passes("""This sentence is very\nunique.""")
|
"""Test dfw.uncomparables."""
from check import Check
from proselint.checks.wallace import uncomparables as chk
class TestCheck(Check):
"""The test class for dfw.uncomparables."""
__test__ = True
@property
def this_check(self):
"""Bolierplate."""
return chk
def test_sample_phrases(self):
"""Find 'very unique'."""
assert not self.passes("""This sentence is very unique.""")
def test_linebreaks(self):
"""Handle linebreaks correctly."""
assert not self.passes("""This sentence is very\nunique.""")
def test_constitutional(self):
"""Don't flag 'more perfect'."""
assert self.passes("""A more perfect union.""")
|
Add test for exception to uncomparable check
|
Add test for exception to uncomparable check
|
Python
|
bsd-3-clause
|
jstewmon/proselint,amperser/proselint,jstewmon/proselint,amperser/proselint,jstewmon/proselint,amperser/proselint,amperser/proselint,amperser/proselint
|
"""Test dfw.uncomparables."""
from check import Check
from proselint.checks.wallace import uncomparables as chk
class TestCheck(Check):
"""The test class for dfw.uncomparables."""
__test__ = True
@property
def this_check(self):
"""Bolierplate."""
return chk
def test_sample_phrases(self):
"""Find 'very unique'."""
assert not self.passes("""This sentence is very unique.""")
def test_linebreaks(self):
"""Handle linebreaks correctly."""
assert not self.passes("""This sentence is very\nunique.""")
Add test for exception to uncomparable check
|
"""Test dfw.uncomparables."""
from check import Check
from proselint.checks.wallace import uncomparables as chk
class TestCheck(Check):
"""The test class for dfw.uncomparables."""
__test__ = True
@property
def this_check(self):
"""Bolierplate."""
return chk
def test_sample_phrases(self):
"""Find 'very unique'."""
assert not self.passes("""This sentence is very unique.""")
def test_linebreaks(self):
"""Handle linebreaks correctly."""
assert not self.passes("""This sentence is very\nunique.""")
def test_constitutional(self):
"""Don't flag 'more perfect'."""
assert self.passes("""A more perfect union.""")
|
<commit_before>"""Test dfw.uncomparables."""
from check import Check
from proselint.checks.wallace import uncomparables as chk
class TestCheck(Check):
"""The test class for dfw.uncomparables."""
__test__ = True
@property
def this_check(self):
"""Bolierplate."""
return chk
def test_sample_phrases(self):
"""Find 'very unique'."""
assert not self.passes("""This sentence is very unique.""")
def test_linebreaks(self):
"""Handle linebreaks correctly."""
assert not self.passes("""This sentence is very\nunique.""")
<commit_msg>Add test for exception to uncomparable check<commit_after>
|
"""Test dfw.uncomparables."""
from check import Check
from proselint.checks.wallace import uncomparables as chk
class TestCheck(Check):
    """The test class for dfw.uncomparables."""

    # Opt this concrete subclass into test collection (the Check base is
    # presumably excluded -- confirm in check.py).
    __test__ = True

    @property
    def this_check(self):
        """Boilerplate: the check module under test."""
        return chk

    def test_sample_phrases(self):
        """Find 'very unique'."""
        assert not self.passes("""This sentence is very unique.""")

    def test_linebreaks(self):
        """Handle linebreaks correctly."""
        assert not self.passes("""This sentence is very\nunique.""")

    def test_constitutional(self):
        """Don't flag 'more perfect'."""
        assert self.passes("""A more perfect union.""")
|
"""Test dfw.uncomparables."""
from check import Check
from proselint.checks.wallace import uncomparables as chk
class TestCheck(Check):
"""The test class for dfw.uncomparables."""
__test__ = True
@property
def this_check(self):
"""Bolierplate."""
return chk
def test_sample_phrases(self):
"""Find 'very unique'."""
assert not self.passes("""This sentence is very unique.""")
def test_linebreaks(self):
"""Handle linebreaks correctly."""
assert not self.passes("""This sentence is very\nunique.""")
Add test for exception to uncomparable check"""Test dfw.uncomparables."""
from check import Check
from proselint.checks.wallace import uncomparables as chk
class TestCheck(Check):
"""The test class for dfw.uncomparables."""
__test__ = True
@property
def this_check(self):
"""Bolierplate."""
return chk
def test_sample_phrases(self):
"""Find 'very unique'."""
assert not self.passes("""This sentence is very unique.""")
def test_linebreaks(self):
"""Handle linebreaks correctly."""
assert not self.passes("""This sentence is very\nunique.""")
def test_constitutional(self):
"""Don't flag 'more perfect'."""
assert self.passes("""A more perfect union.""")
|
<commit_before>"""Test dfw.uncomparables."""
from check import Check
from proselint.checks.wallace import uncomparables as chk
class TestCheck(Check):
"""The test class for dfw.uncomparables."""
__test__ = True
@property
def this_check(self):
"""Bolierplate."""
return chk
def test_sample_phrases(self):
"""Find 'very unique'."""
assert not self.passes("""This sentence is very unique.""")
def test_linebreaks(self):
"""Handle linebreaks correctly."""
assert not self.passes("""This sentence is very\nunique.""")
<commit_msg>Add test for exception to uncomparable check<commit_after>"""Test dfw.uncomparables."""
from check import Check
from proselint.checks.wallace import uncomparables as chk
class TestCheck(Check):
"""The test class for dfw.uncomparables."""
__test__ = True
@property
def this_check(self):
"""Bolierplate."""
return chk
def test_sample_phrases(self):
"""Find 'very unique'."""
assert not self.passes("""This sentence is very unique.""")
def test_linebreaks(self):
"""Handle linebreaks correctly."""
assert not self.passes("""This sentence is very\nunique.""")
def test_constitutional(self):
"""Don't flag 'more perfect'."""
assert self.passes("""A more perfect union.""")
|
7e23d49dbd66fac972539326c81448e8439206e8
|
PyGitUp/utils.py
|
PyGitUp/utils.py
|
# coding=utf-8
"""
Some simple, generic usefull methods.
"""
import subprocess
import sys
def find(seq, test):
""" Return first item in sequence where test(item) == True """
for item in seq:
if test(item):
return item
def uniq(seq):
""" Return a copy of seq without duplicates. """
seen = set()
return [x for x in seq if str(x) not in seen and not seen.add(str(x))]
def execute(cmd, cwd=None):
""" Execute a command and return it's output. """
try:
lines = subprocess.check_output(cmd, cwd=cwd).splitlines()
except subprocess.CalledProcessError:
return None
else:
if lines:
return decode(lines[0].strip())
else:
return None
def decode(s):
"""
Decode a string using the system encoding if needed (ie byte strings)
"""
if isinstance(s, bytes):
return s.decode(sys.getdefaultencoding())
else:
return s
|
# coding=utf-8
"""
Some simple, generic, useful methods.
"""
import subprocess
import sys
def find(seq, test):
    """Return the first item of *seq* for which ``test(item)`` is true.

    Returns None when no item matches (or *seq* is empty).
    """
    matches = (candidate for candidate in seq if test(candidate))
    return next(matches, None)
def uniq(seq):
    """Return a copy of *seq* without duplicates.

    Order is preserved; items are considered equal when their ``str()``
    representations match, so e.g. ``1`` and ``"1"`` collide.
    """
    seen = set()
    result = []
    for item in seq:
        key = str(item)
        if key in seen:
            continue
        seen.add(key)
        result.append(item)
    return result
def execute(cmd, cwd=None):
    """Run *cmd* (optionally in *cwd*) and return its first output line.

    stderr is discarded. Returns None when the command fails or produces
    no output; otherwise the stripped, decoded first line of stdout.
    """
    try:
        output = subprocess.check_output(cmd, cwd=cwd,
                                         stderr=subprocess.DEVNULL)
    except subprocess.CalledProcessError:
        return None
    lines = output.splitlines()
    if not lines:
        return None
    return decode(lines[0].strip())
def decode(s):
    """Decode *s* with the system default encoding if it is a byte string.

    Non-bytes values are returned unchanged.
    """
    if not isinstance(s, bytes):
        return s
    return s.decode(sys.getdefaultencoding())
|
Hide stderr messages when detecting git dir
|
Hide stderr messages when detecting git dir
|
Python
|
mit
|
msiemens/PyGitUp
|
# coding=utf-8
"""
Some simple, generic usefull methods.
"""
import subprocess
import sys
def find(seq, test):
    """Return the first item of *seq* for which ``test(item)`` is truthy.

    Falls through (returning None) when no item matches.
    """
    for item in seq:
        if test(item):
            return item


def uniq(seq):
    """Return a copy of *seq* without duplicates, preserving order.

    Items are compared by their ``str()`` form, so unhashable items work.
    """
    # Explicit loop instead of a comprehension that relied on the side
    # effect of ``seen.add`` inside the filter expression.
    seen = set()
    result = []
    for item in seq:
        key = str(item)
        if key not in seen:
            seen.add(key)
            result.append(item)
    return result


def execute(cmd, cwd=None):
    """Run *cmd* and return the first line of its output (decoded), or
    None when the command exits non-zero or produces no output.

    stderr is discarded so probing commands do not spam the console.
    """
    try:
        lines = subprocess.check_output(
            cmd, cwd=cwd, stderr=subprocess.DEVNULL).splitlines()
    except subprocess.CalledProcessError:
        return None
    if lines:
        return decode(lines[0].strip())
    return None


def decode(s):
    """Decode byte strings with the system default encoding; return any
    other value unchanged.
    """
    if isinstance(s, bytes):
        return s.decode(sys.getdefaultencoding())
    return s
Hide stderr messages when detecting git dir
|
# coding=utf-8
"""
Some simple, generic usefull methods.
"""
import subprocess
import sys
def find(seq, test):
    """Return the first item of *seq* for which ``test(item)`` is truthy.

    Falls through (returning None) when no item matches.
    """
    for item in seq:
        if test(item):
            return item


def uniq(seq):
    """Return a copy of *seq* without duplicates, preserving order.

    Items are compared by their ``str()`` form, so unhashable items work.
    """
    # Explicit loop instead of a comprehension that relied on the side
    # effect of ``seen.add`` inside the filter expression.
    seen = set()
    result = []
    for item in seq:
        key = str(item)
        if key not in seen:
            seen.add(key)
            result.append(item)
    return result


def execute(cmd, cwd=None):
    """Run *cmd* and return the first line of its output (decoded), or
    None when the command exits non-zero or produces no output.

    stderr is discarded so probing commands do not spam the console.
    """
    try:
        lines = subprocess.check_output(
            cmd, cwd=cwd, stderr=subprocess.DEVNULL).splitlines()
    except subprocess.CalledProcessError:
        return None
    if lines:
        return decode(lines[0].strip())
    return None


def decode(s):
    """Decode byte strings with the system default encoding; return any
    other value unchanged.
    """
    if isinstance(s, bytes):
        return s.decode(sys.getdefaultencoding())
    return s
|
<commit_before># coding=utf-8
"""
Some simple, generic usefull methods.
"""
import subprocess
import sys
def find(seq, test):
    """Return the first item of *seq* for which ``test(item)`` is truthy.

    Falls through (returning None) when no item matches.
    """
    for item in seq:
        if test(item):
            return item


def uniq(seq):
    """Return a copy of *seq* without duplicates, preserving order.

    Items are compared by their ``str()`` form, so unhashable items work.
    """
    # Explicit loop instead of a comprehension that relied on the side
    # effect of ``seen.add`` inside the filter expression.
    seen = set()
    result = []
    for item in seq:
        key = str(item)
        if key not in seen:
            seen.add(key)
            result.append(item)
    return result


def execute(cmd, cwd=None):
    """Run *cmd* and return the first line of its output (decoded), or
    None when the command exits non-zero or produces no output.

    stderr is discarded so probing commands do not spam the console.
    """
    try:
        lines = subprocess.check_output(
            cmd, cwd=cwd, stderr=subprocess.DEVNULL).splitlines()
    except subprocess.CalledProcessError:
        return None
    if lines:
        return decode(lines[0].strip())
    return None


def decode(s):
    """Decode byte strings with the system default encoding; return any
    other value unchanged.
    """
    if isinstance(s, bytes):
        return s.decode(sys.getdefaultencoding())
    return s
<commit_msg>Hide stderr messages when detecting git dir<commit_after>
|
# coding=utf-8
"""
Some simple, generic usefull methods.
"""
import subprocess
import sys
def find(seq, test):
    """Return the first item of *seq* for which ``test(item)`` is truthy.

    Falls through (returning None) when no item matches.
    """
    for item in seq:
        if test(item):
            return item


def uniq(seq):
    """Return a copy of *seq* without duplicates, preserving order.

    Items are compared by their ``str()`` form, so unhashable items work.
    """
    # Explicit loop instead of a comprehension that relied on the side
    # effect of ``seen.add`` inside the filter expression.
    seen = set()
    result = []
    for item in seq:
        key = str(item)
        if key not in seen:
            seen.add(key)
            result.append(item)
    return result


def execute(cmd, cwd=None):
    """Run *cmd* and return the first line of its output (decoded), or
    None when the command exits non-zero or produces no output.

    stderr is discarded so probing commands do not spam the console.
    """
    try:
        lines = subprocess.check_output(
            cmd, cwd=cwd, stderr=subprocess.DEVNULL).splitlines()
    except subprocess.CalledProcessError:
        return None
    if lines:
        return decode(lines[0].strip())
    return None


def decode(s):
    """Decode byte strings with the system default encoding; return any
    other value unchanged.
    """
    if isinstance(s, bytes):
        return s.decode(sys.getdefaultencoding())
    return s
|
# coding=utf-8
"""
Some simple, generic usefull methods.
"""
import subprocess
import sys
def find(seq, test):
""" Return first item in sequence where test(item) == True """
for item in seq:
if test(item):
return item
def uniq(seq):
""" Return a copy of seq without duplicates. """
seen = set()
return [x for x in seq if str(x) not in seen and not seen.add(str(x))]
def execute(cmd, cwd=None):
""" Execute a command and return it's output. """
try:
lines = subprocess.check_output(cmd, cwd=cwd).splitlines()
except subprocess.CalledProcessError:
return None
else:
if lines:
return decode(lines[0].strip())
else:
return None
def decode(s):
"""
Decode a string using the system encoding if needed (ie byte strings)
"""
if isinstance(s, bytes):
return s.decode(sys.getdefaultencoding())
else:
return s
Hide stderr messages when detecting git dir# coding=utf-8
"""
Some simple, generic usefull methods.
"""
import subprocess
import sys
def find(seq, test):
""" Return first item in sequence where test(item) == True """
for item in seq:
if test(item):
return item
def uniq(seq):
""" Return a copy of seq without duplicates. """
seen = set()
return [x for x in seq if str(x) not in seen and not seen.add(str(x))]
def execute(cmd, cwd=None):
""" Execute a command and return it's output. """
try:
lines = subprocess\
.check_output(cmd, cwd=cwd, stderr=subprocess.DEVNULL)\
.splitlines()
except subprocess.CalledProcessError:
return None
else:
if lines:
return decode(lines[0].strip())
else:
return None
def decode(s):
"""
Decode a string using the system encoding if needed (ie byte strings)
"""
if isinstance(s, bytes):
return s.decode(sys.getdefaultencoding())
else:
return s
|
<commit_before># coding=utf-8
"""
Some simple, generic usefull methods.
"""
import subprocess
import sys
def find(seq, test):
    """Return the first item of *seq* for which ``test(item)`` is truthy.

    Falls through (returning None) when no item matches.
    """
    for item in seq:
        if test(item):
            return item


def uniq(seq):
    """Return a copy of *seq* without duplicates, preserving order.

    Items are compared by their ``str()`` form, so unhashable items work.
    """
    # Explicit loop instead of a comprehension that relied on the side
    # effect of ``seen.add`` inside the filter expression.
    seen = set()
    result = []
    for item in seq:
        key = str(item)
        if key not in seen:
            seen.add(key)
            result.append(item)
    return result


def execute(cmd, cwd=None):
    """Run *cmd* and return the first line of its output (decoded), or
    None when the command exits non-zero or produces no output.

    stderr is discarded so probing commands do not spam the console.
    """
    try:
        lines = subprocess.check_output(
            cmd, cwd=cwd, stderr=subprocess.DEVNULL).splitlines()
    except subprocess.CalledProcessError:
        return None
    if lines:
        return decode(lines[0].strip())
    return None


def decode(s):
    """Decode byte strings with the system default encoding; return any
    other value unchanged.
    """
    if isinstance(s, bytes):
        return s.decode(sys.getdefaultencoding())
    return s
<commit_msg>Hide stderr messages when detecting git dir<commit_after># coding=utf-8
"""
Some simple, generic usefull methods.
"""
import subprocess
import sys
def find(seq, test):
    """Return the first item of *seq* for which ``test(item)`` is truthy.

    Falls through (returning None) when no item matches.
    """
    for item in seq:
        if test(item):
            return item


def uniq(seq):
    """Return a copy of *seq* without duplicates, preserving order.

    Items are compared by their ``str()`` form, so unhashable items work.
    """
    # Explicit loop instead of a comprehension that relied on the side
    # effect of ``seen.add`` inside the filter expression.
    seen = set()
    result = []
    for item in seq:
        key = str(item)
        if key not in seen:
            seen.add(key)
            result.append(item)
    return result


def execute(cmd, cwd=None):
    """Run *cmd* and return the first line of its output (decoded), or
    None when the command exits non-zero or produces no output.

    stderr is discarded so probing commands do not spam the console.
    """
    try:
        lines = subprocess.check_output(
            cmd, cwd=cwd, stderr=subprocess.DEVNULL).splitlines()
    except subprocess.CalledProcessError:
        return None
    if lines:
        return decode(lines[0].strip())
    return None


def decode(s):
    """Decode byte strings with the system default encoding; return any
    other value unchanged.
    """
    if isinstance(s, bytes):
        return s.decode(sys.getdefaultencoding())
    return s
|
8b2f251be2c5723e7825a38634e84dbb82b0d844
|
sir/__main__.py
|
sir/__main__.py
|
# Copyright (c) 2014 Wieland Hoffmann
# License: MIT, see LICENSE for details
import argparse
import logging
from . import config
from .indexing import reindex
logger = logging.getLogger("sir")
def watch(args):
raise NotImplementedError
def main():
loghandler = logging.StreamHandler()
formatter = logging.Formatter(fmt="%(threadName)s %(levelname)s: %(message)s")
loghandler.setFormatter(formatter)
logger.addHandler(loghandler)
parser = argparse.ArgumentParser()
parser.add_argument("-d", "--debug", action="store_true")
subparsers = parser.add_subparsers()
reindex_parser = subparsers.add_parser("reindex", help="Reindexes all or a single entity type")
reindex_parser.set_defaults(func=reindex)
reindex_parser.add_argument('--entities', action='append', help='The entities to reindex')
watch_parser = subparsers.add_parser("watch", help="Watches for incoming messages on an AMQP queue")
watch_parser.set_defaults(func=watch)
args = parser.parse_args()
if args.debug:
logger.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
config.read_config()
func = args.func
args = vars(args)
func(args["entities"], args["debug"])
if __name__ == '__main__':
main()
|
# Copyright (c) 2014 Wieland Hoffmann
# License: MIT, see LICENSE for details
import argparse
import logging
from . import config
from .indexing import reindex
logger = logging.getLogger("sir")
def watch(args):
    """Watch an AMQP queue for incoming messages (not implemented yet)."""
    raise NotImplementedError


def main():
    """CLI entry point: configure logging, parse arguments, dispatch."""
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter(
        fmt="%(asctime)s %(threadName)s %(levelname)s: %(message)s"))
    logger.addHandler(handler)

    parser = argparse.ArgumentParser()
    parser.add_argument("-d", "--debug", action="store_true")
    subparsers = parser.add_subparsers()

    reindex_parser = subparsers.add_parser(
        "reindex", help="Reindexes all or a single entity type")
    reindex_parser.set_defaults(func=reindex)
    reindex_parser.add_argument(
        '--entities', action='append', help='The entities to reindex')

    watch_parser = subparsers.add_parser(
        "watch", help="Watches for incoming messages on an AMQP queue")
    watch_parser.set_defaults(func=watch)

    parsed = parser.parse_args()
    logger.setLevel(logging.DEBUG if parsed.debug else logging.INFO)
    config.read_config()

    # NOTE(review): only the "reindex" sub-parser defines --entities, so
    # the lookup below would raise KeyError for "watch" -- confirm.
    options = vars(parsed)
    parsed.func(options["entities"], options["debug"])


if __name__ == '__main__':
    main()
|
Include timestamps in log messages
|
Include timestamps in log messages
|
Python
|
mit
|
jeffweeksio/sir
|
# Copyright (c) 2014 Wieland Hoffmann
# License: MIT, see LICENSE for details
import argparse
import logging
from . import config
from .indexing import reindex
logger = logging.getLogger("sir")
def watch(args):
    """Watch an AMQP queue for incoming messages (not implemented yet)."""
    raise NotImplementedError


def main():
    """CLI entry point: configure logging, parse arguments, dispatch."""
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter(
        fmt="%(threadName)s %(levelname)s: %(message)s"))
    logger.addHandler(handler)

    parser = argparse.ArgumentParser()
    parser.add_argument("-d", "--debug", action="store_true")
    subparsers = parser.add_subparsers()

    reindex_parser = subparsers.add_parser(
        "reindex", help="Reindexes all or a single entity type")
    reindex_parser.set_defaults(func=reindex)
    reindex_parser.add_argument(
        '--entities', action='append', help='The entities to reindex')

    watch_parser = subparsers.add_parser(
        "watch", help="Watches for incoming messages on an AMQP queue")
    watch_parser.set_defaults(func=watch)

    parsed = parser.parse_args()
    logger.setLevel(logging.DEBUG if parsed.debug else logging.INFO)
    config.read_config()

    # NOTE(review): only the "reindex" sub-parser defines --entities, so
    # the lookup below would raise KeyError for "watch" -- confirm.
    options = vars(parsed)
    parsed.func(options["entities"], options["debug"])
if __name__ == '__main__':
main()Include timestamps in log messages
|
# Copyright (c) 2014 Wieland Hoffmann
# License: MIT, see LICENSE for details
import argparse
import logging
from . import config
from .indexing import reindex
logger = logging.getLogger("sir")
def watch(args):
    """Watch an AMQP queue for incoming messages (not implemented yet)."""
    raise NotImplementedError


def main():
    """CLI entry point: configure logging, parse arguments, dispatch."""
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter(
        fmt="%(asctime)s %(threadName)s %(levelname)s: %(message)s"))
    logger.addHandler(handler)

    parser = argparse.ArgumentParser()
    parser.add_argument("-d", "--debug", action="store_true")
    subparsers = parser.add_subparsers()

    reindex_parser = subparsers.add_parser(
        "reindex", help="Reindexes all or a single entity type")
    reindex_parser.set_defaults(func=reindex)
    reindex_parser.add_argument(
        '--entities', action='append', help='The entities to reindex')

    watch_parser = subparsers.add_parser(
        "watch", help="Watches for incoming messages on an AMQP queue")
    watch_parser.set_defaults(func=watch)

    parsed = parser.parse_args()
    logger.setLevel(logging.DEBUG if parsed.debug else logging.INFO)
    config.read_config()

    # NOTE(review): only the "reindex" sub-parser defines --entities, so
    # the lookup below would raise KeyError for "watch" -- confirm.
    options = vars(parsed)
    parsed.func(options["entities"], options["debug"])


if __name__ == '__main__':
    main()
|
<commit_before># Copyright (c) 2014 Wieland Hoffmann
# License: MIT, see LICENSE for details
import argparse
import logging
from . import config
from .indexing import reindex
logger = logging.getLogger("sir")
def watch(args):
    """Watch an AMQP queue for incoming messages (not implemented yet)."""
    raise NotImplementedError


def main():
    """CLI entry point: configure logging, parse arguments, dispatch."""
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter(
        fmt="%(threadName)s %(levelname)s: %(message)s"))
    logger.addHandler(handler)

    parser = argparse.ArgumentParser()
    parser.add_argument("-d", "--debug", action="store_true")
    subparsers = parser.add_subparsers()

    reindex_parser = subparsers.add_parser(
        "reindex", help="Reindexes all or a single entity type")
    reindex_parser.set_defaults(func=reindex)
    reindex_parser.add_argument(
        '--entities', action='append', help='The entities to reindex')

    watch_parser = subparsers.add_parser(
        "watch", help="Watches for incoming messages on an AMQP queue")
    watch_parser.set_defaults(func=watch)

    parsed = parser.parse_args()
    logger.setLevel(logging.DEBUG if parsed.debug else logging.INFO)
    config.read_config()

    # NOTE(review): only the "reindex" sub-parser defines --entities, so
    # the lookup below would raise KeyError for "watch" -- confirm.
    options = vars(parsed)
    parsed.func(options["entities"], options["debug"])
if __name__ == '__main__':
main()<commit_msg>Include timestamps in log messages<commit_after>
|
# Copyright (c) 2014 Wieland Hoffmann
# License: MIT, see LICENSE for details
import argparse
import logging
from . import config
from .indexing import reindex
logger = logging.getLogger("sir")
def watch(args):
    """Watch an AMQP queue for incoming messages (not implemented yet)."""
    raise NotImplementedError


def main():
    """CLI entry point: configure logging, parse arguments, dispatch."""
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter(
        fmt="%(asctime)s %(threadName)s %(levelname)s: %(message)s"))
    logger.addHandler(handler)

    parser = argparse.ArgumentParser()
    parser.add_argument("-d", "--debug", action="store_true")
    subparsers = parser.add_subparsers()

    reindex_parser = subparsers.add_parser(
        "reindex", help="Reindexes all or a single entity type")
    reindex_parser.set_defaults(func=reindex)
    reindex_parser.add_argument(
        '--entities', action='append', help='The entities to reindex')

    watch_parser = subparsers.add_parser(
        "watch", help="Watches for incoming messages on an AMQP queue")
    watch_parser.set_defaults(func=watch)

    parsed = parser.parse_args()
    logger.setLevel(logging.DEBUG if parsed.debug else logging.INFO)
    config.read_config()

    # NOTE(review): only the "reindex" sub-parser defines --entities, so
    # the lookup below would raise KeyError for "watch" -- confirm.
    options = vars(parsed)
    parsed.func(options["entities"], options["debug"])


if __name__ == '__main__':
    main()
|
# Copyright (c) 2014 Wieland Hoffmann
# License: MIT, see LICENSE for details
import argparse
import logging
from . import config
from .indexing import reindex
logger = logging.getLogger("sir")
def watch(args):
    """Watch an AMQP queue for incoming messages (not implemented yet)."""
    raise NotImplementedError


def main():
    """CLI entry point: configure logging, parse arguments, dispatch."""
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter(
        fmt="%(threadName)s %(levelname)s: %(message)s"))
    logger.addHandler(handler)

    parser = argparse.ArgumentParser()
    parser.add_argument("-d", "--debug", action="store_true")
    subparsers = parser.add_subparsers()

    reindex_parser = subparsers.add_parser(
        "reindex", help="Reindexes all or a single entity type")
    reindex_parser.set_defaults(func=reindex)
    reindex_parser.add_argument(
        '--entities', action='append', help='The entities to reindex')

    watch_parser = subparsers.add_parser(
        "watch", help="Watches for incoming messages on an AMQP queue")
    watch_parser.set_defaults(func=watch)

    parsed = parser.parse_args()
    logger.setLevel(logging.DEBUG if parsed.debug else logging.INFO)
    config.read_config()

    # NOTE(review): only the "reindex" sub-parser defines --entities, so
    # the lookup below would raise KeyError for "watch" -- confirm.
    options = vars(parsed)
    parsed.func(options["entities"], options["debug"])
if __name__ == '__main__':
main()Include timestamps in log messages# Copyright (c) 2014 Wieland Hoffmann
# License: MIT, see LICENSE for details
import argparse
import logging
from . import config
from .indexing import reindex
logger = logging.getLogger("sir")
def watch(args):
    """Watch an AMQP queue for incoming messages (not implemented yet)."""
    raise NotImplementedError


def main():
    """CLI entry point: configure logging, parse arguments, dispatch."""
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter(
        fmt="%(asctime)s %(threadName)s %(levelname)s: %(message)s"))
    logger.addHandler(handler)

    parser = argparse.ArgumentParser()
    parser.add_argument("-d", "--debug", action="store_true")
    subparsers = parser.add_subparsers()

    reindex_parser = subparsers.add_parser(
        "reindex", help="Reindexes all or a single entity type")
    reindex_parser.set_defaults(func=reindex)
    reindex_parser.add_argument(
        '--entities', action='append', help='The entities to reindex')

    watch_parser = subparsers.add_parser(
        "watch", help="Watches for incoming messages on an AMQP queue")
    watch_parser.set_defaults(func=watch)

    parsed = parser.parse_args()
    logger.setLevel(logging.DEBUG if parsed.debug else logging.INFO)
    config.read_config()

    # NOTE(review): only the "reindex" sub-parser defines --entities, so
    # the lookup below would raise KeyError for "watch" -- confirm.
    options = vars(parsed)
    parsed.func(options["entities"], options["debug"])
if __name__ == '__main__':
main()
|
<commit_before># Copyright (c) 2014 Wieland Hoffmann
# License: MIT, see LICENSE for details
import argparse
import logging
from . import config
from .indexing import reindex
logger = logging.getLogger("sir")
def watch(args):
    """Watch an AMQP queue for incoming messages (not implemented yet)."""
    raise NotImplementedError


def main():
    """CLI entry point: configure logging, parse arguments, dispatch."""
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter(
        fmt="%(threadName)s %(levelname)s: %(message)s"))
    logger.addHandler(handler)

    parser = argparse.ArgumentParser()
    parser.add_argument("-d", "--debug", action="store_true")
    subparsers = parser.add_subparsers()

    reindex_parser = subparsers.add_parser(
        "reindex", help="Reindexes all or a single entity type")
    reindex_parser.set_defaults(func=reindex)
    reindex_parser.add_argument(
        '--entities', action='append', help='The entities to reindex')

    watch_parser = subparsers.add_parser(
        "watch", help="Watches for incoming messages on an AMQP queue")
    watch_parser.set_defaults(func=watch)

    parsed = parser.parse_args()
    logger.setLevel(logging.DEBUG if parsed.debug else logging.INFO)
    config.read_config()

    # NOTE(review): only the "reindex" sub-parser defines --entities, so
    # the lookup below would raise KeyError for "watch" -- confirm.
    options = vars(parsed)
    parsed.func(options["entities"], options["debug"])
if __name__ == '__main__':
main()<commit_msg>Include timestamps in log messages<commit_after># Copyright (c) 2014 Wieland Hoffmann
# License: MIT, see LICENSE for details
import argparse
import logging
from . import config
from .indexing import reindex
logger = logging.getLogger("sir")
def watch(args):
    """Watch an AMQP queue for incoming messages (not implemented yet)."""
    raise NotImplementedError


def main():
    """CLI entry point: configure logging, parse arguments, dispatch."""
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter(
        fmt="%(asctime)s %(threadName)s %(levelname)s: %(message)s"))
    logger.addHandler(handler)

    parser = argparse.ArgumentParser()
    parser.add_argument("-d", "--debug", action="store_true")
    subparsers = parser.add_subparsers()

    reindex_parser = subparsers.add_parser(
        "reindex", help="Reindexes all or a single entity type")
    reindex_parser.set_defaults(func=reindex)
    reindex_parser.add_argument(
        '--entities', action='append', help='The entities to reindex')

    watch_parser = subparsers.add_parser(
        "watch", help="Watches for incoming messages on an AMQP queue")
    watch_parser.set_defaults(func=watch)

    parsed = parser.parse_args()
    logger.setLevel(logging.DEBUG if parsed.debug else logging.INFO)
    config.read_config()

    # NOTE(review): only the "reindex" sub-parser defines --entities, so
    # the lookup below would raise KeyError for "watch" -- confirm.
    options = vars(parsed)
    parsed.func(options["entities"], options["debug"])


if __name__ == '__main__':
    main()
|
2156fbea296484d528a1fbd1a2f4e4ac76af970d
|
salt/states/disk.py
|
salt/states/disk.py
|
'''
Disk monitoring state
Monitor the state of disk resources
'''
def status(name, max=None, min=None):
'''
Return the current disk usage stats for the named device
'''
# Monitoring state, no changes will be made so no test interface needed
ret = {'name': name,
'result': False,
'comment': '',
'changes': {},
'data': {}} # Data field for monitoring state
data = disk.usage()
if not name in data:
ret['result'] = False
ret['comment'] += 'Named disk mount not present '
return ret
if max:
try:
if isinstance(max, basestring):
max = int(max.strip('%'))
except Exception:
ret['comment'] += 'Max argument must be an integer '
if min:
try:
if isinstance(min, basestring):
min = int(min.strip('%'))
except Exception:
ret['comment'] += 'Min argument must be an integer '
if min and max:
if min >= max:
ret['comment'] += 'Min must be less than max'
if ret['comment']:
return ret
cap = int(data[name]['capacity'].strip('%'))
ret['data'] = data[name]
if min:
if cap < min:
ret['comment'] = 'Disk is below minimum of {0} at {1}'.format(
min, cap)
return ret
if max:
if cap > max:
ret['comment'] = 'Disk is below maximum of {0} at {1}'.format(
max, cap)
return ret
ret['comment'] = 'Disk in acceptable range'
ret['result'] = True
return ret
|
'''
Disk monitoring state
Monitor the state of disk resources
'''
def status(name, max=None, min=None):
    '''
    Return the current disk usage stats for the named device

    name
        The mount point to check
    max
        Largest acceptable used capacity, as an integer percentage
    min
        Smallest acceptable used capacity, as an integer percentage
    '''
    # Monitoring state, no changes will be made so no test interface needed
    ret = {'name': name,
           'result': False,
           'comment': '',
           'changes': {},
           'data': {}}  # Data field for monitoring state
    # Cross-call the disk execution module through __salt__
    data = __salt__['disk.usage']()
    if not name in data:
        ret['result'] = False
        ret['comment'] += 'Named disk mount not present '
        return ret
    if max:
        try:
            # Accept percentages given as strings such as '90%'
            if isinstance(max, basestring):
                max = int(max.strip('%'))
        except Exception:
            ret['comment'] += 'Max argument must be an integer '
    if min:
        try:
            if isinstance(min, basestring):
                min = int(min.strip('%'))
        except Exception:
            ret['comment'] += 'Min argument must be an integer '
    if min and max:
        if min >= max:
            ret['comment'] += 'Min must be less than max'
    if ret['comment']:
        # Argument validation failed; report without checking usage
        return ret
    cap = int(data[name]['capacity'].strip('%'))
    ret['data'] = data[name]
    if min:
        if cap < min:
            ret['comment'] = 'Disk is below minimum of {0} at {1}'.format(
                    min, cap)
            return ret
    if max:
        if cap > max:
            # Capacity exceeds the maximum, so report "above", not "below"
            ret['comment'] = 'Disk is above maximum of {0} at {1}'.format(
                    max, cap)
            return ret
    ret['comment'] = 'Disk in acceptable range'
    ret['result'] = True
    return ret
|
Fix bad ref, forgot the __salt__ :P
|
Fix bad ref, forgot the __salt__ :P
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
'''
Disk monitoring state
Monitor the state of disk resources
'''
def status(name, max=None, min=None):
    '''
    Return the current disk usage stats for the named device

    name
        The mount point to check
    max
        Largest acceptable used capacity, as an integer percentage
    min
        Smallest acceptable used capacity, as an integer percentage
    '''
    # Monitoring state, no changes will be made so no test interface needed
    ret = {'name': name,
           'result': False,
           'comment': '',
           'changes': {},
           'data': {}}  # Data field for monitoring state
    # Cross-call the execution module via __salt__; the bare ``disk.usage()``
    # name is never imported in this module and raised a NameError.
    data = __salt__['disk.usage']()
    if not name in data:
        ret['result'] = False
        ret['comment'] += 'Named disk mount not present '
        return ret
    if max:
        try:
            # Accept percentages given as strings such as '90%'
            if isinstance(max, basestring):
                max = int(max.strip('%'))
        except Exception:
            ret['comment'] += 'Max argument must be an integer '
    if min:
        try:
            if isinstance(min, basestring):
                min = int(min.strip('%'))
        except Exception:
            ret['comment'] += 'Min argument must be an integer '
    if min and max:
        if min >= max:
            ret['comment'] += 'Min must be less than max'
    if ret['comment']:
        # Argument validation failed; report without checking usage
        return ret
    cap = int(data[name]['capacity'].strip('%'))
    ret['data'] = data[name]
    if min:
        if cap < min:
            ret['comment'] = 'Disk is below minimum of {0} at {1}'.format(
                    min, cap)
            return ret
    if max:
        if cap > max:
            # Capacity exceeds the maximum, so report "above", not "below"
            ret['comment'] = 'Disk is above maximum of {0} at {1}'.format(
                    max, cap)
            return ret
    ret['comment'] = 'Disk in acceptable range'
    ret['result'] = True
    return ret
Fix bad ref, forgot the __salt__ :P
|
'''
Disk monitoring state
Monitor the state of disk resources
'''
def status(name, max=None, min=None):
    '''
    Return the current disk usage stats for the named device

    name
        The mount point to check
    max
        Largest acceptable used capacity, as an integer percentage
    min
        Smallest acceptable used capacity, as an integer percentage
    '''
    # Monitoring state, no changes will be made so no test interface needed
    ret = {'name': name,
           'result': False,
           'comment': '',
           'changes': {},
           'data': {}}  # Data field for monitoring state
    # Cross-call the disk execution module through __salt__
    data = __salt__['disk.usage']()
    if not name in data:
        ret['result'] = False
        ret['comment'] += 'Named disk mount not present '
        return ret
    if max:
        try:
            # Accept percentages given as strings such as '90%'
            if isinstance(max, basestring):
                max = int(max.strip('%'))
        except Exception:
            ret['comment'] += 'Max argument must be an integer '
    if min:
        try:
            if isinstance(min, basestring):
                min = int(min.strip('%'))
        except Exception:
            ret['comment'] += 'Min argument must be an integer '
    if min and max:
        if min >= max:
            ret['comment'] += 'Min must be less than max'
    if ret['comment']:
        # Argument validation failed; report without checking usage
        return ret
    cap = int(data[name]['capacity'].strip('%'))
    ret['data'] = data[name]
    if min:
        if cap < min:
            ret['comment'] = 'Disk is below minimum of {0} at {1}'.format(
                    min, cap)
            return ret
    if max:
        if cap > max:
            # Capacity exceeds the maximum, so report "above", not "below"
            ret['comment'] = 'Disk is above maximum of {0} at {1}'.format(
                    max, cap)
            return ret
    ret['comment'] = 'Disk in acceptable range'
    ret['result'] = True
    return ret
|
<commit_before>'''
Disk monitoring state
Monitor the state of disk resources
'''
def status(name, max=None, min=None):
    '''
    Return the current disk usage stats for the named device

    name
        The mount point to check
    max
        Largest acceptable used capacity, as an integer percentage
    min
        Smallest acceptable used capacity, as an integer percentage
    '''
    # Monitoring state, no changes will be made so no test interface needed
    ret = {'name': name,
           'result': False,
           'comment': '',
           'changes': {},
           'data': {}}  # Data field for monitoring state
    # Cross-call the execution module via __salt__; the bare ``disk.usage()``
    # name is never imported in this module and raised a NameError.
    data = __salt__['disk.usage']()
    if not name in data:
        ret['result'] = False
        ret['comment'] += 'Named disk mount not present '
        return ret
    if max:
        try:
            # Accept percentages given as strings such as '90%'
            if isinstance(max, basestring):
                max = int(max.strip('%'))
        except Exception:
            ret['comment'] += 'Max argument must be an integer '
    if min:
        try:
            if isinstance(min, basestring):
                min = int(min.strip('%'))
        except Exception:
            ret['comment'] += 'Min argument must be an integer '
    if min and max:
        if min >= max:
            ret['comment'] += 'Min must be less than max'
    if ret['comment']:
        # Argument validation failed; report without checking usage
        return ret
    cap = int(data[name]['capacity'].strip('%'))
    ret['data'] = data[name]
    if min:
        if cap < min:
            ret['comment'] = 'Disk is below minimum of {0} at {1}'.format(
                    min, cap)
            return ret
    if max:
        if cap > max:
            # Capacity exceeds the maximum, so report "above", not "below"
            ret['comment'] = 'Disk is above maximum of {0} at {1}'.format(
                    max, cap)
            return ret
    ret['comment'] = 'Disk in acceptable range'
    ret['result'] = True
    return ret
<commit_msg>Fix bad ref, forgot the __salt__ :P<commit_after>
|
'''
Disk monitoring state
Monitor the state of disk resources
'''
def status(name, max=None, min=None):
    '''
    Return the current disk usage stats for the named device

    name
        The mount point to check
    max
        Largest acceptable used capacity, as an integer percentage
    min
        Smallest acceptable used capacity, as an integer percentage
    '''
    # Monitoring state, no changes will be made so no test interface needed
    ret = {'name': name,
           'result': False,
           'comment': '',
           'changes': {},
           'data': {}}  # Data field for monitoring state
    # Cross-call the disk execution module through __salt__
    data = __salt__['disk.usage']()
    if not name in data:
        ret['result'] = False
        ret['comment'] += 'Named disk mount not present '
        return ret
    if max:
        try:
            # Accept percentages given as strings such as '90%'
            if isinstance(max, basestring):
                max = int(max.strip('%'))
        except Exception:
            ret['comment'] += 'Max argument must be an integer '
    if min:
        try:
            if isinstance(min, basestring):
                min = int(min.strip('%'))
        except Exception:
            ret['comment'] += 'Min argument must be an integer '
    if min and max:
        if min >= max:
            ret['comment'] += 'Min must be less than max'
    if ret['comment']:
        # Argument validation failed; report without checking usage
        return ret
    cap = int(data[name]['capacity'].strip('%'))
    ret['data'] = data[name]
    if min:
        if cap < min:
            ret['comment'] = 'Disk is below minimum of {0} at {1}'.format(
                    min, cap)
            return ret
    if max:
        if cap > max:
            # Capacity exceeds the maximum, so report "above", not "below"
            ret['comment'] = 'Disk is above maximum of {0} at {1}'.format(
                    max, cap)
            return ret
    ret['comment'] = 'Disk in acceptable range'
    ret['result'] = True
    return ret
|
'''
Disk monitoring state
Monitor the state of disk resources
'''
def status(name, max=None, min=None):
    '''
    Return the current disk usage stats for the named device

    name
        The mount point to check
    max
        Largest acceptable used capacity, as an integer percentage
    min
        Smallest acceptable used capacity, as an integer percentage
    '''
    # Monitoring state, no changes will be made so no test interface needed
    ret = {'name': name,
           'result': False,
           'comment': '',
           'changes': {},
           'data': {}}  # Data field for monitoring state
    # Cross-call the execution module via __salt__; the bare ``disk.usage()``
    # name is never imported in this module and raised a NameError.
    data = __salt__['disk.usage']()
    if not name in data:
        ret['result'] = False
        ret['comment'] += 'Named disk mount not present '
        return ret
    if max:
        try:
            # Accept percentages given as strings such as '90%'
            if isinstance(max, basestring):
                max = int(max.strip('%'))
        except Exception:
            ret['comment'] += 'Max argument must be an integer '
    if min:
        try:
            if isinstance(min, basestring):
                min = int(min.strip('%'))
        except Exception:
            ret['comment'] += 'Min argument must be an integer '
    if min and max:
        if min >= max:
            ret['comment'] += 'Min must be less than max'
    if ret['comment']:
        # Argument validation failed; report without checking usage
        return ret
    cap = int(data[name]['capacity'].strip('%'))
    ret['data'] = data[name]
    if min:
        if cap < min:
            ret['comment'] = 'Disk is below minimum of {0} at {1}'.format(
                    min, cap)
            return ret
    if max:
        if cap > max:
            # Capacity exceeds the maximum, so report "above", not "below"
            ret['comment'] = 'Disk is above maximum of {0} at {1}'.format(
                    max, cap)
            return ret
    ret['comment'] = 'Disk in acceptable range'
    ret['result'] = True
    return ret
Fix bad ref, forgot the __salt__ :P'''
Disk monitoring state
Monitor the state of disk resources
'''
def status(name, max=None, min=None):
    '''
    Return the current disk usage stats for the named device

    name
        The mount point to check
    max
        Largest acceptable used capacity, as an integer percentage
    min
        Smallest acceptable used capacity, as an integer percentage
    '''
    # Monitoring state, no changes will be made so no test interface needed
    ret = {'name': name,
           'result': False,
           'comment': '',
           'changes': {},
           'data': {}}  # Data field for monitoring state
    # Cross-call the disk execution module through __salt__
    data = __salt__['disk.usage']()
    if not name in data:
        ret['result'] = False
        ret['comment'] += 'Named disk mount not present '
        return ret
    if max:
        try:
            # Accept percentages given as strings such as '90%'
            if isinstance(max, basestring):
                max = int(max.strip('%'))
        except Exception:
            ret['comment'] += 'Max argument must be an integer '
    if min:
        try:
            if isinstance(min, basestring):
                min = int(min.strip('%'))
        except Exception:
            ret['comment'] += 'Min argument must be an integer '
    if min and max:
        if min >= max:
            ret['comment'] += 'Min must be less than max'
    if ret['comment']:
        # Argument validation failed; report without checking usage
        return ret
    cap = int(data[name]['capacity'].strip('%'))
    ret['data'] = data[name]
    if min:
        if cap < min:
            ret['comment'] = 'Disk is below minimum of {0} at {1}'.format(
                    min, cap)
            return ret
    if max:
        if cap > max:
            # Capacity exceeds the maximum, so report "above", not "below"
            ret['comment'] = 'Disk is above maximum of {0} at {1}'.format(
                    max, cap)
            return ret
    ret['comment'] = 'Disk in acceptable range'
    ret['result'] = True
    return ret
|
<commit_before>'''
Disk monitoring state
Monitor the state of disk resources
'''
def status(name, max=None, min=None):
    '''
    Return the current disk usage stats for the named device

    name
        The mount point to check
    max
        Largest acceptable used capacity, as an integer percentage
    min
        Smallest acceptable used capacity, as an integer percentage
    '''
    # Monitoring state, no changes will be made so no test interface needed
    ret = {'name': name,
           'result': False,
           'comment': '',
           'changes': {},
           'data': {}}  # Data field for monitoring state
    # Cross-call the execution module via __salt__; the bare ``disk.usage()``
    # name is never imported in this module and raised a NameError.
    data = __salt__['disk.usage']()
    if not name in data:
        ret['result'] = False
        ret['comment'] += 'Named disk mount not present '
        return ret
    if max:
        try:
            # Accept percentages given as strings such as '90%'
            if isinstance(max, basestring):
                max = int(max.strip('%'))
        except Exception:
            ret['comment'] += 'Max argument must be an integer '
    if min:
        try:
            if isinstance(min, basestring):
                min = int(min.strip('%'))
        except Exception:
            ret['comment'] += 'Min argument must be an integer '
    if min and max:
        if min >= max:
            ret['comment'] += 'Min must be less than max'
    if ret['comment']:
        # Argument validation failed; report without checking usage
        return ret
    cap = int(data[name]['capacity'].strip('%'))
    ret['data'] = data[name]
    if min:
        if cap < min:
            ret['comment'] = 'Disk is below minimum of {0} at {1}'.format(
                    min, cap)
            return ret
    if max:
        if cap > max:
            # Capacity exceeds the maximum, so report "above", not "below"
            ret['comment'] = 'Disk is above maximum of {0} at {1}'.format(
                    max, cap)
            return ret
    ret['comment'] = 'Disk in acceptable range'
    ret['result'] = True
    return ret
<commit_msg>Fix bad ref, forgot the __salt__ :P<commit_after>'''
Disk monitoring state
Monitor the state of disk resources
'''
def status(name, max=None, min=None):
    '''
    Return the current disk usage stats for the named device

    name
        The mount point to check
    max
        Largest acceptable used capacity, as an integer percentage
    min
        Smallest acceptable used capacity, as an integer percentage
    '''
    # Monitoring state, no changes will be made so no test interface needed
    ret = {'name': name,
           'result': False,
           'comment': '',
           'changes': {},
           'data': {}}  # Data field for monitoring state
    # Cross-call the disk execution module through __salt__
    data = __salt__['disk.usage']()
    if not name in data:
        ret['result'] = False
        ret['comment'] += 'Named disk mount not present '
        return ret
    if max:
        try:
            # Accept percentages given as strings such as '90%'
            if isinstance(max, basestring):
                max = int(max.strip('%'))
        except Exception:
            ret['comment'] += 'Max argument must be an integer '
    if min:
        try:
            if isinstance(min, basestring):
                min = int(min.strip('%'))
        except Exception:
            ret['comment'] += 'Min argument must be an integer '
    if min and max:
        if min >= max:
            ret['comment'] += 'Min must be less than max'
    if ret['comment']:
        # Argument validation failed; report without checking usage
        return ret
    cap = int(data[name]['capacity'].strip('%'))
    ret['data'] = data[name]
    if min:
        if cap < min:
            ret['comment'] = 'Disk is below minimum of {0} at {1}'.format(
                    min, cap)
            return ret
    if max:
        if cap > max:
            # Capacity exceeds the maximum, so report "above", not "below"
            ret['comment'] = 'Disk is above maximum of {0} at {1}'.format(
                    max, cap)
            return ret
    ret['comment'] = 'Disk in acceptable range'
    ret['result'] = True
    return ret
|
27e2dcb42f4d485b09aa043a19dfc37a8d01c4c5
|
test_package.py
|
test_package.py
|
import caniusepython3 as ciu
def main():
    """Print caniusepython3's Python 3 readiness report for one project.

    The project name is taken from the first command line argument.
    """
    import sys

    project_name = sys.argv[1]
    print(ciu.check(projects=[project_name]))


if __name__ == '__main__':
    main()
|
from urllib.parse import urlparse
import caniusepython3 as ciu
def main():
    """Report whether the project named in sys.argv[1] supports Python 3.

    Exits 0 when the py3 trove classifier is present; otherwise reports
    whether the project is hosted on GitHub and exits 1.
    """
    import sys

    project = sys.argv[1]
    # check if there is github page
    with ciu.pypi.pypi_client() as client:
        releases = client.package_releases(project)
        if not releases:
            print('NO releases found for {}'.format(project))
            sys.exit(1)

        latest = releases[0]
        package_data = client.package_data(project, latest)
        if 'Programming Language :: Python :: 3' in package_data['classifiers']:
            print('py3 OK')
            sys.exit(0)

        hosted_on_github = (
            urlparse(package_data['home_page']).netloc == 'github.com')
        if hosted_on_github:
            print('github FOUND')
        else:
            print('github NOT FOUND')
        print('py3 NOT OK')
        sys.exit(1)


if __name__ == '__main__':
    main()
|
Update the script to directly use pypi client
|
Update the script to directly use pypi client
* Faster to get to the data
|
Python
|
mit
|
PythonCharmers/autoporter
|
import caniusepython3 as ciu


def main():
    """Run a caniusepython3 check on the project named on the command line."""
    import sys
    target = sys.argv[1]
    print(ciu.check(projects=[target]))


if __name__ == '__main__':
    main()
Update the script to directly use pypi client
* Faster to get to the data
|
from urllib.parse import urlparse
import caniusepython3 as ciu


def main():
    """
    Decide whether the project named on the command line supports Python 3,
    and report whether its homepage is hosted on GitHub when it does not.
    Exits 0 on Python 3 support, 1 otherwise.
    """
    import sys
    project = sys.argv[1]
    # check if there is github page
    with ciu.pypi.pypi_client() as client:
        releases = client.package_releases(project)
        if not releases:
            print('NO releases found for {}'.format(project))
            sys.exit(1)
        latest = releases[0]
        info = client.package_data(project, latest)
        if 'Programming Language :: Python :: 3' in info['classifiers']:
            print('py3 OK')
            sys.exit(0)
        hosted_on_github = urlparse(info['home_page']).netloc == 'github.com'
        if hosted_on_github:
            print('github FOUND')
        else:
            print('github NOT FOUND')
        print('py3 NOT OK')
        sys.exit(1)


if __name__ == '__main__':
    main()
|
<commit_before>import caniusepython3 as ciu
def main():
import sys
project = sys.argv[1]
result = ciu.check(projects=[project])
print(result)
if __name__ == '__main__':
main()
<commit_msg>Update the script to directly use pypi client
* Faster to get to the data<commit_after>
|
from urllib.parse import urlparse
import caniusepython3 as ciu
def main():
import sys
project = sys.argv[1]
# check if there is github page
with ciu.pypi.pypi_client() as client:
releases = client.package_releases(project)
if not releases:
print('NO releases found for {}'.format(project))
sys.exit(1)
got_github = False
version = releases[0]
package_data = client.package_data(project, version)
classifiers = package_data['classifiers']
if 'Programming Language :: Python :: 3' in classifiers:
print('py3 OK')
sys.exit(0)
r = urlparse(package_data['home_page'])
if r.netloc == 'github.com':
got_github = True
if got_github:
print('github FOUND')
else:
print('github NOT FOUND')
print('py3 NOT OK')
sys.exit(1)
if __name__ == '__main__':
main()
|
import caniusepython3 as ciu
def main():
import sys
project = sys.argv[1]
result = ciu.check(projects=[project])
print(result)
if __name__ == '__main__':
main()
Update the script to directly use pypi client
* Faster to get to the datafrom urllib.parse import urlparse
import caniusepython3 as ciu
def main():
import sys
project = sys.argv[1]
# check if there is github page
with ciu.pypi.pypi_client() as client:
releases = client.package_releases(project)
if not releases:
print('NO releases found for {}'.format(project))
sys.exit(1)
got_github = False
version = releases[0]
package_data = client.package_data(project, version)
classifiers = package_data['classifiers']
if 'Programming Language :: Python :: 3' in classifiers:
print('py3 OK')
sys.exit(0)
r = urlparse(package_data['home_page'])
if r.netloc == 'github.com':
got_github = True
if got_github:
print('github FOUND')
else:
print('github NOT FOUND')
print('py3 NOT OK')
sys.exit(1)
if __name__ == '__main__':
main()
|
<commit_before>import caniusepython3 as ciu
def main():
import sys
project = sys.argv[1]
result = ciu.check(projects=[project])
print(result)
if __name__ == '__main__':
main()
<commit_msg>Update the script to directly use pypi client
* Faster to get to the data<commit_after>from urllib.parse import urlparse
import caniusepython3 as ciu
def main():
import sys
project = sys.argv[1]
# check if there is github page
with ciu.pypi.pypi_client() as client:
releases = client.package_releases(project)
if not releases:
print('NO releases found for {}'.format(project))
sys.exit(1)
got_github = False
version = releases[0]
package_data = client.package_data(project, version)
classifiers = package_data['classifiers']
if 'Programming Language :: Python :: 3' in classifiers:
print('py3 OK')
sys.exit(0)
r = urlparse(package_data['home_page'])
if r.netloc == 'github.com':
got_github = True
if got_github:
print('github FOUND')
else:
print('github NOT FOUND')
print('py3 NOT OK')
sys.exit(1)
if __name__ == '__main__':
main()
|
696716ed9fb93f12bcb36d16611ea26bead0aafe
|
test_portend.py
|
test_portend.py
|
import socket
import pytest
import portend
def socket_infos():
"""
Generate addr infos for connections to localhost
"""
host = ''
port = portend.find_available_local_port()
infos = socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket.SOCK_STREAM)
for info in infos:
yield host, port, info
@pytest.fixture(params=list(socket_infos()))
def listening_addr(request):
host, port, info = request.param
af, socktype, proto, canonname, sa = info
sock = socket.socket(af, socktype, proto)
sock.bind(sa)
sock.listen(5)
try:
yield sa
finally:
sock.close()
class TestCheckPort:
def test_check_port_listening(self, listening_addr):
with pytest.raises(IOError):
portend._check_port(*listening_addr[:2])
|
import socket
import pytest
import portend
def socket_infos():
"""
Generate addr infos for connections to localhost
"""
host = ''
port = portend.find_available_local_port()
return socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket.SOCK_STREAM)
def id_for_info(info):
af, = info[:1]
return str(af)
def build_listening_infos():
params = list(socket_infos())
ids = list(map(id_for_info, params))
return locals()
@pytest.fixture(**build_listening_infos())
def listening_addr(request):
af, socktype, proto, canonname, sa = request.param
sock = socket.socket(af, socktype, proto)
sock.bind(sa)
sock.listen(5)
try:
yield sa
finally:
sock.close()
class TestCheckPort:
def test_check_port_listening(self, listening_addr):
with pytest.raises(IOError):
portend._check_port(*listening_addr[:2])
|
Refactor fixture to provide meaningful ids
|
Refactor fixture to provide meaningful ids
|
Python
|
mit
|
jaraco/portend
|
import socket

import pytest

import portend


def socket_infos():
    """
    Generate addr infos for connections to localhost.

    Yields (host, port, addrinfo) triples, one per available address family.
    """
    host = ''
    port = portend.find_available_local_port()
    for info in socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket.SOCK_STREAM):
        yield host, port, info


@pytest.fixture(params=list(socket_infos()))
def listening_addr(request):
    """Yield a sockaddr that has a live listening socket bound to it."""
    host, port, (family, kind, proto, canonname, sockaddr) = request.param
    listener = socket.socket(family, kind, proto)
    listener.bind(sockaddr)
    listener.listen(5)
    try:
        yield sockaddr
    finally:
        listener.close()


class TestCheckPort:
    def test_check_port_listening(self, listening_addr):
        with pytest.raises(IOError):
            portend._check_port(*listening_addr[:2])
Refactor fixture to provide meaningful ids
|
import socket

import pytest

import portend


def socket_infos():
    """
    Return addr infos for connections to localhost, one 5-tuple per
    address family available on this machine (e.g. IPv4 and IPv6).
    """
    host = ''
    # One free port is picked once; every addrinfo shares it.
    port = portend.find_available_local_port()
    return socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket.SOCK_STREAM)


def id_for_info(info):
    """Build a readable pytest id from an addrinfo's address family."""
    af, = info[:1]  # first element of the 5-tuple is the address family
    return str(af)


def build_listening_infos():
    """
    Return the keyword arguments for the ``pytest.fixture`` call below:
    ``params`` (the addrinfo tuples) and ``ids`` (human-readable test ids).
    ``return locals()`` deliberately yields exactly {'params': ..., 'ids': ...},
    matching pytest.fixture's keyword argument names.
    """
    params = list(socket_infos())
    ids = list(map(id_for_info, params))
    return locals()


@pytest.fixture(**build_listening_infos())
def listening_addr(request):
    # Bind and listen on the parametrized address, yield the sockaddr,
    # and always close the socket afterwards.
    af, socktype, proto, canonname, sa = request.param
    sock = socket.socket(af, socktype, proto)
    sock.bind(sa)
    sock.listen(5)
    try:
        yield sa
    finally:
        sock.close()


class TestCheckPort:
    def test_check_port_listening(self, listening_addr):
        # _check_port is expected to raise when something is already
        # listening on (host, port).
        with pytest.raises(IOError):
            portend._check_port(*listening_addr[:2])
|
<commit_before>import socket
import pytest
import portend
def socket_infos():
"""
Generate addr infos for connections to localhost
"""
host = ''
port = portend.find_available_local_port()
infos = socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket.SOCK_STREAM)
for info in infos:
yield host, port, info
@pytest.fixture(params=list(socket_infos()))
def listening_addr(request):
host, port, info = request.param
af, socktype, proto, canonname, sa = info
sock = socket.socket(af, socktype, proto)
sock.bind(sa)
sock.listen(5)
try:
yield sa
finally:
sock.close()
class TestCheckPort:
def test_check_port_listening(self, listening_addr):
with pytest.raises(IOError):
portend._check_port(*listening_addr[:2])
<commit_msg>Refactor fixture to provide meaningful ids<commit_after>
|
import socket
import pytest
import portend
def socket_infos():
"""
Generate addr infos for connections to localhost
"""
host = ''
port = portend.find_available_local_port()
return socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket.SOCK_STREAM)
def id_for_info(info):
af, = info[:1]
return str(af)
def build_listening_infos():
params = list(socket_infos())
ids = list(map(id_for_info, params))
return locals()
@pytest.fixture(**build_listening_infos())
def listening_addr(request):
af, socktype, proto, canonname, sa = request.param
sock = socket.socket(af, socktype, proto)
sock.bind(sa)
sock.listen(5)
try:
yield sa
finally:
sock.close()
class TestCheckPort:
def test_check_port_listening(self, listening_addr):
with pytest.raises(IOError):
portend._check_port(*listening_addr[:2])
|
import socket
import pytest
import portend
def socket_infos():
"""
Generate addr infos for connections to localhost
"""
host = ''
port = portend.find_available_local_port()
infos = socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket.SOCK_STREAM)
for info in infos:
yield host, port, info
@pytest.fixture(params=list(socket_infos()))
def listening_addr(request):
host, port, info = request.param
af, socktype, proto, canonname, sa = info
sock = socket.socket(af, socktype, proto)
sock.bind(sa)
sock.listen(5)
try:
yield sa
finally:
sock.close()
class TestCheckPort:
def test_check_port_listening(self, listening_addr):
with pytest.raises(IOError):
portend._check_port(*listening_addr[:2])
Refactor fixture to provide meaningful idsimport socket
import pytest
import portend
def socket_infos():
"""
Generate addr infos for connections to localhost
"""
host = ''
port = portend.find_available_local_port()
return socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket.SOCK_STREAM)
def id_for_info(info):
af, = info[:1]
return str(af)
def build_listening_infos():
params = list(socket_infos())
ids = list(map(id_for_info, params))
return locals()
@pytest.fixture(**build_listening_infos())
def listening_addr(request):
af, socktype, proto, canonname, sa = request.param
sock = socket.socket(af, socktype, proto)
sock.bind(sa)
sock.listen(5)
try:
yield sa
finally:
sock.close()
class TestCheckPort:
def test_check_port_listening(self, listening_addr):
with pytest.raises(IOError):
portend._check_port(*listening_addr[:2])
|
<commit_before>import socket
import pytest
import portend
def socket_infos():
"""
Generate addr infos for connections to localhost
"""
host = ''
port = portend.find_available_local_port()
infos = socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket.SOCK_STREAM)
for info in infos:
yield host, port, info
@pytest.fixture(params=list(socket_infos()))
def listening_addr(request):
host, port, info = request.param
af, socktype, proto, canonname, sa = info
sock = socket.socket(af, socktype, proto)
sock.bind(sa)
sock.listen(5)
try:
yield sa
finally:
sock.close()
class TestCheckPort:
def test_check_port_listening(self, listening_addr):
with pytest.raises(IOError):
portend._check_port(*listening_addr[:2])
<commit_msg>Refactor fixture to provide meaningful ids<commit_after>import socket
import pytest
import portend
def socket_infos():
"""
Generate addr infos for connections to localhost
"""
host = ''
port = portend.find_available_local_port()
return socket.getaddrinfo(host, port, socket.AF_UNSPEC, socket.SOCK_STREAM)
def id_for_info(info):
af, = info[:1]
return str(af)
def build_listening_infos():
params = list(socket_infos())
ids = list(map(id_for_info, params))
return locals()
@pytest.fixture(**build_listening_infos())
def listening_addr(request):
af, socktype, proto, canonname, sa = request.param
sock = socket.socket(af, socktype, proto)
sock.bind(sa)
sock.listen(5)
try:
yield sa
finally:
sock.close()
class TestCheckPort:
def test_check_port_listening(self, listening_addr):
with pytest.raises(IOError):
portend._check_port(*listening_addr[:2])
|
ce344bac2ca4ddb027a50f523e6bd8ce04de6ca8
|
matrix.py
|
matrix.py
|
from __future__ import division
import itertools
def get_offsets(span):
    """
    Return the set of (dx, dy) offsets forming the square ring at
    Chebyshev distance `span` from the origin.

    span=0 yields just {(0, 0)}; span=1 yields the 8 surrounding cells.

    Raises ValueError for a negative `span`.
    """
    if span < 0:
        raise ValueError('Cannot return neighbours for negative distance')
    # Ring = full (2*span+1)-square minus the interior square.
    all_offsets = set(itertools.product(range(-span, span + 1), repeat=2))
    if span >= 1:
        inner_offsets = set(itertools.product(range(-(span - 1), span), repeat=2))
    else:
        inner_offsets = set()
    return all_offsets - inner_offsets


def find_neighbours_2D(array, start, span):
    """
    Return the set of (row, col) coordinates at Chebyshev distance `span`
    from `start` that fall inside `array`'s bounds.

    Out-of-bounds neighbours are skipped rather than clamped to the edge:
    clamping collapsed distinct offsets onto border cells, producing wrong
    and duplicate neighbours near the boundary.
    """
    x, y = start  # Start coords
    rows = len(array)
    cols = len(array[0])  # Assumes a rectangular (non-ragged) array
    neighbours = set()
    for dx, dy in get_offsets(span):
        i = x + dx
        j = y + dy
        if 0 <= i < rows and 0 <= j < cols:
            neighbours.add((i, j))
    return neighbours
def new(size, value=None):
    """Build a size x size matrix with every cell set to `value`."""
    return [[value for _ in range(size)] for _ in range(size)]
|
from __future__ import division
import itertools
def get_offsets(span):
"""
Get matrix offsets for a square of distance `span`.
"""
if span < 0:
raise ValueError('Cannot return neighbours for negative distance')
all_offsets = set(itertools.product([x for x in range(-span, span + 1)], repeat=2))
if span >= 1:
inner_offsets = set(itertools.product([x for x in range(-(span - 1), span)], repeat=2))
else:
inner_offsets = set()
return all_offsets - inner_offsets
def find_neighbours_2D(array, start, span):
"""
Return neighbours in a 2D array, given a start point and range.
Checks array bounds so we don't get index errors.
"""
x, y = start # Start coords
rows = len(array) # How many rows
cols = len(array[0]) # Assume square matrix
offsets = get_offsets(span)
neighbours = set()
for dx, dy in offsets:
i = x + dx
j = y + dy
if i < 0 or i >= rows or j < 0 or j >= cols:
continue # Don't add coords outside of array bounds
neighbours.add((i, j))
return neighbours
def new(size, value=None):
""" Initialize a new square matrix. """
return [[value] * size for _ in range(size)]
|
Fix find_neighbours_2D to ignore out of bounds points
|
Fix find_neighbours_2D to ignore out of bounds points
... rather than 'trim' the coords, which made no sense.
|
Python
|
mit
|
supermitch/Island-Gen
|
from __future__ import division
import itertools
def get_offsets(span):
"""
Get matrix offsets for a square of distance `span`.
"""
if span < 0:
raise ValueError('Cannot return neighbours for negative distance')
all_offsets = set(itertools.product([x for x in range(-span, span + 1)], repeat=2))
if span >= 1:
inner_offsets = set(itertools.product([x for x in range(-(span - 1), span)], repeat=2))
else:
inner_offsets = set()
return all_offsets - inner_offsets
def find_neighbours_2D(array, start, span):
"""
Return neighbours in a 2D array, given a start point and range.
"""
x, y = start # Start coords
rows = len(array) # How many rows
cols = len(array[0]) # Assume square matrix
offsets = get_offsets(span)
print(offsets)
neighbours = set()
for dx, dy in offsets:
i = max(0, min(x + dx, rows))
j = max(0, min(y + dy, cols))
neighbours.add((i, j))
return neighbours
def new(size, value=None):
""" Initialize a new square matrix. """
return [[value] * size for _ in range(size)]
Fix find_neighbours_2D to ignore out of bounds points
... rather than 'trim' the coords, which made no sense.
|
from __future__ import division
import itertools
def get_offsets(span):
"""
Get matrix offsets for a square of distance `span`.
"""
if span < 0:
raise ValueError('Cannot return neighbours for negative distance')
all_offsets = set(itertools.product([x for x in range(-span, span + 1)], repeat=2))
if span >= 1:
inner_offsets = set(itertools.product([x for x in range(-(span - 1), span)], repeat=2))
else:
inner_offsets = set()
return all_offsets - inner_offsets
def find_neighbours_2D(array, start, span):
"""
Return neighbours in a 2D array, given a start point and range.
Checks array bounds so we don't get index errors.
"""
x, y = start # Start coords
rows = len(array) # How many rows
cols = len(array[0]) # Assume square matrix
offsets = get_offsets(span)
neighbours = set()
for dx, dy in offsets:
i = x + dx
j = y + dy
if i < 0 or i >= rows or j < 0 or j >= cols:
continue # Don't add coords outside of array bounds
neighbours.add((i, j))
return neighbours
def new(size, value=None):
""" Initialize a new square matrix. """
return [[value] * size for _ in range(size)]
|
<commit_before>from __future__ import division
import itertools
def get_offsets(span):
"""
Get matrix offsets for a square of distance `span`.
"""
if span < 0:
raise ValueError('Cannot return neighbours for negative distance')
all_offsets = set(itertools.product([x for x in range(-span, span + 1)], repeat=2))
if span >= 1:
inner_offsets = set(itertools.product([x for x in range(-(span - 1), span)], repeat=2))
else:
inner_offsets = set()
return all_offsets - inner_offsets
def find_neighbours_2D(array, start, span):
"""
Return neighbours in a 2D array, given a start point and range.
"""
x, y = start # Start coords
rows = len(array) # How many rows
cols = len(array[0]) # Assume square matrix
offsets = get_offsets(span)
print(offsets)
neighbours = set()
for dx, dy in offsets:
i = max(0, min(x + dx, rows))
j = max(0, min(y + dy, cols))
neighbours.add((i, j))
return neighbours
def new(size, value=None):
""" Initialize a new square matrix. """
return [[value] * size for _ in range(size)]
<commit_msg>Fix find_neighbours_2D to ignore out of bounds points
... rather than 'trim' the coords, which made no sense.<commit_after>
|
from __future__ import division
import itertools
def get_offsets(span):
    """
    Get matrix offsets for a square of distance `span`.
    """
    if span < 0:
        raise ValueError('Cannot return neighbours for negative distance')
    full_square = set(itertools.product(range(-span, span + 1), repeat=2))
    inner_square = set()
    if span >= 1:
        inner_square = set(itertools.product(range(-(span - 1), span), repeat=2))
    return full_square - inner_square


def find_neighbours_2D(array, start, span):
    """
    Return neighbours in a 2D array, given a start point and range.
    Checks array bounds so we don't get index errors.
    """
    row_count = len(array)
    col_count = len(array[0])  # Assume square matrix
    x, y = start
    # Keep only coordinates that land inside the array.
    return {
        (x + dx, y + dy)
        for dx, dy in get_offsets(span)
        if 0 <= x + dx < row_count and 0 <= y + dy < col_count
    }


def new(size, value=None):
    """ Initialize a new square matrix. """
    return [[value for _ in range(size)] for _ in range(size)]
|
from __future__ import division
import itertools
def get_offsets(span):
"""
Get matrix offsets for a square of distance `span`.
"""
if span < 0:
raise ValueError('Cannot return neighbours for negative distance')
all_offsets = set(itertools.product([x for x in range(-span, span + 1)], repeat=2))
if span >= 1:
inner_offsets = set(itertools.product([x for x in range(-(span - 1), span)], repeat=2))
else:
inner_offsets = set()
return all_offsets - inner_offsets
def find_neighbours_2D(array, start, span):
"""
Return neighbours in a 2D array, given a start point and range.
"""
x, y = start # Start coords
rows = len(array) # How many rows
cols = len(array[0]) # Assume square matrix
offsets = get_offsets(span)
print(offsets)
neighbours = set()
for dx, dy in offsets:
i = max(0, min(x + dx, rows))
j = max(0, min(y + dy, cols))
neighbours.add((i, j))
return neighbours
def new(size, value=None):
""" Initialize a new square matrix. """
return [[value] * size for _ in range(size)]
Fix find_neighbours_2D to ignore out of bounds points
... rather than 'trim' the coords, which made no sense.from __future__ import division
import itertools
def get_offsets(span):
"""
Get matrix offsets for a square of distance `span`.
"""
if span < 0:
raise ValueError('Cannot return neighbours for negative distance')
all_offsets = set(itertools.product([x for x in range(-span, span + 1)], repeat=2))
if span >= 1:
inner_offsets = set(itertools.product([x for x in range(-(span - 1), span)], repeat=2))
else:
inner_offsets = set()
return all_offsets - inner_offsets
def find_neighbours_2D(array, start, span):
"""
Return neighbours in a 2D array, given a start point and range.
Checks array bounds so we don't get index errors.
"""
x, y = start # Start coords
rows = len(array) # How many rows
cols = len(array[0]) # Assume square matrix
offsets = get_offsets(span)
neighbours = set()
for dx, dy in offsets:
i = x + dx
j = y + dy
if i < 0 or i >= rows or j < 0 or j >= cols:
continue # Don't add coords outside of array bounds
neighbours.add((i, j))
return neighbours
def new(size, value=None):
""" Initialize a new square matrix. """
return [[value] * size for _ in range(size)]
|
<commit_before>from __future__ import division
import itertools
def get_offsets(span):
"""
Get matrix offsets for a square of distance `span`.
"""
if span < 0:
raise ValueError('Cannot return neighbours for negative distance')
all_offsets = set(itertools.product([x for x in range(-span, span + 1)], repeat=2))
if span >= 1:
inner_offsets = set(itertools.product([x for x in range(-(span - 1), span)], repeat=2))
else:
inner_offsets = set()
return all_offsets - inner_offsets
def find_neighbours_2D(array, start, span):
"""
Return neighbours in a 2D array, given a start point and range.
"""
x, y = start # Start coords
rows = len(array) # How many rows
cols = len(array[0]) # Assume square matrix
offsets = get_offsets(span)
print(offsets)
neighbours = set()
for dx, dy in offsets:
i = max(0, min(x + dx, rows))
j = max(0, min(y + dy, cols))
neighbours.add((i, j))
return neighbours
def new(size, value=None):
""" Initialize a new square matrix. """
return [[value] * size for _ in range(size)]
<commit_msg>Fix find_neighbours_2D to ignore out of bounds points
... rather than 'trim' the coords, which made no sense.<commit_after>from __future__ import division
import itertools
def get_offsets(span):
"""
Get matrix offsets for a square of distance `span`.
"""
if span < 0:
raise ValueError('Cannot return neighbours for negative distance')
all_offsets = set(itertools.product([x for x in range(-span, span + 1)], repeat=2))
if span >= 1:
inner_offsets = set(itertools.product([x for x in range(-(span - 1), span)], repeat=2))
else:
inner_offsets = set()
return all_offsets - inner_offsets
def find_neighbours_2D(array, start, span):
"""
Return neighbours in a 2D array, given a start point and range.
Checks array bounds so we don't get index errors.
"""
x, y = start # Start coords
rows = len(array) # How many rows
cols = len(array[0]) # Assume square matrix
offsets = get_offsets(span)
neighbours = set()
for dx, dy in offsets:
i = x + dx
j = y + dy
if i < 0 or i >= rows or j < 0 or j >= cols:
continue # Don't add coords outside of array bounds
neighbours.add((i, j))
return neighbours
def new(size, value=None):
""" Initialize a new square matrix. """
return [[value] * size for _ in range(size)]
|
b7d8e70bf74be142f70bf12635a4bb1632d166ed
|
funnel/forms/label.py
|
funnel/forms/label.py
|
# -*- coding: utf-8 -*-
from baseframe import __
import baseframe.forms as forms
__all__ = ['LabelForm', 'LabelOptionForm']
class LabelForm(forms.Form):
    """Form for creating or editing a label on a project."""

    # Hidden identifier for the label; optional because it is absent when
    # creating a new label. NOTE(review): presumably generated from `title`
    # server-side — confirm against the view that processes this form.
    name = forms.StringField(
        "", widget=forms.HiddenInput(), validators=[forms.validators.Optional()]
    )
    # Display title; required, capped at 250 characters, whitespace-stripped.
    title = forms.StringField(
        __("Label"),
        validators=[
            forms.validators.DataRequired(__(u"This can’t be empty")),
            forms.validators.Length(max=250),
        ],
        filters=[forms.filters.strip()],
    )
    # Optional emoji shown with the label. No validation in this version:
    # any string is accepted.
    icon_emoji = forms.StringField("")
    # If set, proposers must pick one of this label's options when submitting.
    required = forms.BooleanField(
        __("Make this label mandatory in proposal forms"),
        default=False,
        description=__("If checked, proposers must select one of the options"),
    )
    # If set, only editors/reviewers may apply this label to proposals.
    restricted = forms.BooleanField(
        __("Restrict use of this label to editors"),
        default=False,
        description=__(
            "If checked, only editors and reviewers can apply this label on proposals"
        ),
    )
class LabelOptionForm(forms.Form):
    """Form for a single selectable option under a label."""

    # Hidden identifier for the option; optional when creating a new option.
    name = forms.StringField(
        "", widget=forms.HiddenInput(), validators=[forms.validators.Optional()]
    )
    # Display title; required, capped at 250 characters, whitespace-stripped.
    title = forms.StringField(
        __("Option"),
        validators=[
            forms.validators.DataRequired(__(u"This can’t be empty")),
            forms.validators.Length(max=250),
        ],
        filters=[forms.filters.strip()],
    )
    # Optional emoji shown with the option; unvalidated in this version.
    icon_emoji = forms.StringField("")
    # Hidden ordering index controlling the display order of options.
    seq = forms.IntegerField("", widget=forms.HiddenInput())
|
# -*- coding: utf-8 -*-
from baseframe import __
import baseframe.forms as forms
__all__ = ['LabelForm', 'LabelOptionForm']
class LabelForm(forms.Form):
name = forms.StringField(
"", widget=forms.HiddenInput(), validators=[forms.validators.Optional()]
)
title = forms.StringField(
__("Label"),
validators=[
forms.validators.DataRequired(__(u"This can’t be empty")),
forms.validators.Length(max=250),
],
filters=[forms.filters.strip()],
)
icon_emoji = forms.StringField(
"", validators=[forms.validators.IsEmoji()]
)
required = forms.BooleanField(
__("Make this label mandatory in proposal forms"),
default=False,
description=__("If checked, proposers must select one of the options"),
)
restricted = forms.BooleanField(
__("Restrict use of this label to editors"),
default=False,
description=__(
"If checked, only editors and reviewers can apply this label on proposals"
),
)
class LabelOptionForm(forms.Form):
name = forms.StringField(
"", widget=forms.HiddenInput(), validators=[forms.validators.Optional()]
)
title = forms.StringField(
__("Option"),
validators=[
forms.validators.DataRequired(__(u"This can’t be empty")),
forms.validators.Length(max=250),
],
filters=[forms.filters.strip()],
)
icon_emoji = forms.StringField(
"", validators=[forms.validators.IsEmoji()]
)
seq = forms.IntegerField("", widget=forms.HiddenInput())
|
Add form validator for icon_emoji
|
Add form validator for icon_emoji
|
Python
|
agpl-3.0
|
hasgeek/funnel,hasgeek/funnel,hasgeek/funnel,hasgeek/funnel,hasgeek/funnel
|
# -*- coding: utf-8 -*-
from baseframe import __
import baseframe.forms as forms
__all__ = ['LabelForm', 'LabelOptionForm']
class LabelForm(forms.Form):
name = forms.StringField(
"", widget=forms.HiddenInput(), validators=[forms.validators.Optional()]
)
title = forms.StringField(
__("Label"),
validators=[
forms.validators.DataRequired(__(u"This can’t be empty")),
forms.validators.Length(max=250),
],
filters=[forms.filters.strip()],
)
icon_emoji = forms.StringField("")
required = forms.BooleanField(
__("Make this label mandatory in proposal forms"),
default=False,
description=__("If checked, proposers must select one of the options"),
)
restricted = forms.BooleanField(
__("Restrict use of this label to editors"),
default=False,
description=__(
"If checked, only editors and reviewers can apply this label on proposals"
),
)
class LabelOptionForm(forms.Form):
name = forms.StringField(
"", widget=forms.HiddenInput(), validators=[forms.validators.Optional()]
)
title = forms.StringField(
__("Option"),
validators=[
forms.validators.DataRequired(__(u"This can’t be empty")),
forms.validators.Length(max=250),
],
filters=[forms.filters.strip()],
)
icon_emoji = forms.StringField("")
seq = forms.IntegerField("", widget=forms.HiddenInput())
Add form validator for icon_emoji
|
# -*- coding: utf-8 -*-
from baseframe import __
import baseframe.forms as forms
__all__ = ['LabelForm', 'LabelOptionForm']
class LabelForm(forms.Form):
name = forms.StringField(
"", widget=forms.HiddenInput(), validators=[forms.validators.Optional()]
)
title = forms.StringField(
__("Label"),
validators=[
forms.validators.DataRequired(__(u"This can’t be empty")),
forms.validators.Length(max=250),
],
filters=[forms.filters.strip()],
)
icon_emoji = forms.StringField(
"", validators=[forms.validators.IsEmoji()]
)
required = forms.BooleanField(
__("Make this label mandatory in proposal forms"),
default=False,
description=__("If checked, proposers must select one of the options"),
)
restricted = forms.BooleanField(
__("Restrict use of this label to editors"),
default=False,
description=__(
"If checked, only editors and reviewers can apply this label on proposals"
),
)
class LabelOptionForm(forms.Form):
name = forms.StringField(
"", widget=forms.HiddenInput(), validators=[forms.validators.Optional()]
)
title = forms.StringField(
__("Option"),
validators=[
forms.validators.DataRequired(__(u"This can’t be empty")),
forms.validators.Length(max=250),
],
filters=[forms.filters.strip()],
)
icon_emoji = forms.StringField(
"", validators=[forms.validators.IsEmoji()]
)
seq = forms.IntegerField("", widget=forms.HiddenInput())
|
<commit_before># -*- coding: utf-8 -*-
from baseframe import __
import baseframe.forms as forms
__all__ = ['LabelForm', 'LabelOptionForm']
class LabelForm(forms.Form):
name = forms.StringField(
"", widget=forms.HiddenInput(), validators=[forms.validators.Optional()]
)
title = forms.StringField(
__("Label"),
validators=[
forms.validators.DataRequired(__(u"This can’t be empty")),
forms.validators.Length(max=250),
],
filters=[forms.filters.strip()],
)
icon_emoji = forms.StringField("")
required = forms.BooleanField(
__("Make this label mandatory in proposal forms"),
default=False,
description=__("If checked, proposers must select one of the options"),
)
restricted = forms.BooleanField(
__("Restrict use of this label to editors"),
default=False,
description=__(
"If checked, only editors and reviewers can apply this label on proposals"
),
)
class LabelOptionForm(forms.Form):
name = forms.StringField(
"", widget=forms.HiddenInput(), validators=[forms.validators.Optional()]
)
title = forms.StringField(
__("Option"),
validators=[
forms.validators.DataRequired(__(u"This can’t be empty")),
forms.validators.Length(max=250),
],
filters=[forms.filters.strip()],
)
icon_emoji = forms.StringField("")
seq = forms.IntegerField("", widget=forms.HiddenInput())
<commit_msg>Add form validator for icon_emoji<commit_after>
|
# -*- coding: utf-8 -*-
from baseframe import __
import baseframe.forms as forms
__all__ = ['LabelForm', 'LabelOptionForm']
class LabelForm(forms.Form):
name = forms.StringField(
"", widget=forms.HiddenInput(), validators=[forms.validators.Optional()]
)
title = forms.StringField(
__("Label"),
validators=[
forms.validators.DataRequired(__(u"This can’t be empty")),
forms.validators.Length(max=250),
],
filters=[forms.filters.strip()],
)
icon_emoji = forms.StringField(
"", validators=[forms.validators.IsEmoji()]
)
required = forms.BooleanField(
__("Make this label mandatory in proposal forms"),
default=False,
description=__("If checked, proposers must select one of the options"),
)
restricted = forms.BooleanField(
__("Restrict use of this label to editors"),
default=False,
description=__(
"If checked, only editors and reviewers can apply this label on proposals"
),
)
class LabelOptionForm(forms.Form):
name = forms.StringField(
"", widget=forms.HiddenInput(), validators=[forms.validators.Optional()]
)
title = forms.StringField(
__("Option"),
validators=[
forms.validators.DataRequired(__(u"This can’t be empty")),
forms.validators.Length(max=250),
],
filters=[forms.filters.strip()],
)
icon_emoji = forms.StringField(
"", validators=[forms.validators.IsEmoji()]
)
seq = forms.IntegerField("", widget=forms.HiddenInput())
|
# -*- coding: utf-8 -*-
from baseframe import __
import baseframe.forms as forms
__all__ = ['LabelForm', 'LabelOptionForm']
class LabelForm(forms.Form):
name = forms.StringField(
"", widget=forms.HiddenInput(), validators=[forms.validators.Optional()]
)
title = forms.StringField(
__("Label"),
validators=[
forms.validators.DataRequired(__(u"This can’t be empty")),
forms.validators.Length(max=250),
],
filters=[forms.filters.strip()],
)
icon_emoji = forms.StringField("")
required = forms.BooleanField(
__("Make this label mandatory in proposal forms"),
default=False,
description=__("If checked, proposers must select one of the options"),
)
restricted = forms.BooleanField(
__("Restrict use of this label to editors"),
default=False,
description=__(
"If checked, only editors and reviewers can apply this label on proposals"
),
)
class LabelOptionForm(forms.Form):
name = forms.StringField(
"", widget=forms.HiddenInput(), validators=[forms.validators.Optional()]
)
title = forms.StringField(
__("Option"),
validators=[
forms.validators.DataRequired(__(u"This can’t be empty")),
forms.validators.Length(max=250),
],
filters=[forms.filters.strip()],
)
icon_emoji = forms.StringField("")
seq = forms.IntegerField("", widget=forms.HiddenInput())
Add form validator for icon_emoji# -*- coding: utf-8 -*-
from baseframe import __
import baseframe.forms as forms
__all__ = ['LabelForm', 'LabelOptionForm']
class LabelForm(forms.Form):
name = forms.StringField(
"", widget=forms.HiddenInput(), validators=[forms.validators.Optional()]
)
title = forms.StringField(
__("Label"),
validators=[
forms.validators.DataRequired(__(u"This can’t be empty")),
forms.validators.Length(max=250),
],
filters=[forms.filters.strip()],
)
icon_emoji = forms.StringField(
"", validators=[forms.validators.IsEmoji()]
)
required = forms.BooleanField(
__("Make this label mandatory in proposal forms"),
default=False,
description=__("If checked, proposers must select one of the options"),
)
restricted = forms.BooleanField(
__("Restrict use of this label to editors"),
default=False,
description=__(
"If checked, only editors and reviewers can apply this label on proposals"
),
)
class LabelOptionForm(forms.Form):
name = forms.StringField(
"", widget=forms.HiddenInput(), validators=[forms.validators.Optional()]
)
title = forms.StringField(
__("Option"),
validators=[
forms.validators.DataRequired(__(u"This can’t be empty")),
forms.validators.Length(max=250),
],
filters=[forms.filters.strip()],
)
icon_emoji = forms.StringField(
"", validators=[forms.validators.IsEmoji()]
)
seq = forms.IntegerField("", widget=forms.HiddenInput())
|
<commit_before># -*- coding: utf-8 -*-
from baseframe import __
import baseframe.forms as forms
__all__ = ['LabelForm', 'LabelOptionForm']
class LabelForm(forms.Form):
name = forms.StringField(
"", widget=forms.HiddenInput(), validators=[forms.validators.Optional()]
)
title = forms.StringField(
__("Label"),
validators=[
forms.validators.DataRequired(__(u"This can’t be empty")),
forms.validators.Length(max=250),
],
filters=[forms.filters.strip()],
)
icon_emoji = forms.StringField("")
required = forms.BooleanField(
__("Make this label mandatory in proposal forms"),
default=False,
description=__("If checked, proposers must select one of the options"),
)
restricted = forms.BooleanField(
__("Restrict use of this label to editors"),
default=False,
description=__(
"If checked, only editors and reviewers can apply this label on proposals"
),
)
class LabelOptionForm(forms.Form):
name = forms.StringField(
"", widget=forms.HiddenInput(), validators=[forms.validators.Optional()]
)
title = forms.StringField(
__("Option"),
validators=[
forms.validators.DataRequired(__(u"This can’t be empty")),
forms.validators.Length(max=250),
],
filters=[forms.filters.strip()],
)
icon_emoji = forms.StringField("")
seq = forms.IntegerField("", widget=forms.HiddenInput())
<commit_msg>Add form validator for icon_emoji<commit_after># -*- coding: utf-8 -*-
from baseframe import __
import baseframe.forms as forms
__all__ = ['LabelForm', 'LabelOptionForm']
class LabelForm(forms.Form):
name = forms.StringField(
"", widget=forms.HiddenInput(), validators=[forms.validators.Optional()]
)
title = forms.StringField(
__("Label"),
validators=[
forms.validators.DataRequired(__(u"This can’t be empty")),
forms.validators.Length(max=250),
],
filters=[forms.filters.strip()],
)
icon_emoji = forms.StringField(
"", validators=[forms.validators.IsEmoji()]
)
required = forms.BooleanField(
__("Make this label mandatory in proposal forms"),
default=False,
description=__("If checked, proposers must select one of the options"),
)
restricted = forms.BooleanField(
__("Restrict use of this label to editors"),
default=False,
description=__(
"If checked, only editors and reviewers can apply this label on proposals"
),
)
class LabelOptionForm(forms.Form):
name = forms.StringField(
"", widget=forms.HiddenInput(), validators=[forms.validators.Optional()]
)
title = forms.StringField(
__("Option"),
validators=[
forms.validators.DataRequired(__(u"This can’t be empty")),
forms.validators.Length(max=250),
],
filters=[forms.filters.strip()],
)
icon_emoji = forms.StringField(
"", validators=[forms.validators.IsEmoji()]
)
seq = forms.IntegerField("", widget=forms.HiddenInput())
|
eeb7b25b59d7ed28c76a288fd7e29c8953ac4503
|
pluginsmanager/model/lv2/lv2_effect_builder.py
|
pluginsmanager/model/lv2/lv2_effect_builder.py
|
import os
import json
from pluginsmanager.model.lv2.lv2_plugin import Lv2Plugin
from pluginsmanager.model.lv2.lv2_effect import Lv2Effect
class Lv2EffectBuilder(object):
"""
Generates lv2 audio plugins instance (as :class:`Lv2Effect` object).
.. note::
In the current implementation, the data plugins are persisted
in *plugins.json*.
"""
def __init__(self, plugins_json=None):
self.plugins = {}
if plugins_json is None:
plugins_json = os.path.dirname(__file__) + '/plugins.json'
with open(plugins_json) as data_file:
data = json.load(data_file)
supported_plugins = self._supported_plugins
for plugin in data:
if plugin['uri'] in supported_plugins:
self.plugins[plugin['uri']] = Lv2Plugin(plugin)
@property
def _supported_plugins(self):
import subprocess
return str(subprocess.check_output(['lv2ls'])).split('\\n')
@property
def all(self):
return self.plugins
def build(self, lv2_uri):
"""
Returns a new :class:`Lv2Effect` by the valid lv2_uri
:param string lv2_uri:
:return Lv2Effect: Effect created
"""
return Lv2Effect(self.plugins[lv2_uri])
|
import os
import json
from pluginsmanager.model.lv2.lv2_plugin import Lv2Plugin
from pluginsmanager.model.lv2.lv2_effect import Lv2Effect
class Lv2EffectBuilder(object):
"""
Generates lv2 audio plugins instance (as :class:`Lv2Effect` object).
.. note::
In the current implementation, the data plugins are persisted
in *plugins.json*.
"""
def __init__(self, plugins_json=None):
self.plugins = {}
if plugins_json is None:
plugins_json = os.path.dirname(__file__) + '/plugins.json'
with open(plugins_json) as data_file:
data = json.load(data_file)
#supported_plugins = self._supported_plugins
for plugin in data:
# if plugin['uri'] in supported_plugins:
self.plugins[plugin['uri']] = Lv2Plugin(plugin)
@property
def _supported_plugins(self):
import subprocess
return str(subprocess.check_output(['lv2ls'])).split('\\n')
@property
def all(self):
return self.plugins
def build(self, lv2_uri):
"""
Returns a new :class:`Lv2Effect` by the valid lv2_uri
:param string lv2_uri:
:return Lv2Effect: Effect created
"""
return Lv2Effect(self.plugins[lv2_uri])
|
Fix lv2 effect builder for travis build
|
Fix lv2 effect builder for travis build
|
Python
|
apache-2.0
|
PedalPi/PluginsManager
|
import os
import json
from pluginsmanager.model.lv2.lv2_plugin import Lv2Plugin
from pluginsmanager.model.lv2.lv2_effect import Lv2Effect
class Lv2EffectBuilder(object):
"""
Generates lv2 audio plugins instance (as :class:`Lv2Effect` object).
.. note::
In the current implementation, the data plugins are persisted
in *plugins.json*.
"""
def __init__(self, plugins_json=None):
self.plugins = {}
if plugins_json is None:
plugins_json = os.path.dirname(__file__) + '/plugins.json'
with open(plugins_json) as data_file:
data = json.load(data_file)
supported_plugins = self._supported_plugins
for plugin in data:
if plugin['uri'] in supported_plugins:
self.plugins[plugin['uri']] = Lv2Plugin(plugin)
@property
def _supported_plugins(self):
import subprocess
return str(subprocess.check_output(['lv2ls'])).split('\\n')
@property
def all(self):
return self.plugins
def build(self, lv2_uri):
"""
Returns a new :class:`Lv2Effect` by the valid lv2_uri
:param string lv2_uri:
:return Lv2Effect: Effect created
"""
return Lv2Effect(self.plugins[lv2_uri])
Fix lv2 effect builder for travis build
|
import os
import json
from pluginsmanager.model.lv2.lv2_plugin import Lv2Plugin
from pluginsmanager.model.lv2.lv2_effect import Lv2Effect
class Lv2EffectBuilder(object):
"""
Generates lv2 audio plugins instance (as :class:`Lv2Effect` object).
.. note::
In the current implementation, the data plugins are persisted
in *plugins.json*.
"""
def __init__(self, plugins_json=None):
self.plugins = {}
if plugins_json is None:
plugins_json = os.path.dirname(__file__) + '/plugins.json'
with open(plugins_json) as data_file:
data = json.load(data_file)
#supported_plugins = self._supported_plugins
for plugin in data:
# if plugin['uri'] in supported_plugins:
self.plugins[plugin['uri']] = Lv2Plugin(plugin)
@property
def _supported_plugins(self):
import subprocess
return str(subprocess.check_output(['lv2ls'])).split('\\n')
@property
def all(self):
return self.plugins
def build(self, lv2_uri):
"""
Returns a new :class:`Lv2Effect` by the valid lv2_uri
:param string lv2_uri:
:return Lv2Effect: Effect created
"""
return Lv2Effect(self.plugins[lv2_uri])
|
<commit_before>import os
import json
from pluginsmanager.model.lv2.lv2_plugin import Lv2Plugin
from pluginsmanager.model.lv2.lv2_effect import Lv2Effect
class Lv2EffectBuilder(object):
"""
Generates lv2 audio plugins instance (as :class:`Lv2Effect` object).
.. note::
In the current implementation, the data plugins are persisted
in *plugins.json*.
"""
def __init__(self, plugins_json=None):
self.plugins = {}
if plugins_json is None:
plugins_json = os.path.dirname(__file__) + '/plugins.json'
with open(plugins_json) as data_file:
data = json.load(data_file)
supported_plugins = self._supported_plugins
for plugin in data:
if plugin['uri'] in supported_plugins:
self.plugins[plugin['uri']] = Lv2Plugin(plugin)
@property
def _supported_plugins(self):
import subprocess
return str(subprocess.check_output(['lv2ls'])).split('\\n')
@property
def all(self):
return self.plugins
def build(self, lv2_uri):
"""
Returns a new :class:`Lv2Effect` by the valid lv2_uri
:param string lv2_uri:
:return Lv2Effect: Effect created
"""
return Lv2Effect(self.plugins[lv2_uri])
<commit_msg>Fix lv2 effect builder for travis build<commit_after>
|
import os
import json
from pluginsmanager.model.lv2.lv2_plugin import Lv2Plugin
from pluginsmanager.model.lv2.lv2_effect import Lv2Effect
class Lv2EffectBuilder(object):
"""
Generates lv2 audio plugins instance (as :class:`Lv2Effect` object).
.. note::
In the current implementation, the data plugins are persisted
in *plugins.json*.
"""
def __init__(self, plugins_json=None):
self.plugins = {}
if plugins_json is None:
plugins_json = os.path.dirname(__file__) + '/plugins.json'
with open(plugins_json) as data_file:
data = json.load(data_file)
#supported_plugins = self._supported_plugins
for plugin in data:
# if plugin['uri'] in supported_plugins:
self.plugins[plugin['uri']] = Lv2Plugin(plugin)
@property
def _supported_plugins(self):
import subprocess
return str(subprocess.check_output(['lv2ls'])).split('\\n')
@property
def all(self):
return self.plugins
def build(self, lv2_uri):
"""
Returns a new :class:`Lv2Effect` by the valid lv2_uri
:param string lv2_uri:
:return Lv2Effect: Effect created
"""
return Lv2Effect(self.plugins[lv2_uri])
|
import os
import json
from pluginsmanager.model.lv2.lv2_plugin import Lv2Plugin
from pluginsmanager.model.lv2.lv2_effect import Lv2Effect
class Lv2EffectBuilder(object):
"""
Generates lv2 audio plugins instance (as :class:`Lv2Effect` object).
.. note::
In the current implementation, the data plugins are persisted
in *plugins.json*.
"""
def __init__(self, plugins_json=None):
self.plugins = {}
if plugins_json is None:
plugins_json = os.path.dirname(__file__) + '/plugins.json'
with open(plugins_json) as data_file:
data = json.load(data_file)
supported_plugins = self._supported_plugins
for plugin in data:
if plugin['uri'] in supported_plugins:
self.plugins[plugin['uri']] = Lv2Plugin(plugin)
@property
def _supported_plugins(self):
import subprocess
return str(subprocess.check_output(['lv2ls'])).split('\\n')
@property
def all(self):
return self.plugins
def build(self, lv2_uri):
"""
Returns a new :class:`Lv2Effect` by the valid lv2_uri
:param string lv2_uri:
:return Lv2Effect: Effect created
"""
return Lv2Effect(self.plugins[lv2_uri])
Fix lv2 effect builder for travis buildimport os
import json
from pluginsmanager.model.lv2.lv2_plugin import Lv2Plugin
from pluginsmanager.model.lv2.lv2_effect import Lv2Effect
class Lv2EffectBuilder(object):
"""
Generates lv2 audio plugins instance (as :class:`Lv2Effect` object).
.. note::
In the current implementation, the data plugins are persisted
in *plugins.json*.
"""
def __init__(self, plugins_json=None):
self.plugins = {}
if plugins_json is None:
plugins_json = os.path.dirname(__file__) + '/plugins.json'
with open(plugins_json) as data_file:
data = json.load(data_file)
#supported_plugins = self._supported_plugins
for plugin in data:
# if plugin['uri'] in supported_plugins:
self.plugins[plugin['uri']] = Lv2Plugin(plugin)
@property
def _supported_plugins(self):
import subprocess
return str(subprocess.check_output(['lv2ls'])).split('\\n')
@property
def all(self):
return self.plugins
def build(self, lv2_uri):
"""
Returns a new :class:`Lv2Effect` by the valid lv2_uri
:param string lv2_uri:
:return Lv2Effect: Effect created
"""
return Lv2Effect(self.plugins[lv2_uri])
|
<commit_before>import os
import json
from pluginsmanager.model.lv2.lv2_plugin import Lv2Plugin
from pluginsmanager.model.lv2.lv2_effect import Lv2Effect
class Lv2EffectBuilder(object):
"""
Generates lv2 audio plugins instance (as :class:`Lv2Effect` object).
.. note::
In the current implementation, the data plugins are persisted
in *plugins.json*.
"""
def __init__(self, plugins_json=None):
self.plugins = {}
if plugins_json is None:
plugins_json = os.path.dirname(__file__) + '/plugins.json'
with open(plugins_json) as data_file:
data = json.load(data_file)
supported_plugins = self._supported_plugins
for plugin in data:
if plugin['uri'] in supported_plugins:
self.plugins[plugin['uri']] = Lv2Plugin(plugin)
@property
def _supported_plugins(self):
import subprocess
return str(subprocess.check_output(['lv2ls'])).split('\\n')
@property
def all(self):
return self.plugins
def build(self, lv2_uri):
"""
Returns a new :class:`Lv2Effect` by the valid lv2_uri
:param string lv2_uri:
:return Lv2Effect: Effect created
"""
return Lv2Effect(self.plugins[lv2_uri])
<commit_msg>Fix lv2 effect builder for travis build<commit_after>import os
import json
from pluginsmanager.model.lv2.lv2_plugin import Lv2Plugin
from pluginsmanager.model.lv2.lv2_effect import Lv2Effect
class Lv2EffectBuilder(object):
"""
Generates lv2 audio plugins instance (as :class:`Lv2Effect` object).
.. note::
In the current implementation, the data plugins are persisted
in *plugins.json*.
"""
def __init__(self, plugins_json=None):
self.plugins = {}
if plugins_json is None:
plugins_json = os.path.dirname(__file__) + '/plugins.json'
with open(plugins_json) as data_file:
data = json.load(data_file)
#supported_plugins = self._supported_plugins
for plugin in data:
# if plugin['uri'] in supported_plugins:
self.plugins[plugin['uri']] = Lv2Plugin(plugin)
@property
def _supported_plugins(self):
import subprocess
return str(subprocess.check_output(['lv2ls'])).split('\\n')
@property
def all(self):
return self.plugins
def build(self, lv2_uri):
"""
Returns a new :class:`Lv2Effect` by the valid lv2_uri
:param string lv2_uri:
:return Lv2Effect: Effect created
"""
return Lv2Effect(self.plugins[lv2_uri])
|
e1a61b1f286d7434f2b0d0740f10df8d4b441ec1
|
cloudaux/__about__.py
|
cloudaux/__about__.py
|
__all__ = [
'__title__',
'__summary__',
'__uri__',
'__version__',
'__author__',
'__email__',
'__license__',
'__copyright__'
]
__title__ = 'cloudaux'
__summary__ = 'Cloud Auxiliary is a python wrapper and orchestration module for interacting with cloud providers'
__uri__ = 'https://github.com/Netflix-Skunkworks/cloudaux'
__version__ = '1.4.15'
__author__ = 'Patrick Kelley'
__email__ = 'patrick@netflix.com'
__license__ = 'Apache License, Version 2.0'
__copyright__ = 'Copyright 2017 %s' % __author__
|
__all__ = [
'__title__',
'__summary__',
'__uri__',
'__version__',
'__author__',
'__email__',
'__license__',
'__copyright__'
]
__title__ = 'cloudaux'
__summary__ = 'Cloud Auxiliary is a python wrapper and orchestration module for interacting with cloud providers'
__uri__ = 'https://github.com/Netflix-Skunkworks/cloudaux'
__version__ = '1.4.16'
__author__ = 'Patrick Kelley'
__email__ = 'patrick@netflix.com'
__license__ = 'Apache License, Version 2.0'
__copyright__ = 'Copyright 2017 %s' % __author__
|
Bump version to 1.4.16 for previous merge
|
Bump version to 1.4.16 for previous merge
|
Python
|
apache-2.0
|
Netflix-Skunkworks/cloudaux
|
__all__ = [
'__title__',
'__summary__',
'__uri__',
'__version__',
'__author__',
'__email__',
'__license__',
'__copyright__'
]
__title__ = 'cloudaux'
__summary__ = 'Cloud Auxiliary is a python wrapper and orchestration module for interacting with cloud providers'
__uri__ = 'https://github.com/Netflix-Skunkworks/cloudaux'
__version__ = '1.4.15'
__author__ = 'Patrick Kelley'
__email__ = 'patrick@netflix.com'
__license__ = 'Apache License, Version 2.0'
__copyright__ = 'Copyright 2017 %s' % __author__
Bump version to 1.4.16 for previous merge
|
__all__ = [
'__title__',
'__summary__',
'__uri__',
'__version__',
'__author__',
'__email__',
'__license__',
'__copyright__'
]
__title__ = 'cloudaux'
__summary__ = 'Cloud Auxiliary is a python wrapper and orchestration module for interacting with cloud providers'
__uri__ = 'https://github.com/Netflix-Skunkworks/cloudaux'
__version__ = '1.4.16'
__author__ = 'Patrick Kelley'
__email__ = 'patrick@netflix.com'
__license__ = 'Apache License, Version 2.0'
__copyright__ = 'Copyright 2017 %s' % __author__
|
<commit_before>__all__ = [
'__title__',
'__summary__',
'__uri__',
'__version__',
'__author__',
'__email__',
'__license__',
'__copyright__'
]
__title__ = 'cloudaux'
__summary__ = 'Cloud Auxiliary is a python wrapper and orchestration module for interacting with cloud providers'
__uri__ = 'https://github.com/Netflix-Skunkworks/cloudaux'
__version__ = '1.4.15'
__author__ = 'Patrick Kelley'
__email__ = 'patrick@netflix.com'
__license__ = 'Apache License, Version 2.0'
__copyright__ = 'Copyright 2017 %s' % __author__
<commit_msg>Bump version to 1.4.16 for previous merge<commit_after>
|
__all__ = [
'__title__',
'__summary__',
'__uri__',
'__version__',
'__author__',
'__email__',
'__license__',
'__copyright__'
]
__title__ = 'cloudaux'
__summary__ = 'Cloud Auxiliary is a python wrapper and orchestration module for interacting with cloud providers'
__uri__ = 'https://github.com/Netflix-Skunkworks/cloudaux'
__version__ = '1.4.16'
__author__ = 'Patrick Kelley'
__email__ = 'patrick@netflix.com'
__license__ = 'Apache License, Version 2.0'
__copyright__ = 'Copyright 2017 %s' % __author__
|
__all__ = [
'__title__',
'__summary__',
'__uri__',
'__version__',
'__author__',
'__email__',
'__license__',
'__copyright__'
]
__title__ = 'cloudaux'
__summary__ = 'Cloud Auxiliary is a python wrapper and orchestration module for interacting with cloud providers'
__uri__ = 'https://github.com/Netflix-Skunkworks/cloudaux'
__version__ = '1.4.15'
__author__ = 'Patrick Kelley'
__email__ = 'patrick@netflix.com'
__license__ = 'Apache License, Version 2.0'
__copyright__ = 'Copyright 2017 %s' % __author__
Bump version to 1.4.16 for previous merge__all__ = [
'__title__',
'__summary__',
'__uri__',
'__version__',
'__author__',
'__email__',
'__license__',
'__copyright__'
]
__title__ = 'cloudaux'
__summary__ = 'Cloud Auxiliary is a python wrapper and orchestration module for interacting with cloud providers'
__uri__ = 'https://github.com/Netflix-Skunkworks/cloudaux'
__version__ = '1.4.16'
__author__ = 'Patrick Kelley'
__email__ = 'patrick@netflix.com'
__license__ = 'Apache License, Version 2.0'
__copyright__ = 'Copyright 2017 %s' % __author__
|
<commit_before>__all__ = [
'__title__',
'__summary__',
'__uri__',
'__version__',
'__author__',
'__email__',
'__license__',
'__copyright__'
]
__title__ = 'cloudaux'
__summary__ = 'Cloud Auxiliary is a python wrapper and orchestration module for interacting with cloud providers'
__uri__ = 'https://github.com/Netflix-Skunkworks/cloudaux'
__version__ = '1.4.15'
__author__ = 'Patrick Kelley'
__email__ = 'patrick@netflix.com'
__license__ = 'Apache License, Version 2.0'
__copyright__ = 'Copyright 2017 %s' % __author__
<commit_msg>Bump version to 1.4.16 for previous merge<commit_after>__all__ = [
'__title__',
'__summary__',
'__uri__',
'__version__',
'__author__',
'__email__',
'__license__',
'__copyright__'
]
__title__ = 'cloudaux'
__summary__ = 'Cloud Auxiliary is a python wrapper and orchestration module for interacting with cloud providers'
__uri__ = 'https://github.com/Netflix-Skunkworks/cloudaux'
__version__ = '1.4.16'
__author__ = 'Patrick Kelley'
__email__ = 'patrick@netflix.com'
__license__ = 'Apache License, Version 2.0'
__copyright__ = 'Copyright 2017 %s' % __author__
|
33f050ab022626846510a7cbcd4b299612f2ff85
|
tvmaze/tests.py
|
tvmaze/tests.py
|
import unittest
from tvmazereader import main
class TestMethods(unittest.TestCase):
def test_readerMain(self):
data = main()
self.assertEqual(len(data),2)
if __name__ == '__main__':
unittest.main()
|
import unittest
from tvmazereader import main
class TestMethods(unittest.TestCase):
def test_readerMain(self):
data = main()
self.assertEqual(len(data),2)
if __name__ == '__main__':
unittest.main()
#python -m unittest discover -v
|
Add comment show test usage from console.
|
Add comment show test usage from console.
|
Python
|
mit
|
LairdStreak/MyPyPlayGround,LairdStreak/MyPyPlayGround,LairdStreak/MyPyPlayGround
|
import unittest
from tvmazereader import main
class TestMethods(unittest.TestCase):
def test_readerMain(self):
data = main()
self.assertEqual(len(data),2)
if __name__ == '__main__':
unittest.main()Add comment show test usage from console.
|
import unittest
from tvmazereader import main
class TestMethods(unittest.TestCase):
def test_readerMain(self):
data = main()
self.assertEqual(len(data),2)
if __name__ == '__main__':
unittest.main()
#python -m unittest discover -v
|
<commit_before>import unittest
from tvmazereader import main
class TestMethods(unittest.TestCase):
def test_readerMain(self):
data = main()
self.assertEqual(len(data),2)
if __name__ == '__main__':
unittest.main()<commit_msg>Add comment show test usage from console.<commit_after>
|
import unittest
from tvmazereader import main
class TestMethods(unittest.TestCase):
def test_readerMain(self):
data = main()
self.assertEqual(len(data),2)
if __name__ == '__main__':
unittest.main()
#python -m unittest discover -v
|
import unittest
from tvmazereader import main
class TestMethods(unittest.TestCase):
def test_readerMain(self):
data = main()
self.assertEqual(len(data),2)
if __name__ == '__main__':
unittest.main()Add comment show test usage from console.import unittest
from tvmazereader import main
class TestMethods(unittest.TestCase):
def test_readerMain(self):
data = main()
self.assertEqual(len(data),2)
if __name__ == '__main__':
unittest.main()
#python -m unittest discover -v
|
<commit_before>import unittest
from tvmazereader import main
class TestMethods(unittest.TestCase):
def test_readerMain(self):
data = main()
self.assertEqual(len(data),2)
if __name__ == '__main__':
unittest.main()<commit_msg>Add comment show test usage from console.<commit_after>import unittest
from tvmazereader import main
class TestMethods(unittest.TestCase):
def test_readerMain(self):
data = main()
self.assertEqual(len(data),2)
if __name__ == '__main__':
unittest.main()
#python -m unittest discover -v
|
61f162b7d5a8c9574261705b15b3f8deafffe7bc
|
froide/team/apps.py
|
froide/team/apps.py
|
from django.apps import AppConfig
from django.urls import reverse
from django.db import models
from django.utils.translation import ugettext_lazy as _
class TeamConfig(AppConfig):
name = 'froide.team'
verbose_name = _('Teams')
def ready(self):
from froide.account.menu import menu_registry, MenuItem
from froide.account import account_canceled
from .services import can_use_team
def get_account_menu_item(request):
if not can_use_team(request.user):
return None
return MenuItem(
section='before_settings', order=0,
url=reverse('team-list'),
label=_('Your teams')
)
menu_registry.register(get_account_menu_item)
account_canceled.connect(cancel_user)
def cancel_user(sender, user=None, **kwargs):
from .models import Team
if user is None:
return
# FIXME: teams may become owner-less
user.teammembership_set.all().delete()
# Remove teams with no members
Team.objects.all().annotate(
num_members=models.Count('members', distinct=True)
).filter(num_members=0).delete()
|
import json
from django.apps import AppConfig
from django.urls import reverse
from django.db import models
from django.utils.translation import ugettext_lazy as _
class TeamConfig(AppConfig):
name = 'froide.team'
verbose_name = _('Teams')
def ready(self):
from froide.account.menu import menu_registry, MenuItem
from froide.account import account_canceled
from froide.account.export import registry
from .services import can_use_team
def get_account_menu_item(request):
if not can_use_team(request.user):
return None
return MenuItem(
section='before_settings', order=0,
url=reverse('team-list'),
label=_('Your teams')
)
menu_registry.register(get_account_menu_item)
registry.register(export_user_data)
account_canceled.connect(cancel_user)
def cancel_user(sender, user=None, **kwargs):
from .models import Team
if user is None:
return
# FIXME: teams may become owner-less
user.teammembership_set.all().delete()
# Remove teams with no members
Team.objects.all().annotate(
num_members=models.Count('members', distinct=True)
).filter(num_members=0).delete()
def export_user_data(user):
from .models import TeamMembership
memberships = TeamMembership.objects.filter(
user=user
).select_related('team')
if not memberships:
return
yield ('teams.json', json.dumps([
{
'created': member.created.isoformat() if member.created else None,
'updated': member.updated.isoformat() if member.created else None,
'status': member.status,
'email': member.email,
'role': member.role,
'team_name': member.team.name,
'team_id': member.team_id,
}
for member in memberships]).encode('utf-8')
)
|
Add user data export for teams
|
Add user data export for teams
|
Python
|
mit
|
fin/froide,stefanw/froide,fin/froide,stefanw/froide,stefanw/froide,fin/froide,stefanw/froide,stefanw/froide,fin/froide
|
from django.apps import AppConfig
from django.urls import reverse
from django.db import models
from django.utils.translation import ugettext_lazy as _
class TeamConfig(AppConfig):
name = 'froide.team'
verbose_name = _('Teams')
def ready(self):
from froide.account.menu import menu_registry, MenuItem
from froide.account import account_canceled
from .services import can_use_team
def get_account_menu_item(request):
if not can_use_team(request.user):
return None
return MenuItem(
section='before_settings', order=0,
url=reverse('team-list'),
label=_('Your teams')
)
menu_registry.register(get_account_menu_item)
account_canceled.connect(cancel_user)
def cancel_user(sender, user=None, **kwargs):
from .models import Team
if user is None:
return
# FIXME: teams may become owner-less
user.teammembership_set.all().delete()
# Remove teams with no members
Team.objects.all().annotate(
num_members=models.Count('members', distinct=True)
).filter(num_members=0).delete()
Add user data export for teams
|
import json
from django.apps import AppConfig
from django.urls import reverse
from django.db import models
from django.utils.translation import ugettext_lazy as _
class TeamConfig(AppConfig):
name = 'froide.team'
verbose_name = _('Teams')
def ready(self):
from froide.account.menu import menu_registry, MenuItem
from froide.account import account_canceled
from froide.account.export import registry
from .services import can_use_team
def get_account_menu_item(request):
if not can_use_team(request.user):
return None
return MenuItem(
section='before_settings', order=0,
url=reverse('team-list'),
label=_('Your teams')
)
menu_registry.register(get_account_menu_item)
registry.register(export_user_data)
account_canceled.connect(cancel_user)
def cancel_user(sender, user=None, **kwargs):
from .models import Team
if user is None:
return
# FIXME: teams may become owner-less
user.teammembership_set.all().delete()
# Remove teams with no members
Team.objects.all().annotate(
num_members=models.Count('members', distinct=True)
).filter(num_members=0).delete()
def export_user_data(user):
from .models import TeamMembership
memberships = TeamMembership.objects.filter(
user=user
).select_related('team')
if not memberships:
return
yield ('teams.json', json.dumps([
{
'created': member.created.isoformat() if member.created else None,
'updated': member.updated.isoformat() if member.created else None,
'status': member.status,
'email': member.email,
'role': member.role,
'team_name': member.team.name,
'team_id': member.team_id,
}
for member in memberships]).encode('utf-8')
)
|
<commit_before>from django.apps import AppConfig
from django.urls import reverse
from django.db import models
from django.utils.translation import ugettext_lazy as _
class TeamConfig(AppConfig):
name = 'froide.team'
verbose_name = _('Teams')
def ready(self):
from froide.account.menu import menu_registry, MenuItem
from froide.account import account_canceled
from .services import can_use_team
def get_account_menu_item(request):
if not can_use_team(request.user):
return None
return MenuItem(
section='before_settings', order=0,
url=reverse('team-list'),
label=_('Your teams')
)
menu_registry.register(get_account_menu_item)
account_canceled.connect(cancel_user)
def cancel_user(sender, user=None, **kwargs):
from .models import Team
if user is None:
return
# FIXME: teams may become owner-less
user.teammembership_set.all().delete()
# Remove teams with no members
Team.objects.all().annotate(
num_members=models.Count('members', distinct=True)
).filter(num_members=0).delete()
<commit_msg>Add user data export for teams<commit_after>
|
import json
from django.apps import AppConfig
from django.urls import reverse
from django.db import models
from django.utils.translation import ugettext_lazy as _
class TeamConfig(AppConfig):
name = 'froide.team'
verbose_name = _('Teams')
def ready(self):
from froide.account.menu import menu_registry, MenuItem
from froide.account import account_canceled
from froide.account.export import registry
from .services import can_use_team
def get_account_menu_item(request):
if not can_use_team(request.user):
return None
return MenuItem(
section='before_settings', order=0,
url=reverse('team-list'),
label=_('Your teams')
)
menu_registry.register(get_account_menu_item)
registry.register(export_user_data)
account_canceled.connect(cancel_user)
def cancel_user(sender, user=None, **kwargs):
from .models import Team
if user is None:
return
# FIXME: teams may become owner-less
user.teammembership_set.all().delete()
# Remove teams with no members
Team.objects.all().annotate(
num_members=models.Count('members', distinct=True)
).filter(num_members=0).delete()
def export_user_data(user):
from .models import TeamMembership
memberships = TeamMembership.objects.filter(
user=user
).select_related('team')
if not memberships:
return
yield ('teams.json', json.dumps([
{
'created': member.created.isoformat() if member.created else None,
'updated': member.updated.isoformat() if member.created else None,
'status': member.status,
'email': member.email,
'role': member.role,
'team_name': member.team.name,
'team_id': member.team_id,
}
for member in memberships]).encode('utf-8')
)
|
from django.apps import AppConfig
from django.urls import reverse
from django.db import models
from django.utils.translation import ugettext_lazy as _
class TeamConfig(AppConfig):
name = 'froide.team'
verbose_name = _('Teams')
def ready(self):
from froide.account.menu import menu_registry, MenuItem
from froide.account import account_canceled
from .services import can_use_team
def get_account_menu_item(request):
if not can_use_team(request.user):
return None
return MenuItem(
section='before_settings', order=0,
url=reverse('team-list'),
label=_('Your teams')
)
menu_registry.register(get_account_menu_item)
account_canceled.connect(cancel_user)
def cancel_user(sender, user=None, **kwargs):
from .models import Team
if user is None:
return
# FIXME: teams may become owner-less
user.teammembership_set.all().delete()
# Remove teams with no members
Team.objects.all().annotate(
num_members=models.Count('members', distinct=True)
).filter(num_members=0).delete()
Add user data export for teamsimport json
from django.apps import AppConfig
from django.urls import reverse
from django.db import models
from django.utils.translation import ugettext_lazy as _
class TeamConfig(AppConfig):
name = 'froide.team'
verbose_name = _('Teams')
def ready(self):
from froide.account.menu import menu_registry, MenuItem
from froide.account import account_canceled
from froide.account.export import registry
from .services import can_use_team
def get_account_menu_item(request):
if not can_use_team(request.user):
return None
return MenuItem(
section='before_settings', order=0,
url=reverse('team-list'),
label=_('Your teams')
)
menu_registry.register(get_account_menu_item)
registry.register(export_user_data)
account_canceled.connect(cancel_user)
def cancel_user(sender, user=None, **kwargs):
from .models import Team
if user is None:
return
# FIXME: teams may become owner-less
user.teammembership_set.all().delete()
# Remove teams with no members
Team.objects.all().annotate(
num_members=models.Count('members', distinct=True)
).filter(num_members=0).delete()
def export_user_data(user):
from .models import TeamMembership
memberships = TeamMembership.objects.filter(
user=user
).select_related('team')
if not memberships:
return
yield ('teams.json', json.dumps([
{
'created': member.created.isoformat() if member.created else None,
'updated': member.updated.isoformat() if member.created else None,
'status': member.status,
'email': member.email,
'role': member.role,
'team_name': member.team.name,
'team_id': member.team_id,
}
for member in memberships]).encode('utf-8')
)
|
<commit_before>from django.apps import AppConfig
from django.urls import reverse
from django.db import models
from django.utils.translation import ugettext_lazy as _
class TeamConfig(AppConfig):
name = 'froide.team'
verbose_name = _('Teams')
def ready(self):
from froide.account.menu import menu_registry, MenuItem
from froide.account import account_canceled
from .services import can_use_team
def get_account_menu_item(request):
if not can_use_team(request.user):
return None
return MenuItem(
section='before_settings', order=0,
url=reverse('team-list'),
label=_('Your teams')
)
menu_registry.register(get_account_menu_item)
account_canceled.connect(cancel_user)
def cancel_user(sender, user=None, **kwargs):
from .models import Team
if user is None:
return
# FIXME: teams may become owner-less
user.teammembership_set.all().delete()
# Remove teams with no members
Team.objects.all().annotate(
num_members=models.Count('members', distinct=True)
).filter(num_members=0).delete()
<commit_msg>Add user data export for teams<commit_after>import json
from django.apps import AppConfig
from django.urls import reverse
from django.db import models
from django.utils.translation import ugettext_lazy as _
class TeamConfig(AppConfig):
name = 'froide.team'
verbose_name = _('Teams')
def ready(self):
from froide.account.menu import menu_registry, MenuItem
from froide.account import account_canceled
from froide.account.export import registry
from .services import can_use_team
def get_account_menu_item(request):
if not can_use_team(request.user):
return None
return MenuItem(
section='before_settings', order=0,
url=reverse('team-list'),
label=_('Your teams')
)
menu_registry.register(get_account_menu_item)
registry.register(export_user_data)
account_canceled.connect(cancel_user)
def cancel_user(sender, user=None, **kwargs):
from .models import Team
if user is None:
return
# FIXME: teams may become owner-less
user.teammembership_set.all().delete()
# Remove teams with no members
Team.objects.all().annotate(
num_members=models.Count('members', distinct=True)
).filter(num_members=0).delete()
def export_user_data(user):
from .models import TeamMembership
memberships = TeamMembership.objects.filter(
user=user
).select_related('team')
if not memberships:
return
yield ('teams.json', json.dumps([
{
'created': member.created.isoformat() if member.created else None,
'updated': member.updated.isoformat() if member.created else None,
'status': member.status,
'email': member.email,
'role': member.role,
'team_name': member.team.name,
'team_id': member.team_id,
}
for member in memberships]).encode('utf-8')
)
|
0b7c27fec5b1b7ececfcf7556f415e8e53cf69b6
|
v1.0/v1.0/search.py
|
v1.0/v1.0/search.py
|
#!/usr/bin/env python
# Toy program to search inverted index and print out each line the term
# appears.
import sys
mainfile = sys.argv[1]
indexfile = sys.argv[1] + ".idx1"
term = sys.argv[2]
main = open(mainfile)
index = open(indexfile)
st = term + ": "
for a in index:
if a.startswith(st):
n = [int(i) for i in a[len(st):].split(", ") if i]
linenum = 0
for l in main:
linenum += 1
if linenum in n:
print linenum, l.rstrip()
break
|
#!/usr/bin/env python
# Toy program to search inverted index and print out each line the term
# appears.
from __future__ import print_function
import sys
mainfile = sys.argv[1]
indexfile = sys.argv[1] + ".idx1"
term = sys.argv[2]
main = open(mainfile)
index = open(indexfile)
st = term + ": "
for a in index:
if a.startswith(st):
n = [int(i) for i in a[len(st):].split(", ") if i]
linenum = 0
for l in main:
linenum += 1
if linenum in n:
print(linenum, l.rstrip())
break
|
Make conformance test 55 compatible with Python 3
|
Make conformance test 55 compatible with Python 3
|
Python
|
apache-2.0
|
curoverse/common-workflow-language,curoverse/common-workflow-language,mr-c/common-workflow-language,common-workflow-language/common-workflow-language,mr-c/common-workflow-language,dleehr/common-workflow-language,dleehr/common-workflow-language,common-workflow-language/common-workflow-language,dleehr/common-workflow-language,mr-c/common-workflow-language,common-workflow-language/common-workflow-language,common-workflow-language/common-workflow-language,dleehr/common-workflow-language
|
#!/usr/bin/env python
# Toy program to search inverted index and print out each line the term
# appears.
import sys
mainfile = sys.argv[1]
indexfile = sys.argv[1] + ".idx1"
term = sys.argv[2]
main = open(mainfile)
index = open(indexfile)
st = term + ": "
for a in index:
if a.startswith(st):
n = [int(i) for i in a[len(st):].split(", ") if i]
linenum = 0
for l in main:
linenum += 1
if linenum in n:
print linenum, l.rstrip()
break
Make conformance test 55 compatible with Python 3
|
#!/usr/bin/env python
# Toy program to search inverted index and print out each line the term
# appears.
from __future__ import print_function
import sys
mainfile = sys.argv[1]
indexfile = sys.argv[1] + ".idx1"
term = sys.argv[2]
main = open(mainfile)
index = open(indexfile)
st = term + ": "
for a in index:
if a.startswith(st):
n = [int(i) for i in a[len(st):].split(", ") if i]
linenum = 0
for l in main:
linenum += 1
if linenum in n:
print(linenum, l.rstrip())
break
|
<commit_before>#!/usr/bin/env python
# Toy program to search inverted index and print out each line the term
# appears.
import sys
mainfile = sys.argv[1]
indexfile = sys.argv[1] + ".idx1"
term = sys.argv[2]
main = open(mainfile)
index = open(indexfile)
st = term + ": "
for a in index:
if a.startswith(st):
n = [int(i) for i in a[len(st):].split(", ") if i]
linenum = 0
for l in main:
linenum += 1
if linenum in n:
print linenum, l.rstrip()
break
<commit_msg>Make conformance test 55 compatible with Python 3<commit_after>
|
#!/usr/bin/env python
# Toy program to search inverted index and print out each line the term
# appears.
from __future__ import print_function
import sys
mainfile = sys.argv[1]
indexfile = sys.argv[1] + ".idx1"
term = sys.argv[2]
main = open(mainfile)
index = open(indexfile)
st = term + ": "
for a in index:
if a.startswith(st):
n = [int(i) for i in a[len(st):].split(", ") if i]
linenum = 0
for l in main:
linenum += 1
if linenum in n:
print(linenum, l.rstrip())
break
|
#!/usr/bin/env python
# Toy program to search inverted index and print out each line the term
# appears.
import sys
mainfile = sys.argv[1]
indexfile = sys.argv[1] + ".idx1"
term = sys.argv[2]
main = open(mainfile)
index = open(indexfile)
st = term + ": "
for a in index:
if a.startswith(st):
n = [int(i) for i in a[len(st):].split(", ") if i]
linenum = 0
for l in main:
linenum += 1
if linenum in n:
print linenum, l.rstrip()
break
Make conformance test 55 compatible with Python 3#!/usr/bin/env python
# Toy program to search inverted index and print out each line the term
# appears.
from __future__ import print_function
import sys
mainfile = sys.argv[1]
indexfile = sys.argv[1] + ".idx1"
term = sys.argv[2]
main = open(mainfile)
index = open(indexfile)
st = term + ": "
for a in index:
if a.startswith(st):
n = [int(i) for i in a[len(st):].split(", ") if i]
linenum = 0
for l in main:
linenum += 1
if linenum in n:
print(linenum, l.rstrip())
break
|
<commit_before>#!/usr/bin/env python
# Toy program to search inverted index and print out each line the term
# appears.
import sys
mainfile = sys.argv[1]
indexfile = sys.argv[1] + ".idx1"
term = sys.argv[2]
main = open(mainfile)
index = open(indexfile)
st = term + ": "
for a in index:
if a.startswith(st):
n = [int(i) for i in a[len(st):].split(", ") if i]
linenum = 0
for l in main:
linenum += 1
if linenum in n:
print linenum, l.rstrip()
break
<commit_msg>Make conformance test 55 compatible with Python 3<commit_after>#!/usr/bin/env python
# Toy program to search inverted index and print out each line the term
# appears.
from __future__ import print_function
import sys
mainfile = sys.argv[1]
indexfile = sys.argv[1] + ".idx1"
term = sys.argv[2]
main = open(mainfile)
index = open(indexfile)
st = term + ": "
for a in index:
if a.startswith(st):
n = [int(i) for i in a[len(st):].split(", ") if i]
linenum = 0
for l in main:
linenum += 1
if linenum in n:
print(linenum, l.rstrip())
break
|
10246ab476980053131c9f2b852116793fd8e1cd
|
flask_mongorest/__init__.py
|
flask_mongorest/__init__.py
|
from flask import Blueprint
from functools import wraps
from flask_mongorest.methods import Create, Update, BulkUpdate, Fetch, List, Delete
class MongoRest(object):
def __init__(self, app, **kwargs):
self.app = app
self.url_prefix = kwargs.pop('url_prefix', '')
app.register_blueprint(Blueprint(self.url_prefix, __name__, template_folder='templates'))
def register(self, **kwargs):
def decorator(klass):
document_name = klass.resource.document.__name__.lower()
name = kwargs.pop('name', document_name)
url = kwargs.pop('url', '/%s/' % document_name)
if self.url_prefix:
url = '%s%s' % (self.url_prefix, url)
pk_type = kwargs.pop('pk_type', 'string')
view_func = klass.as_view(name)
if List in klass.methods:
self.app.add_url_rule(url, defaults={'pk': None}, view_func=view_func, methods=[List.method])
if Create in klass.methods or BulkUpdate in klass.methods:
self.app.add_url_rule(url, view_func=view_func, methods=[x.method for x in klass.methods if x in (Create, BulkUpdate)])
self.app.add_url_rule('%s<%s:%s>/' % (url, pk_type, 'pk'), view_func=view_func, methods=[x.method for x in klass.methods])
return klass
return decorator
|
from flask import Blueprint
from functools import wraps
from flask_mongorest.methods import Create, Update, BulkUpdate, Fetch, List, Delete
class MongoRest(object):
def __init__(self, app, **kwargs):
self.app = app
self.url_prefix = kwargs.pop('url_prefix', '')
app.register_blueprint(Blueprint(self.url_prefix, __name__, template_folder='templates'))
def register(self, **kwargs):
def decorator(klass):
document_name = klass.resource.document.__name__.lower()
name = kwargs.pop('name', document_name)
url = kwargs.pop('url', '/%s/' % document_name)
if self.url_prefix:
url = '%s%s' % (self.url_prefix, url)
pk_type = kwargs.pop('pk_type', 'string')
view_func = klass.as_view(name)
if List in klass.methods:
self.app.add_url_rule(url, defaults={'pk': None}, view_func=view_func, methods=[List.method], **kwargs)
if Create in klass.methods or BulkUpdate in klass.methods:
self.app.add_url_rule(url, view_func=view_func, methods=[x.method for x in klass.methods if x in (Create, BulkUpdate)], **kwargs)
self.app.add_url_rule('%s<%s:%s>/' % (url, pk_type, 'pk'), view_func=view_func, methods=[x.method for x in klass.methods], **kwargs)
return klass
return decorator
|
Allow passing extra kwargs into register decorator
|
Allow passing extra kwargs into register decorator
In order to support extra key-word arguments in add_url_rule method, e.g. subdomain.
|
Python
|
bsd-3-clause
|
elasticsales/flask-mongorest,DropD/flask-mongorest,elasticsales/flask-mongorest,DropD/flask-mongorest
|
from flask import Blueprint
from functools import wraps
from flask_mongorest.methods import Create, Update, BulkUpdate, Fetch, List, Delete
class MongoRest(object):
def __init__(self, app, **kwargs):
self.app = app
self.url_prefix = kwargs.pop('url_prefix', '')
app.register_blueprint(Blueprint(self.url_prefix, __name__, template_folder='templates'))
def register(self, **kwargs):
def decorator(klass):
document_name = klass.resource.document.__name__.lower()
name = kwargs.pop('name', document_name)
url = kwargs.pop('url', '/%s/' % document_name)
if self.url_prefix:
url = '%s%s' % (self.url_prefix, url)
pk_type = kwargs.pop('pk_type', 'string')
view_func = klass.as_view(name)
if List in klass.methods:
self.app.add_url_rule(url, defaults={'pk': None}, view_func=view_func, methods=[List.method])
if Create in klass.methods or BulkUpdate in klass.methods:
self.app.add_url_rule(url, view_func=view_func, methods=[x.method for x in klass.methods if x in (Create, BulkUpdate)])
self.app.add_url_rule('%s<%s:%s>/' % (url, pk_type, 'pk'), view_func=view_func, methods=[x.method for x in klass.methods])
return klass
return decorator
Allow passing extra kwargs into register decorator
In order to support extra key-word arguments in add_url_rule method, e.g. subdomain.
|
from flask import Blueprint
from functools import wraps
from flask_mongorest.methods import Create, Update, BulkUpdate, Fetch, List, Delete
class MongoRest(object):
def __init__(self, app, **kwargs):
self.app = app
self.url_prefix = kwargs.pop('url_prefix', '')
app.register_blueprint(Blueprint(self.url_prefix, __name__, template_folder='templates'))
def register(self, **kwargs):
def decorator(klass):
document_name = klass.resource.document.__name__.lower()
name = kwargs.pop('name', document_name)
url = kwargs.pop('url', '/%s/' % document_name)
if self.url_prefix:
url = '%s%s' % (self.url_prefix, url)
pk_type = kwargs.pop('pk_type', 'string')
view_func = klass.as_view(name)
if List in klass.methods:
self.app.add_url_rule(url, defaults={'pk': None}, view_func=view_func, methods=[List.method], **kwargs)
if Create in klass.methods or BulkUpdate in klass.methods:
self.app.add_url_rule(url, view_func=view_func, methods=[x.method for x in klass.methods if x in (Create, BulkUpdate)], **kwargs)
self.app.add_url_rule('%s<%s:%s>/' % (url, pk_type, 'pk'), view_func=view_func, methods=[x.method for x in klass.methods], **kwargs)
return klass
return decorator
|
<commit_before>from flask import Blueprint
from functools import wraps
from flask_mongorest.methods import Create, Update, BulkUpdate, Fetch, List, Delete
class MongoRest(object):
def __init__(self, app, **kwargs):
self.app = app
self.url_prefix = kwargs.pop('url_prefix', '')
app.register_blueprint(Blueprint(self.url_prefix, __name__, template_folder='templates'))
def register(self, **kwargs):
def decorator(klass):
document_name = klass.resource.document.__name__.lower()
name = kwargs.pop('name', document_name)
url = kwargs.pop('url', '/%s/' % document_name)
if self.url_prefix:
url = '%s%s' % (self.url_prefix, url)
pk_type = kwargs.pop('pk_type', 'string')
view_func = klass.as_view(name)
if List in klass.methods:
self.app.add_url_rule(url, defaults={'pk': None}, view_func=view_func, methods=[List.method])
if Create in klass.methods or BulkUpdate in klass.methods:
self.app.add_url_rule(url, view_func=view_func, methods=[x.method for x in klass.methods if x in (Create, BulkUpdate)])
self.app.add_url_rule('%s<%s:%s>/' % (url, pk_type, 'pk'), view_func=view_func, methods=[x.method for x in klass.methods])
return klass
return decorator
<commit_msg>Allow passing extra kwargs into register decorator
In order to support extra key-word arguments in add_url_rule method, e.g. subdomain.<commit_after>
|
from flask import Blueprint
from functools import wraps
from flask_mongorest.methods import Create, Update, BulkUpdate, Fetch, List, Delete
class MongoRest(object):
def __init__(self, app, **kwargs):
self.app = app
self.url_prefix = kwargs.pop('url_prefix', '')
app.register_blueprint(Blueprint(self.url_prefix, __name__, template_folder='templates'))
def register(self, **kwargs):
def decorator(klass):
document_name = klass.resource.document.__name__.lower()
name = kwargs.pop('name', document_name)
url = kwargs.pop('url', '/%s/' % document_name)
if self.url_prefix:
url = '%s%s' % (self.url_prefix, url)
pk_type = kwargs.pop('pk_type', 'string')
view_func = klass.as_view(name)
if List in klass.methods:
self.app.add_url_rule(url, defaults={'pk': None}, view_func=view_func, methods=[List.method], **kwargs)
if Create in klass.methods or BulkUpdate in klass.methods:
self.app.add_url_rule(url, view_func=view_func, methods=[x.method for x in klass.methods if x in (Create, BulkUpdate)], **kwargs)
self.app.add_url_rule('%s<%s:%s>/' % (url, pk_type, 'pk'), view_func=view_func, methods=[x.method for x in klass.methods], **kwargs)
return klass
return decorator
|
from flask import Blueprint
from functools import wraps
from flask_mongorest.methods import Create, Update, BulkUpdate, Fetch, List, Delete
class MongoRest(object):
def __init__(self, app, **kwargs):
self.app = app
self.url_prefix = kwargs.pop('url_prefix', '')
app.register_blueprint(Blueprint(self.url_prefix, __name__, template_folder='templates'))
def register(self, **kwargs):
def decorator(klass):
document_name = klass.resource.document.__name__.lower()
name = kwargs.pop('name', document_name)
url = kwargs.pop('url', '/%s/' % document_name)
if self.url_prefix:
url = '%s%s' % (self.url_prefix, url)
pk_type = kwargs.pop('pk_type', 'string')
view_func = klass.as_view(name)
if List in klass.methods:
self.app.add_url_rule(url, defaults={'pk': None}, view_func=view_func, methods=[List.method])
if Create in klass.methods or BulkUpdate in klass.methods:
self.app.add_url_rule(url, view_func=view_func, methods=[x.method for x in klass.methods if x in (Create, BulkUpdate)])
self.app.add_url_rule('%s<%s:%s>/' % (url, pk_type, 'pk'), view_func=view_func, methods=[x.method for x in klass.methods])
return klass
return decorator
Allow passing extra kwargs into register decorator
In order to support extra key-word arguments in add_url_rule method, e.g. subdomain.from flask import Blueprint
from functools import wraps
from flask_mongorest.methods import Create, Update, BulkUpdate, Fetch, List, Delete
class MongoRest(object):
def __init__(self, app, **kwargs):
self.app = app
self.url_prefix = kwargs.pop('url_prefix', '')
app.register_blueprint(Blueprint(self.url_prefix, __name__, template_folder='templates'))
def register(self, **kwargs):
def decorator(klass):
document_name = klass.resource.document.__name__.lower()
name = kwargs.pop('name', document_name)
url = kwargs.pop('url', '/%s/' % document_name)
if self.url_prefix:
url = '%s%s' % (self.url_prefix, url)
pk_type = kwargs.pop('pk_type', 'string')
view_func = klass.as_view(name)
if List in klass.methods:
self.app.add_url_rule(url, defaults={'pk': None}, view_func=view_func, methods=[List.method], **kwargs)
if Create in klass.methods or BulkUpdate in klass.methods:
self.app.add_url_rule(url, view_func=view_func, methods=[x.method for x in klass.methods if x in (Create, BulkUpdate)], **kwargs)
self.app.add_url_rule('%s<%s:%s>/' % (url, pk_type, 'pk'), view_func=view_func, methods=[x.method for x in klass.methods], **kwargs)
return klass
return decorator
|
<commit_before>from flask import Blueprint
from functools import wraps
from flask_mongorest.methods import Create, Update, BulkUpdate, Fetch, List, Delete
class MongoRest(object):
def __init__(self, app, **kwargs):
self.app = app
self.url_prefix = kwargs.pop('url_prefix', '')
app.register_blueprint(Blueprint(self.url_prefix, __name__, template_folder='templates'))
def register(self, **kwargs):
def decorator(klass):
document_name = klass.resource.document.__name__.lower()
name = kwargs.pop('name', document_name)
url = kwargs.pop('url', '/%s/' % document_name)
if self.url_prefix:
url = '%s%s' % (self.url_prefix, url)
pk_type = kwargs.pop('pk_type', 'string')
view_func = klass.as_view(name)
if List in klass.methods:
self.app.add_url_rule(url, defaults={'pk': None}, view_func=view_func, methods=[List.method])
if Create in klass.methods or BulkUpdate in klass.methods:
self.app.add_url_rule(url, view_func=view_func, methods=[x.method for x in klass.methods if x in (Create, BulkUpdate)])
self.app.add_url_rule('%s<%s:%s>/' % (url, pk_type, 'pk'), view_func=view_func, methods=[x.method for x in klass.methods])
return klass
return decorator
<commit_msg>Allow passing extra kwargs into register decorator
In order to support extra key-word arguments in add_url_rule method, e.g. subdomain.<commit_after>from flask import Blueprint
from functools import wraps
from flask_mongorest.methods import Create, Update, BulkUpdate, Fetch, List, Delete
class MongoRest(object):
def __init__(self, app, **kwargs):
self.app = app
self.url_prefix = kwargs.pop('url_prefix', '')
app.register_blueprint(Blueprint(self.url_prefix, __name__, template_folder='templates'))
def register(self, **kwargs):
def decorator(klass):
document_name = klass.resource.document.__name__.lower()
name = kwargs.pop('name', document_name)
url = kwargs.pop('url', '/%s/' % document_name)
if self.url_prefix:
url = '%s%s' % (self.url_prefix, url)
pk_type = kwargs.pop('pk_type', 'string')
view_func = klass.as_view(name)
if List in klass.methods:
self.app.add_url_rule(url, defaults={'pk': None}, view_func=view_func, methods=[List.method], **kwargs)
if Create in klass.methods or BulkUpdate in klass.methods:
self.app.add_url_rule(url, view_func=view_func, methods=[x.method for x in klass.methods if x in (Create, BulkUpdate)], **kwargs)
self.app.add_url_rule('%s<%s:%s>/' % (url, pk_type, 'pk'), view_func=view_func, methods=[x.method for x in klass.methods], **kwargs)
return klass
return decorator
|
e1791d929bccd1f5e9382e45fb90bd8257ef597d
|
src/toil/version.py
|
src/toil/version.py
|
# Copyright (C) 2015 UCSC Computational Genomics Lab
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
version='3.0.6'
|
# Copyright (C) 2015 UCSC Computational Genomics Lab
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
version = '3.1.0a1'
|
Prepare next development cycle 3.1.0a1
|
Prepare next development cycle 3.1.0a1
|
Python
|
apache-2.0
|
BD2KGenomics/slugflow,BD2KGenomics/slugflow
|
# Copyright (C) 2015 UCSC Computational Genomics Lab
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
version='3.0.6'
Prepare next development cycle 3.1.0a1
|
# Copyright (C) 2015 UCSC Computational Genomics Lab
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
version = '3.1.0a1'
|
<commit_before># Copyright (C) 2015 UCSC Computational Genomics Lab
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
version='3.0.6'
<commit_msg>Prepare next development cycle 3.1.0a1<commit_after>
|
# Copyright (C) 2015 UCSC Computational Genomics Lab
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
version = '3.1.0a1'
|
# Copyright (C) 2015 UCSC Computational Genomics Lab
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
version='3.0.6'
Prepare next development cycle 3.1.0a1# Copyright (C) 2015 UCSC Computational Genomics Lab
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
version = '3.1.0a1'
|
<commit_before># Copyright (C) 2015 UCSC Computational Genomics Lab
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
version='3.0.6'
<commit_msg>Prepare next development cycle 3.1.0a1<commit_after># Copyright (C) 2015 UCSC Computational Genomics Lab
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
version = '3.1.0a1'
|
7024d3b36176ec11142ee10884936ff329aece49
|
tests/test_cookiecutter_invocation.py
|
tests/test_cookiecutter_invocation.py
|
# -*- coding: utf-8 -*-
"""
test_cookiecutter_invocation
----------------------------
Tests to make sure that cookiecutter can be called from the cli without
using the entry point set up for the package.
"""
import os
import pytest
import subprocess
from cookiecutter import utils
def test_should_raise_error_without_template_arg(capfd):
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_call(['python', '-m', 'cookiecutter.cli'])
_, err = capfd.readouterr()
exp_message = 'Error: Missing argument "template".'
assert exp_message in err
@pytest.fixture
def project_dir(request):
"""Remove the rendered project directory created by the test."""
rendered_dir = 'fake-project-templated'
def remove_generated_project():
if os.path.isdir(rendered_dir):
utils.rmtree(rendered_dir)
request.addfinalizer(remove_generated_project)
return rendered_dir
def test_should_invoke_main(project_dir):
subprocess.check_call([
'python',
'-m',
'cookiecutter.cli',
'tests/fake-repo-tmpl',
'--no-input'
])
assert os.path.isdir(project_dir)
|
# -*- coding: utf-8 -*-
"""
test_cookiecutter_invocation
----------------------------
Tests to make sure that cookiecutter can be called from the cli without
using the entry point set up for the package.
"""
import os
import pytest
import subprocess
import sys
from cookiecutter import utils
def test_should_raise_error_without_template_arg(capfd):
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_call(['python', '-m', 'cookiecutter.cli'])
_, err = capfd.readouterr()
exp_message = 'Error: Missing argument "template".'
assert exp_message in err
@pytest.fixture
def project_dir(request):
"""Remove the rendered project directory created by the test."""
rendered_dir = 'fake-project-templated'
def remove_generated_project():
if os.path.isdir(rendered_dir):
utils.rmtree(rendered_dir)
request.addfinalizer(remove_generated_project)
return rendered_dir
def test_should_invoke_main(monkeypatch, project_dir):
monkeypatch.setenv('PYTHONPATH', '.')
subprocess.check_call([
sys.executable,
'-m',
'cookiecutter.cli',
'tests/fake-repo-tmpl',
'--no-input'
])
assert os.path.isdir(project_dir)
|
Set PYTHONPATH and use sys.executable
|
Set PYTHONPATH and use sys.executable
|
Python
|
bsd-3-clause
|
agconti/cookiecutter,stevepiercy/cookiecutter,sp1rs/cookiecutter,Vauxoo/cookiecutter,hackebrot/cookiecutter,hackebrot/cookiecutter,ramiroluz/cookiecutter,kkujawinski/cookiecutter,pjbull/cookiecutter,ramiroluz/cookiecutter,cguardia/cookiecutter,cguardia/cookiecutter,sp1rs/cookiecutter,venumech/cookiecutter,Vauxoo/cookiecutter,dajose/cookiecutter,takeflight/cookiecutter,moi65/cookiecutter,moi65/cookiecutter,audreyr/cookiecutter,michaeljoseph/cookiecutter,michaeljoseph/cookiecutter,Springerle/cookiecutter,takeflight/cookiecutter,willingc/cookiecutter,christabor/cookiecutter,terryjbates/cookiecutter,kkujawinski/cookiecutter,benthomasson/cookiecutter,audreyr/cookiecutter,atlassian/cookiecutter,terryjbates/cookiecutter,luzfcb/cookiecutter,stevepiercy/cookiecutter,luzfcb/cookiecutter,benthomasson/cookiecutter,christabor/cookiecutter,agconti/cookiecutter,atlassian/cookiecutter,willingc/cookiecutter,venumech/cookiecutter,dajose/cookiecutter,pjbull/cookiecutter,Springerle/cookiecutter
|
# -*- coding: utf-8 -*-
"""
test_cookiecutter_invocation
----------------------------
Tests to make sure that cookiecutter can be called from the cli without
using the entry point set up for the package.
"""
import os
import pytest
import subprocess
from cookiecutter import utils
def test_should_raise_error_without_template_arg(capfd):
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_call(['python', '-m', 'cookiecutter.cli'])
_, err = capfd.readouterr()
exp_message = 'Error: Missing argument "template".'
assert exp_message in err
@pytest.fixture
def project_dir(request):
"""Remove the rendered project directory created by the test."""
rendered_dir = 'fake-project-templated'
def remove_generated_project():
if os.path.isdir(rendered_dir):
utils.rmtree(rendered_dir)
request.addfinalizer(remove_generated_project)
return rendered_dir
def test_should_invoke_main(project_dir):
subprocess.check_call([
'python',
'-m',
'cookiecutter.cli',
'tests/fake-repo-tmpl',
'--no-input'
])
assert os.path.isdir(project_dir)
Set PYTHONPATH and use sys.executable
|
# -*- coding: utf-8 -*-
"""
test_cookiecutter_invocation
----------------------------
Tests to make sure that cookiecutter can be called from the cli without
using the entry point set up for the package.
"""
import os
import pytest
import subprocess
import sys
from cookiecutter import utils
def test_should_raise_error_without_template_arg(capfd):
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_call(['python', '-m', 'cookiecutter.cli'])
_, err = capfd.readouterr()
exp_message = 'Error: Missing argument "template".'
assert exp_message in err
@pytest.fixture
def project_dir(request):
"""Remove the rendered project directory created by the test."""
rendered_dir = 'fake-project-templated'
def remove_generated_project():
if os.path.isdir(rendered_dir):
utils.rmtree(rendered_dir)
request.addfinalizer(remove_generated_project)
return rendered_dir
def test_should_invoke_main(monkeypatch, project_dir):
monkeypatch.setenv('PYTHONPATH', '.')
subprocess.check_call([
sys.executable,
'-m',
'cookiecutter.cli',
'tests/fake-repo-tmpl',
'--no-input'
])
assert os.path.isdir(project_dir)
|
<commit_before># -*- coding: utf-8 -*-
"""
test_cookiecutter_invocation
----------------------------
Tests to make sure that cookiecutter can be called from the cli without
using the entry point set up for the package.
"""
import os
import pytest
import subprocess
from cookiecutter import utils
def test_should_raise_error_without_template_arg(capfd):
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_call(['python', '-m', 'cookiecutter.cli'])
_, err = capfd.readouterr()
exp_message = 'Error: Missing argument "template".'
assert exp_message in err
@pytest.fixture
def project_dir(request):
"""Remove the rendered project directory created by the test."""
rendered_dir = 'fake-project-templated'
def remove_generated_project():
if os.path.isdir(rendered_dir):
utils.rmtree(rendered_dir)
request.addfinalizer(remove_generated_project)
return rendered_dir
def test_should_invoke_main(project_dir):
subprocess.check_call([
'python',
'-m',
'cookiecutter.cli',
'tests/fake-repo-tmpl',
'--no-input'
])
assert os.path.isdir(project_dir)
<commit_msg>Set PYTHONPATH and use sys.executable<commit_after>
|
# -*- coding: utf-8 -*-
"""
test_cookiecutter_invocation
----------------------------
Tests to make sure that cookiecutter can be called from the cli without
using the entry point set up for the package.
"""
import os
import pytest
import subprocess
import sys
from cookiecutter import utils
def test_should_raise_error_without_template_arg(capfd):
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_call(['python', '-m', 'cookiecutter.cli'])
_, err = capfd.readouterr()
exp_message = 'Error: Missing argument "template".'
assert exp_message in err
@pytest.fixture
def project_dir(request):
"""Remove the rendered project directory created by the test."""
rendered_dir = 'fake-project-templated'
def remove_generated_project():
if os.path.isdir(rendered_dir):
utils.rmtree(rendered_dir)
request.addfinalizer(remove_generated_project)
return rendered_dir
def test_should_invoke_main(monkeypatch, project_dir):
monkeypatch.setenv('PYTHONPATH', '.')
subprocess.check_call([
sys.executable,
'-m',
'cookiecutter.cli',
'tests/fake-repo-tmpl',
'--no-input'
])
assert os.path.isdir(project_dir)
|
# -*- coding: utf-8 -*-
"""
test_cookiecutter_invocation
----------------------------
Tests to make sure that cookiecutter can be called from the cli without
using the entry point set up for the package.
"""
import os
import pytest
import subprocess
from cookiecutter import utils
def test_should_raise_error_without_template_arg(capfd):
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_call(['python', '-m', 'cookiecutter.cli'])
_, err = capfd.readouterr()
exp_message = 'Error: Missing argument "template".'
assert exp_message in err
@pytest.fixture
def project_dir(request):
"""Remove the rendered project directory created by the test."""
rendered_dir = 'fake-project-templated'
def remove_generated_project():
if os.path.isdir(rendered_dir):
utils.rmtree(rendered_dir)
request.addfinalizer(remove_generated_project)
return rendered_dir
def test_should_invoke_main(project_dir):
subprocess.check_call([
'python',
'-m',
'cookiecutter.cli',
'tests/fake-repo-tmpl',
'--no-input'
])
assert os.path.isdir(project_dir)
Set PYTHONPATH and use sys.executable# -*- coding: utf-8 -*-
"""
test_cookiecutter_invocation
----------------------------
Tests to make sure that cookiecutter can be called from the cli without
using the entry point set up for the package.
"""
import os
import pytest
import subprocess
import sys
from cookiecutter import utils
def test_should_raise_error_without_template_arg(capfd):
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_call(['python', '-m', 'cookiecutter.cli'])
_, err = capfd.readouterr()
exp_message = 'Error: Missing argument "template".'
assert exp_message in err
@pytest.fixture
def project_dir(request):
"""Remove the rendered project directory created by the test."""
rendered_dir = 'fake-project-templated'
def remove_generated_project():
if os.path.isdir(rendered_dir):
utils.rmtree(rendered_dir)
request.addfinalizer(remove_generated_project)
return rendered_dir
def test_should_invoke_main(monkeypatch, project_dir):
monkeypatch.setenv('PYTHONPATH', '.')
subprocess.check_call([
sys.executable,
'-m',
'cookiecutter.cli',
'tests/fake-repo-tmpl',
'--no-input'
])
assert os.path.isdir(project_dir)
|
<commit_before># -*- coding: utf-8 -*-
"""
test_cookiecutter_invocation
----------------------------
Tests to make sure that cookiecutter can be called from the cli without
using the entry point set up for the package.
"""
import os
import pytest
import subprocess
from cookiecutter import utils
def test_should_raise_error_without_template_arg(capfd):
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_call(['python', '-m', 'cookiecutter.cli'])
_, err = capfd.readouterr()
exp_message = 'Error: Missing argument "template".'
assert exp_message in err
@pytest.fixture
def project_dir(request):
"""Remove the rendered project directory created by the test."""
rendered_dir = 'fake-project-templated'
def remove_generated_project():
if os.path.isdir(rendered_dir):
utils.rmtree(rendered_dir)
request.addfinalizer(remove_generated_project)
return rendered_dir
def test_should_invoke_main(project_dir):
subprocess.check_call([
'python',
'-m',
'cookiecutter.cli',
'tests/fake-repo-tmpl',
'--no-input'
])
assert os.path.isdir(project_dir)
<commit_msg>Set PYTHONPATH and use sys.executable<commit_after># -*- coding: utf-8 -*-
"""
test_cookiecutter_invocation
----------------------------
Tests to make sure that cookiecutter can be called from the cli without
using the entry point set up for the package.
"""
import os
import pytest
import subprocess
import sys
from cookiecutter import utils
def test_should_raise_error_without_template_arg(capfd):
with pytest.raises(subprocess.CalledProcessError):
subprocess.check_call(['python', '-m', 'cookiecutter.cli'])
_, err = capfd.readouterr()
exp_message = 'Error: Missing argument "template".'
assert exp_message in err
@pytest.fixture
def project_dir(request):
"""Remove the rendered project directory created by the test."""
rendered_dir = 'fake-project-templated'
def remove_generated_project():
if os.path.isdir(rendered_dir):
utils.rmtree(rendered_dir)
request.addfinalizer(remove_generated_project)
return rendered_dir
def test_should_invoke_main(monkeypatch, project_dir):
monkeypatch.setenv('PYTHONPATH', '.')
subprocess.check_call([
sys.executable,
'-m',
'cookiecutter.cli',
'tests/fake-repo-tmpl',
'--no-input'
])
assert os.path.isdir(project_dir)
|
7127d138bacf507360b6b8c0386187d2e1be32a6
|
ifilter/__init__.py
|
ifilter/__init__.py
|
import sys
import tempfile
import os
from subprocess import call
import argparse
guide = """# Remove or modify lines.
# Lines that are prefixed with the # character are filtered out.
# When you are done, save the file and exit.
"""
description = """Interactively filter lines in a pipe.
Example: Delete selected files in a directory
find . -type f | ifilter | xargs rm
"""
def get_editor():
if "EDITOR" in os.environ:
return os.environ["EDITOR"]
if "VISUAL" in os.environ:
return os.environ["VISUAL"]
return "vi"
def main():
parser = argparse.ArgumentParser(
prog='ifilter',
formatter_class=argparse.RawDescriptionHelpFormatter,
description=description)
# Currently args are unused
args = parser.parse_args()
s = sys.stdin.read()
f = tempfile.NamedTemporaryFile(delete=False)
f.write(guide)
f.write(s)
f.close()
editor = get_editor()
call("</dev/tty >/dev/tty %s %s " % (editor, f.name), shell=True)
with open(f.name, "r") as f:
for line in f.readlines():
if not line.startswith("#"):
print line,
os.remove(f.name)
if __name__ == "__main__":
main()
|
import sys
import tempfile
import os
from subprocess import call
import argparse
guide = """# Remove or modify lines.
# Lines that are prefixed with the # character are filtered out.
# When you are done, save the file and exit.
"""
description = """Interactively filter lines in a pipe.
Example: Delete selected files in a directory
find . -type f | ifilter | xargs rm
"""
def get_editor():
if "EDITOR" in os.environ:
return os.environ["EDITOR"]
if "VISUAL" in os.environ:
return os.environ["VISUAL"]
return "vi"
def main():
try:
parser = argparse.ArgumentParser(
prog='ifilter',
formatter_class=argparse.RawDescriptionHelpFormatter,
description=description)
# Currently args are unused
args = parser.parse_args()
s = sys.stdin.read()
f = tempfile.NamedTemporaryFile(delete=False)
f.write(guide)
f.write(s)
f.close()
editor = get_editor()
call("</dev/tty >/dev/tty %s %s " % (editor, f.name), shell=True)
with open(f.name, "r") as f:
for line in f.readlines():
if not line.startswith("#"):
print line,
finally:
if f is not None:
os.remove(f.name)
if __name__ == "__main__":
main()
|
Add finally block for deletion of temp file
|
Add finally block for deletion of temp file
|
Python
|
apache-2.0
|
stefan-hudelmaier/ifilter
|
import sys
import tempfile
import os
from subprocess import call
import argparse
guide = """# Remove or modify lines.
# Lines that are prefixed with the # character are filtered out.
# When you are done, save the file and exit.
"""
description = """Interactively filter lines in a pipe.
Example: Delete selected files in a directory
find . -type f | ifilter | xargs rm
"""
def get_editor():
if "EDITOR" in os.environ:
return os.environ["EDITOR"]
if "VISUAL" in os.environ:
return os.environ["VISUAL"]
return "vi"
def main():
parser = argparse.ArgumentParser(
prog='ifilter',
formatter_class=argparse.RawDescriptionHelpFormatter,
description=description)
# Currently args are unused
args = parser.parse_args()
s = sys.stdin.read()
f = tempfile.NamedTemporaryFile(delete=False)
f.write(guide)
f.write(s)
f.close()
editor = get_editor()
call("</dev/tty >/dev/tty %s %s " % (editor, f.name), shell=True)
with open(f.name, "r") as f:
for line in f.readlines():
if not line.startswith("#"):
print line,
os.remove(f.name)
if __name__ == "__main__":
main()
Add finally block for deletion of temp file
|
import sys
import tempfile
import os
from subprocess import call
import argparse
guide = """# Remove or modify lines.
# Lines that are prefixed with the # character are filtered out.
# When you are done, save the file and exit.
"""
description = """Interactively filter lines in a pipe.
Example: Delete selected files in a directory
find . -type f | ifilter | xargs rm
"""
def get_editor():
if "EDITOR" in os.environ:
return os.environ["EDITOR"]
if "VISUAL" in os.environ:
return os.environ["VISUAL"]
return "vi"
def main():
try:
parser = argparse.ArgumentParser(
prog='ifilter',
formatter_class=argparse.RawDescriptionHelpFormatter,
description=description)
# Currently args are unused
args = parser.parse_args()
s = sys.stdin.read()
f = tempfile.NamedTemporaryFile(delete=False)
f.write(guide)
f.write(s)
f.close()
editor = get_editor()
call("</dev/tty >/dev/tty %s %s " % (editor, f.name), shell=True)
with open(f.name, "r") as f:
for line in f.readlines():
if not line.startswith("#"):
print line,
finally:
if f is not None:
os.remove(f.name)
if __name__ == "__main__":
main()
|
<commit_before>import sys
import tempfile
import os
from subprocess import call
import argparse
guide = """# Remove or modify lines.
# Lines that are prefixed with the # character are filtered out.
# When you are done, save the file and exit.
"""
description = """Interactively filter lines in a pipe.
Example: Delete selected files in a directory
find . -type f | ifilter | xargs rm
"""
def get_editor():
if "EDITOR" in os.environ:
return os.environ["EDITOR"]
if "VISUAL" in os.environ:
return os.environ["VISUAL"]
return "vi"
def main():
parser = argparse.ArgumentParser(
prog='ifilter',
formatter_class=argparse.RawDescriptionHelpFormatter,
description=description)
# Currently args are unused
args = parser.parse_args()
s = sys.stdin.read()
f = tempfile.NamedTemporaryFile(delete=False)
f.write(guide)
f.write(s)
f.close()
editor = get_editor()
call("</dev/tty >/dev/tty %s %s " % (editor, f.name), shell=True)
with open(f.name, "r") as f:
for line in f.readlines():
if not line.startswith("#"):
print line,
os.remove(f.name)
if __name__ == "__main__":
main()
<commit_msg>Add finally block for deletion of temp file<commit_after>
|
import sys
import tempfile
import os
from subprocess import call
import argparse
guide = """# Remove or modify lines.
# Lines that are prefixed with the # character are filtered out.
# When you are done, save the file and exit.
"""
description = """Interactively filter lines in a pipe.
Example: Delete selected files in a directory
find . -type f | ifilter | xargs rm
"""
def get_editor():
if "EDITOR" in os.environ:
return os.environ["EDITOR"]
if "VISUAL" in os.environ:
return os.environ["VISUAL"]
return "vi"
def main():
try:
parser = argparse.ArgumentParser(
prog='ifilter',
formatter_class=argparse.RawDescriptionHelpFormatter,
description=description)
# Currently args are unused
args = parser.parse_args()
s = sys.stdin.read()
f = tempfile.NamedTemporaryFile(delete=False)
f.write(guide)
f.write(s)
f.close()
editor = get_editor()
call("</dev/tty >/dev/tty %s %s " % (editor, f.name), shell=True)
with open(f.name, "r") as f:
for line in f.readlines():
if not line.startswith("#"):
print line,
finally:
if f is not None:
os.remove(f.name)
if __name__ == "__main__":
main()
|
import sys
import tempfile
import os
from subprocess import call
import argparse
guide = """# Remove or modify lines.
# Lines that are prefixed with the # character are filtered out.
# When you are done, save the file and exit.
"""
description = """Interactively filter lines in a pipe.
Example: Delete selected files in a directory
find . -type f | ifilter | xargs rm
"""
def get_editor():
if "EDITOR" in os.environ:
return os.environ["EDITOR"]
if "VISUAL" in os.environ:
return os.environ["VISUAL"]
return "vi"
def main():
parser = argparse.ArgumentParser(
prog='ifilter',
formatter_class=argparse.RawDescriptionHelpFormatter,
description=description)
# Currently args are unused
args = parser.parse_args()
s = sys.stdin.read()
f = tempfile.NamedTemporaryFile(delete=False)
f.write(guide)
f.write(s)
f.close()
editor = get_editor()
call("</dev/tty >/dev/tty %s %s " % (editor, f.name), shell=True)
with open(f.name, "r") as f:
for line in f.readlines():
if not line.startswith("#"):
print line,
os.remove(f.name)
if __name__ == "__main__":
main()
Add finally block for deletion of temp fileimport sys
import tempfile
import os
from subprocess import call
import argparse
guide = """# Remove or modify lines.
# Lines that are prefixed with the # character are filtered out.
# When you are done, save the file and exit.
"""
description = """Interactively filter lines in a pipe.
Example: Delete selected files in a directory
find . -type f | ifilter | xargs rm
"""
def get_editor():
if "EDITOR" in os.environ:
return os.environ["EDITOR"]
if "VISUAL" in os.environ:
return os.environ["VISUAL"]
return "vi"
def main():
try:
parser = argparse.ArgumentParser(
prog='ifilter',
formatter_class=argparse.RawDescriptionHelpFormatter,
description=description)
# Currently args are unused
args = parser.parse_args()
s = sys.stdin.read()
f = tempfile.NamedTemporaryFile(delete=False)
f.write(guide)
f.write(s)
f.close()
editor = get_editor()
call("</dev/tty >/dev/tty %s %s " % (editor, f.name), shell=True)
with open(f.name, "r") as f:
for line in f.readlines():
if not line.startswith("#"):
print line,
finally:
if f is not None:
os.remove(f.name)
if __name__ == "__main__":
main()
|
<commit_before>import sys
import tempfile
import os
from subprocess import call
import argparse
guide = """# Remove or modify lines.
# Lines that are prefixed with the # character are filtered out.
# When you are done, save the file and exit.
"""
description = """Interactively filter lines in a pipe.
Example: Delete selected files in a directory
find . -type f | ifilter | xargs rm
"""
def get_editor():
if "EDITOR" in os.environ:
return os.environ["EDITOR"]
if "VISUAL" in os.environ:
return os.environ["VISUAL"]
return "vi"
def main():
parser = argparse.ArgumentParser(
prog='ifilter',
formatter_class=argparse.RawDescriptionHelpFormatter,
description=description)
# Currently args are unused
args = parser.parse_args()
s = sys.stdin.read()
f = tempfile.NamedTemporaryFile(delete=False)
f.write(guide)
f.write(s)
f.close()
editor = get_editor()
call("</dev/tty >/dev/tty %s %s " % (editor, f.name), shell=True)
with open(f.name, "r") as f:
for line in f.readlines():
if not line.startswith("#"):
print line,
os.remove(f.name)
if __name__ == "__main__":
main()
<commit_msg>Add finally block for deletion of temp file<commit_after>import sys
import tempfile
import os
from subprocess import call
import argparse
guide = """# Remove or modify lines.
# Lines that are prefixed with the # character are filtered out.
# When you are done, save the file and exit.
"""
description = """Interactively filter lines in a pipe.
Example: Delete selected files in a directory
find . -type f | ifilter | xargs rm
"""
def get_editor():
if "EDITOR" in os.environ:
return os.environ["EDITOR"]
if "VISUAL" in os.environ:
return os.environ["VISUAL"]
return "vi"
def main():
try:
parser = argparse.ArgumentParser(
prog='ifilter',
formatter_class=argparse.RawDescriptionHelpFormatter,
description=description)
# Currently args are unused
args = parser.parse_args()
s = sys.stdin.read()
f = tempfile.NamedTemporaryFile(delete=False)
f.write(guide)
f.write(s)
f.close()
editor = get_editor()
call("</dev/tty >/dev/tty %s %s " % (editor, f.name), shell=True)
with open(f.name, "r") as f:
for line in f.readlines():
if not line.startswith("#"):
print line,
finally:
if f is not None:
os.remove(f.name)
if __name__ == "__main__":
main()
|
aed451bc41ee09a9ff11f350881c320557fea71b
|
bin/debug/load_timeline_for_day_and_user.py
|
bin/debug/load_timeline_for_day_and_user.py
|
import json
import bson.json_util as bju
import emission.core.get_database as edb
import sys
if __name__ == '__main__':
if len(sys.argv) != 2:
print "Usage: %s <filename>" % (sys.argv[0])
fn = sys.argv[1]
print "Loading file " + fn
entries = json.load(open(fn), object_hook = bju.object_hook)
for entry in entries:
edb.get_timeseries_db().save(entry)
|
import json
import bson.json_util as bju
import emission.core.get_database as edb
import sys
import argparse
import uuid
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("timeline_filename",
help="the name of the file that contains the json representation of the timeline")
parser.add_argument("-u", "--user_uuid",
help="overwrite the user UUID from the file")
args = parser.parse_args()
fn = args.timeline_filename
print fn
print "Loading file " + fn
tsdb = edb.get_timeseries_db()
override_uuid = None
if args.user_uuid is not None:
override_uuid = uuid.uuid3(uuid.NAMESPACE_URL, "mailto:%s" % args.user_uuid.encode("UTF-8"))
entries = json.load(open(fn), object_hook = bju.object_hook)
for entry in entries:
if args.user_uuid is not None:
entry["user_id"] = override_uuid
tsdb.save(entry)
|
Support loading with a specified username so that we can test more easily
|
Support loading with a specified username so that we can test more easily
Example timelines that can be used with the data are at:
https://github.com/shankari/data-collection-eval/tree/master/results_dec_2015/ucb.sdb.android.1/timeseries
Note that timeline dumps contain object IDS, so reloading the same timeline
with multiple usernames does not work since the `save` with the same objectID
results in the objects getting overwritten.
|
Python
|
bsd-3-clause
|
yw374cornell/e-mission-server,joshzarrabi/e-mission-server,sunil07t/e-mission-server,yw374cornell/e-mission-server,yw374cornell/e-mission-server,e-mission/e-mission-server,e-mission/e-mission-server,e-mission/e-mission-server,joshzarrabi/e-mission-server,shankari/e-mission-server,shankari/e-mission-server,sunil07t/e-mission-server,joshzarrabi/e-mission-server,shankari/e-mission-server,sunil07t/e-mission-server,sunil07t/e-mission-server,joshzarrabi/e-mission-server,e-mission/e-mission-server,yw374cornell/e-mission-server,shankari/e-mission-server
|
import json
import bson.json_util as bju
import emission.core.get_database as edb
import sys
if __name__ == '__main__':
if len(sys.argv) != 2:
print "Usage: %s <filename>" % (sys.argv[0])
fn = sys.argv[1]
print "Loading file " + fn
entries = json.load(open(fn), object_hook = bju.object_hook)
for entry in entries:
edb.get_timeseries_db().save(entry)
Support loading with a specified username so that we can test more easily
Example timelines that can be used with the data are at:
https://github.com/shankari/data-collection-eval/tree/master/results_dec_2015/ucb.sdb.android.1/timeseries
Note that timeline dumps contain object IDS, so reloading the same timeline
with multiple usernames does not work since the `save` with the same objectID
results in the objects getting overwritten.
|
import json
import bson.json_util as bju
import emission.core.get_database as edb
import sys
import argparse
import uuid
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("timeline_filename",
help="the name of the file that contains the json representation of the timeline")
parser.add_argument("-u", "--user_uuid",
help="overwrite the user UUID from the file")
args = parser.parse_args()
fn = args.timeline_filename
print fn
print "Loading file " + fn
tsdb = edb.get_timeseries_db()
override_uuid = None
if args.user_uuid is not None:
override_uuid = uuid.uuid3(uuid.NAMESPACE_URL, "mailto:%s" % args.user_uuid.encode("UTF-8"))
entries = json.load(open(fn), object_hook = bju.object_hook)
for entry in entries:
if args.user_uuid is not None:
entry["user_id"] = override_uuid
tsdb.save(entry)
|
<commit_before>import json
import bson.json_util as bju
import emission.core.get_database as edb
import sys
if __name__ == '__main__':
if len(sys.argv) != 2:
print "Usage: %s <filename>" % (sys.argv[0])
fn = sys.argv[1]
print "Loading file " + fn
entries = json.load(open(fn), object_hook = bju.object_hook)
for entry in entries:
edb.get_timeseries_db().save(entry)
<commit_msg>Support loading with a specified username so that we can test more easily
Example timelines that can be used with the data are at:
https://github.com/shankari/data-collection-eval/tree/master/results_dec_2015/ucb.sdb.android.1/timeseries
Note that timeline dumps contain object IDS, so reloading the same timeline
with multiple usernames does not work since the `save` with the same objectID
results in the objects getting overwritten.<commit_after>
|
import json
import bson.json_util as bju
import emission.core.get_database as edb
import sys
import argparse
import uuid
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("timeline_filename",
help="the name of the file that contains the json representation of the timeline")
parser.add_argument("-u", "--user_uuid",
help="overwrite the user UUID from the file")
args = parser.parse_args()
fn = args.timeline_filename
print fn
print "Loading file " + fn
tsdb = edb.get_timeseries_db()
override_uuid = None
if args.user_uuid is not None:
override_uuid = uuid.uuid3(uuid.NAMESPACE_URL, "mailto:%s" % args.user_uuid.encode("UTF-8"))
entries = json.load(open(fn), object_hook = bju.object_hook)
for entry in entries:
if args.user_uuid is not None:
entry["user_id"] = override_uuid
tsdb.save(entry)
|
import json
import bson.json_util as bju
import emission.core.get_database as edb
import sys
if __name__ == '__main__':
if len(sys.argv) != 2:
print "Usage: %s <filename>" % (sys.argv[0])
fn = sys.argv[1]
print "Loading file " + fn
entries = json.load(open(fn), object_hook = bju.object_hook)
for entry in entries:
edb.get_timeseries_db().save(entry)
Support loading with a specified username so that we can test more easily
Example timelines that can be used with the data are at:
https://github.com/shankari/data-collection-eval/tree/master/results_dec_2015/ucb.sdb.android.1/timeseries
Note that timeline dumps contain object IDS, so reloading the same timeline
with multiple usernames does not work since the `save` with the same objectID
results in the objects getting overwritten.import json
import bson.json_util as bju
import emission.core.get_database as edb
import sys
import argparse
import uuid
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("timeline_filename",
help="the name of the file that contains the json representation of the timeline")
parser.add_argument("-u", "--user_uuid",
help="overwrite the user UUID from the file")
args = parser.parse_args()
fn = args.timeline_filename
print fn
print "Loading file " + fn
tsdb = edb.get_timeseries_db()
override_uuid = None
if args.user_uuid is not None:
override_uuid = uuid.uuid3(uuid.NAMESPACE_URL, "mailto:%s" % args.user_uuid.encode("UTF-8"))
entries = json.load(open(fn), object_hook = bju.object_hook)
for entry in entries:
if args.user_uuid is not None:
entry["user_id"] = override_uuid
tsdb.save(entry)
|
<commit_before>import json
import bson.json_util as bju
import emission.core.get_database as edb
import sys
if __name__ == '__main__':
if len(sys.argv) != 2:
print "Usage: %s <filename>" % (sys.argv[0])
fn = sys.argv[1]
print "Loading file " + fn
entries = json.load(open(fn), object_hook = bju.object_hook)
for entry in entries:
edb.get_timeseries_db().save(entry)
<commit_msg>Support loading with a specified username so that we can test more easily
Example timelines that can be used with the data are at:
https://github.com/shankari/data-collection-eval/tree/master/results_dec_2015/ucb.sdb.android.1/timeseries
Note that timeline dumps contain object IDS, so reloading the same timeline
with multiple usernames does not work since the `save` with the same objectID
results in the objects getting overwritten.<commit_after>import json
import bson.json_util as bju
import emission.core.get_database as edb
import sys
import argparse
import uuid
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("timeline_filename",
help="the name of the file that contains the json representation of the timeline")
parser.add_argument("-u", "--user_uuid",
help="overwrite the user UUID from the file")
args = parser.parse_args()
fn = args.timeline_filename
print fn
print "Loading file " + fn
tsdb = edb.get_timeseries_db()
override_uuid = None
if args.user_uuid is not None:
override_uuid = uuid.uuid3(uuid.NAMESPACE_URL, "mailto:%s" % args.user_uuid.encode("UTF-8"))
entries = json.load(open(fn), object_hook = bju.object_hook)
for entry in entries:
if args.user_uuid is not None:
entry["user_id"] = override_uuid
tsdb.save(entry)
|
318cbaabb289034584cdfb82639c84ed91fc6e2e
|
tests/test_io.py
|
tests/test_io.py
|
import pytest
from pikepdf import Pdf
from io import BytesIO
@pytest.fixture
def sandwich(resources):
# Has XMP, docinfo, <?adobe-xap-filters esc="CRLF"?>, shorthand attribute XMP
return Pdf.open(resources / 'sandwich.pdf')
class LimitedBytesIO(BytesIO):
"""Version of BytesIO that only accepts small reads/writes"""
def write(self, b):
amt = min(len(b), 100)
return super().write(b[:amt])
def test_weird_output_stream(sandwich):
bio = BytesIO()
lbio = LimitedBytesIO()
sandwich.save(bio, static_id=True)
sandwich.save(lbio, static_id=True)
assert bio.getvalue() == lbio.getvalue()
def test_overwrite_with_memory_file(outdir):
(outdir / 'example.pdf').touch()
pdf = Pdf.new()
pdf.save(outdir / 'example.pdf')
|
import pytest
from pikepdf import Pdf
from pikepdf._cpphelpers import fspath
from io import BytesIO
from shutil import copy
import sys
@pytest.fixture
def sandwich(resources):
# Has XMP, docinfo, <?adobe-xap-filters esc="CRLF"?>, shorthand attribute XMP
return Pdf.open(resources / 'sandwich.pdf')
class LimitedBytesIO(BytesIO):
"""Version of BytesIO that only accepts small reads/writes"""
def write(self, b):
amt = min(len(b), 100)
return super().write(b[:amt])
def test_weird_output_stream(sandwich):
bio = BytesIO()
lbio = LimitedBytesIO()
sandwich.save(bio, static_id=True)
sandwich.save(lbio, static_id=True)
assert bio.getvalue() == lbio.getvalue()
def test_overwrite_with_memory_file(outdir):
(outdir / 'example.pdf').touch()
pdf = Pdf.new()
pdf.save(outdir / 'example.pdf')
@pytest.mark.skipif(sys.version_info < (3, 6), reason='pathlib and shutil')
def test_overwrite_input(resources, outdir):
    # pikepdf must refuse to save a PDF onto the file it is still reading from.
    working_copy = outdir / 'sandwich.pdf'
    copy(resources / 'sandwich.pdf', working_copy)
    pdf = Pdf.open(working_copy)
    with pytest.raises(ValueError, match=r'overwrite input file'):
        pdf.save(working_copy)
|
Add test to check that we do not overwrite input file
|
Add test to check that we do not overwrite input file
|
Python
|
mpl-2.0
|
pikepdf/pikepdf,pikepdf/pikepdf,pikepdf/pikepdf
|
import pytest
from pikepdf import Pdf
from io import BytesIO
@pytest.fixture
def sandwich(resources):
# Has XMP, docinfo, <?adobe-xap-filters esc="CRLF"?>, shorthand attribute XMP
return Pdf.open(resources / 'sandwich.pdf')
class LimitedBytesIO(BytesIO):
"""Version of BytesIO that only accepts small reads/writes"""
def write(self, b):
amt = min(len(b), 100)
return super().write(b[:amt])
def test_weird_output_stream(sandwich):
bio = BytesIO()
lbio = LimitedBytesIO()
sandwich.save(bio, static_id=True)
sandwich.save(lbio, static_id=True)
assert bio.getvalue() == lbio.getvalue()
def test_overwrite_with_memory_file(outdir):
(outdir / 'example.pdf').touch()
pdf = Pdf.new()
pdf.save(outdir / 'example.pdf')
Add test to check that we do not overwrite input file
|
import pytest
from pikepdf import Pdf
from pikepdf._cpphelpers import fspath
from io import BytesIO
from shutil import copy
import sys
@pytest.fixture
def sandwich(resources):
# Has XMP, docinfo, <?adobe-xap-filters esc="CRLF"?>, shorthand attribute XMP
return Pdf.open(resources / 'sandwich.pdf')
class LimitedBytesIO(BytesIO):
"""Version of BytesIO that only accepts small reads/writes"""
def write(self, b):
amt = min(len(b), 100)
return super().write(b[:amt])
def test_weird_output_stream(sandwich):
bio = BytesIO()
lbio = LimitedBytesIO()
sandwich.save(bio, static_id=True)
sandwich.save(lbio, static_id=True)
assert bio.getvalue() == lbio.getvalue()
def test_overwrite_with_memory_file(outdir):
(outdir / 'example.pdf').touch()
pdf = Pdf.new()
pdf.save(outdir / 'example.pdf')
@pytest.mark.skipif(sys.version_info < (3, 6), reason='pathlib and shutil')
def test_overwrite_input(resources, outdir):
copy(resources / 'sandwich.pdf', outdir / 'sandwich.pdf')
p = Pdf.open(outdir / 'sandwich.pdf')
with pytest.raises(ValueError, match=r'overwrite input file'):
p.save(outdir / 'sandwich.pdf')
|
<commit_before>import pytest
from pikepdf import Pdf
from io import BytesIO
@pytest.fixture
def sandwich(resources):
# Has XMP, docinfo, <?adobe-xap-filters esc="CRLF"?>, shorthand attribute XMP
return Pdf.open(resources / 'sandwich.pdf')
class LimitedBytesIO(BytesIO):
"""Version of BytesIO that only accepts small reads/writes"""
def write(self, b):
amt = min(len(b), 100)
return super().write(b[:amt])
def test_weird_output_stream(sandwich):
bio = BytesIO()
lbio = LimitedBytesIO()
sandwich.save(bio, static_id=True)
sandwich.save(lbio, static_id=True)
assert bio.getvalue() == lbio.getvalue()
def test_overwrite_with_memory_file(outdir):
(outdir / 'example.pdf').touch()
pdf = Pdf.new()
pdf.save(outdir / 'example.pdf')
<commit_msg>Add test to check that we do not overwrite input file<commit_after>
|
import pytest
from pikepdf import Pdf
from pikepdf._cpphelpers import fspath
from io import BytesIO
from shutil import copy
import sys
@pytest.fixture
def sandwich(resources):
# Has XMP, docinfo, <?adobe-xap-filters esc="CRLF"?>, shorthand attribute XMP
return Pdf.open(resources / 'sandwich.pdf')
class LimitedBytesIO(BytesIO):
"""Version of BytesIO that only accepts small reads/writes"""
def write(self, b):
amt = min(len(b), 100)
return super().write(b[:amt])
def test_weird_output_stream(sandwich):
bio = BytesIO()
lbio = LimitedBytesIO()
sandwich.save(bio, static_id=True)
sandwich.save(lbio, static_id=True)
assert bio.getvalue() == lbio.getvalue()
def test_overwrite_with_memory_file(outdir):
(outdir / 'example.pdf').touch()
pdf = Pdf.new()
pdf.save(outdir / 'example.pdf')
@pytest.mark.skipif(sys.version_info < (3, 6), reason='pathlib and shutil')
def test_overwrite_input(resources, outdir):
copy(resources / 'sandwich.pdf', outdir / 'sandwich.pdf')
p = Pdf.open(outdir / 'sandwich.pdf')
with pytest.raises(ValueError, match=r'overwrite input file'):
p.save(outdir / 'sandwich.pdf')
|
import pytest
from pikepdf import Pdf
from io import BytesIO
@pytest.fixture
def sandwich(resources):
# Has XMP, docinfo, <?adobe-xap-filters esc="CRLF"?>, shorthand attribute XMP
return Pdf.open(resources / 'sandwich.pdf')
class LimitedBytesIO(BytesIO):
"""Version of BytesIO that only accepts small reads/writes"""
def write(self, b):
amt = min(len(b), 100)
return super().write(b[:amt])
def test_weird_output_stream(sandwich):
bio = BytesIO()
lbio = LimitedBytesIO()
sandwich.save(bio, static_id=True)
sandwich.save(lbio, static_id=True)
assert bio.getvalue() == lbio.getvalue()
def test_overwrite_with_memory_file(outdir):
(outdir / 'example.pdf').touch()
pdf = Pdf.new()
pdf.save(outdir / 'example.pdf')
Add test to check that we do not overwrite input fileimport pytest
from pikepdf import Pdf
from pikepdf._cpphelpers import fspath
from io import BytesIO
from shutil import copy
import sys
@pytest.fixture
def sandwich(resources):
# Has XMP, docinfo, <?adobe-xap-filters esc="CRLF"?>, shorthand attribute XMP
return Pdf.open(resources / 'sandwich.pdf')
class LimitedBytesIO(BytesIO):
"""Version of BytesIO that only accepts small reads/writes"""
def write(self, b):
amt = min(len(b), 100)
return super().write(b[:amt])
def test_weird_output_stream(sandwich):
bio = BytesIO()
lbio = LimitedBytesIO()
sandwich.save(bio, static_id=True)
sandwich.save(lbio, static_id=True)
assert bio.getvalue() == lbio.getvalue()
def test_overwrite_with_memory_file(outdir):
(outdir / 'example.pdf').touch()
pdf = Pdf.new()
pdf.save(outdir / 'example.pdf')
@pytest.mark.skipif(sys.version_info < (3, 6), reason='pathlib and shutil')
def test_overwrite_input(resources, outdir):
copy(resources / 'sandwich.pdf', outdir / 'sandwich.pdf')
p = Pdf.open(outdir / 'sandwich.pdf')
with pytest.raises(ValueError, match=r'overwrite input file'):
p.save(outdir / 'sandwich.pdf')
|
<commit_before>import pytest
from pikepdf import Pdf
from io import BytesIO
@pytest.fixture
def sandwich(resources):
# Has XMP, docinfo, <?adobe-xap-filters esc="CRLF"?>, shorthand attribute XMP
return Pdf.open(resources / 'sandwich.pdf')
class LimitedBytesIO(BytesIO):
"""Version of BytesIO that only accepts small reads/writes"""
def write(self, b):
amt = min(len(b), 100)
return super().write(b[:amt])
def test_weird_output_stream(sandwich):
bio = BytesIO()
lbio = LimitedBytesIO()
sandwich.save(bio, static_id=True)
sandwich.save(lbio, static_id=True)
assert bio.getvalue() == lbio.getvalue()
def test_overwrite_with_memory_file(outdir):
(outdir / 'example.pdf').touch()
pdf = Pdf.new()
pdf.save(outdir / 'example.pdf')
<commit_msg>Add test to check that we do not overwrite input file<commit_after>import pytest
from pikepdf import Pdf
from pikepdf._cpphelpers import fspath
from io import BytesIO
from shutil import copy
import sys
@pytest.fixture
def sandwich(resources):
# Has XMP, docinfo, <?adobe-xap-filters esc="CRLF"?>, shorthand attribute XMP
return Pdf.open(resources / 'sandwich.pdf')
class LimitedBytesIO(BytesIO):
"""Version of BytesIO that only accepts small reads/writes"""
def write(self, b):
amt = min(len(b), 100)
return super().write(b[:amt])
def test_weird_output_stream(sandwich):
bio = BytesIO()
lbio = LimitedBytesIO()
sandwich.save(bio, static_id=True)
sandwich.save(lbio, static_id=True)
assert bio.getvalue() == lbio.getvalue()
def test_overwrite_with_memory_file(outdir):
(outdir / 'example.pdf').touch()
pdf = Pdf.new()
pdf.save(outdir / 'example.pdf')
@pytest.mark.skipif(sys.version_info < (3, 6), reason='pathlib and shutil')
def test_overwrite_input(resources, outdir):
copy(resources / 'sandwich.pdf', outdir / 'sandwich.pdf')
p = Pdf.open(outdir / 'sandwich.pdf')
with pytest.raises(ValueError, match=r'overwrite input file'):
p.save(outdir / 'sandwich.pdf')
|
b8f6027f475025f1b04c26e1ff4e9c6ff659e362
|
templates/button.py
|
templates/button.py
|
from copy import deepcopy as copy
from text import TextTemplate
# Give TextTemplate a convenience accessor for its rendered text.
TextTemplate.get_text = lambda self: self.get_message()['text']

# Maximum text length for a button-template message.
# NOTE(review): another revision of this file uses 640 -- confirm the
# current platform limit before relying on this value.
TEXT_CHARACTER_LIMIT = 320

# Skeleton payload for a Messenger "button" template; ButtonTemplate
# deep-copies template['value'] per instance and fills in text/buttons.
template = {
    'template_type': 'button',
    'value': {
        'attachment': {
            'type': 'template',
            'payload': {
                'template_type': 'button',
                'text': '',
                'buttons': []
            }
        }
    }
}
class ButtonTemplate:
    """Builder for a Messenger button-template message.

    Starts from a deep copy of the module-level skeleton and accumulates
    buttons; get_message() returns the completed payload dict.
    """

    def __init__(self, text=''):
        self.template = copy(template['value'])
        self.text = text

    def _append_button(self, button):
        # Internal helper: attach a fully-formed button dict to the payload.
        self.template['attachment']['payload']['buttons'].append(button)

    def add_web_url(self, title='', url=''):
        self._append_button({'type': 'web_url', 'title': title, 'url': url})

    def add_postback(self, title='', payload=''):
        self._append_button({'type': 'postback', 'title': title, 'payload': payload})

    def set_text(self, text=''):
        self.text = text

    def get_message(self):
        # Inject the (possibly updated) text just before handing out the payload.
        self.template['attachment']['payload']['text'] = self.text
        return self.template
|
from copy import deepcopy as copy
from text import TextTemplate
TextTemplate.get_text = lambda self: self.get_message()['text']
TEXT_CHARACTER_LIMIT = 640
template = {
'template_type': 'button',
'value': {
'attachment': {
'type': 'template',
'payload': {
'template_type': 'button',
'text': '',
'buttons': []
}
}
}
}
class ButtonTemplate:
def __init__(self, text=''):
self.template = copy(template['value'])
self.text = text
def add_web_url(self, title='', url=''):
web_url_button = {}
web_url_button['type'] = 'web_url'
web_url_button['title'] = title
web_url_button['url'] = url
self.template['attachment']['payload']['buttons'].append(web_url_button)
def add_postback(self, title='', payload=''):
postback_button = {}
postback_button['type'] = 'postback'
postback_button['title'] = title
postback_button['payload'] = payload
self.template['attachment']['payload']['buttons'].append(postback_button)
def set_text(self, text=''):
self.text = text
def get_message(self):
self.template['attachment']['payload']['text'] = self.text
return self.template
|
Update TEXT_CHARACTER_LIMIT from 320 to 640 in ButtonTemplate
|
Update TEXT_CHARACTER_LIMIT from 320 to 640 in ButtonTemplate
|
Python
|
mit
|
jaskaransarkaria/JARVIS-on-Messenger,ZuZuD/JARVIS-on-Messenger,swapagarwal/JARVIS-on-Messenger,edadesd/JARVIS-on-Messenger
|
from copy import deepcopy as copy
from text import TextTemplate
TextTemplate.get_text = lambda self: self.get_message()['text']
TEXT_CHARACTER_LIMIT = 320
template = {
'template_type': 'button',
'value': {
'attachment': {
'type': 'template',
'payload': {
'template_type': 'button',
'text': '',
'buttons': []
}
}
}
}
class ButtonTemplate:
def __init__(self, text=''):
self.template = copy(template['value'])
self.text = text
def add_web_url(self, title='', url=''):
web_url_button = {}
web_url_button['type'] = 'web_url'
web_url_button['title'] = title
web_url_button['url'] = url
self.template['attachment']['payload']['buttons'].append(web_url_button)
def add_postback(self, title='', payload=''):
postback_button = {}
postback_button['type'] = 'postback'
postback_button['title'] = title
postback_button['payload'] = payload
self.template['attachment']['payload']['buttons'].append(postback_button)
def set_text(self, text=''):
self.text = text
def get_message(self):
self.template['attachment']['payload']['text'] = self.text
return self.template
Update TEXT_CHARACTER_LIMIT from 320 to 640 in ButtonTemplate
|
from copy import deepcopy as copy
from text import TextTemplate
TextTemplate.get_text = lambda self: self.get_message()['text']
TEXT_CHARACTER_LIMIT = 640
template = {
'template_type': 'button',
'value': {
'attachment': {
'type': 'template',
'payload': {
'template_type': 'button',
'text': '',
'buttons': []
}
}
}
}
class ButtonTemplate:
def __init__(self, text=''):
self.template = copy(template['value'])
self.text = text
def add_web_url(self, title='', url=''):
web_url_button = {}
web_url_button['type'] = 'web_url'
web_url_button['title'] = title
web_url_button['url'] = url
self.template['attachment']['payload']['buttons'].append(web_url_button)
def add_postback(self, title='', payload=''):
postback_button = {}
postback_button['type'] = 'postback'
postback_button['title'] = title
postback_button['payload'] = payload
self.template['attachment']['payload']['buttons'].append(postback_button)
def set_text(self, text=''):
self.text = text
def get_message(self):
self.template['attachment']['payload']['text'] = self.text
return self.template
|
<commit_before>from copy import deepcopy as copy
from text import TextTemplate
TextTemplate.get_text = lambda self: self.get_message()['text']
TEXT_CHARACTER_LIMIT = 320
template = {
'template_type': 'button',
'value': {
'attachment': {
'type': 'template',
'payload': {
'template_type': 'button',
'text': '',
'buttons': []
}
}
}
}
class ButtonTemplate:
def __init__(self, text=''):
self.template = copy(template['value'])
self.text = text
def add_web_url(self, title='', url=''):
web_url_button = {}
web_url_button['type'] = 'web_url'
web_url_button['title'] = title
web_url_button['url'] = url
self.template['attachment']['payload']['buttons'].append(web_url_button)
def add_postback(self, title='', payload=''):
postback_button = {}
postback_button['type'] = 'postback'
postback_button['title'] = title
postback_button['payload'] = payload
self.template['attachment']['payload']['buttons'].append(postback_button)
def set_text(self, text=''):
self.text = text
def get_message(self):
self.template['attachment']['payload']['text'] = self.text
return self.template
<commit_msg>Update TEXT_CHARACTER_LIMIT from 320 to 640 in ButtonTemplate<commit_after>
|
from copy import deepcopy as copy
from text import TextTemplate
TextTemplate.get_text = lambda self: self.get_message()['text']
TEXT_CHARACTER_LIMIT = 640
template = {
'template_type': 'button',
'value': {
'attachment': {
'type': 'template',
'payload': {
'template_type': 'button',
'text': '',
'buttons': []
}
}
}
}
class ButtonTemplate:
def __init__(self, text=''):
self.template = copy(template['value'])
self.text = text
def add_web_url(self, title='', url=''):
web_url_button = {}
web_url_button['type'] = 'web_url'
web_url_button['title'] = title
web_url_button['url'] = url
self.template['attachment']['payload']['buttons'].append(web_url_button)
def add_postback(self, title='', payload=''):
postback_button = {}
postback_button['type'] = 'postback'
postback_button['title'] = title
postback_button['payload'] = payload
self.template['attachment']['payload']['buttons'].append(postback_button)
def set_text(self, text=''):
self.text = text
def get_message(self):
self.template['attachment']['payload']['text'] = self.text
return self.template
|
from copy import deepcopy as copy
from text import TextTemplate
TextTemplate.get_text = lambda self: self.get_message()['text']
TEXT_CHARACTER_LIMIT = 320
template = {
'template_type': 'button',
'value': {
'attachment': {
'type': 'template',
'payload': {
'template_type': 'button',
'text': '',
'buttons': []
}
}
}
}
class ButtonTemplate:
def __init__(self, text=''):
self.template = copy(template['value'])
self.text = text
def add_web_url(self, title='', url=''):
web_url_button = {}
web_url_button['type'] = 'web_url'
web_url_button['title'] = title
web_url_button['url'] = url
self.template['attachment']['payload']['buttons'].append(web_url_button)
def add_postback(self, title='', payload=''):
postback_button = {}
postback_button['type'] = 'postback'
postback_button['title'] = title
postback_button['payload'] = payload
self.template['attachment']['payload']['buttons'].append(postback_button)
def set_text(self, text=''):
self.text = text
def get_message(self):
self.template['attachment']['payload']['text'] = self.text
return self.template
Update TEXT_CHARACTER_LIMIT from 320 to 640 in ButtonTemplatefrom copy import deepcopy as copy
from text import TextTemplate
TextTemplate.get_text = lambda self: self.get_message()['text']
TEXT_CHARACTER_LIMIT = 640
template = {
'template_type': 'button',
'value': {
'attachment': {
'type': 'template',
'payload': {
'template_type': 'button',
'text': '',
'buttons': []
}
}
}
}
class ButtonTemplate:
def __init__(self, text=''):
self.template = copy(template['value'])
self.text = text
def add_web_url(self, title='', url=''):
web_url_button = {}
web_url_button['type'] = 'web_url'
web_url_button['title'] = title
web_url_button['url'] = url
self.template['attachment']['payload']['buttons'].append(web_url_button)
def add_postback(self, title='', payload=''):
postback_button = {}
postback_button['type'] = 'postback'
postback_button['title'] = title
postback_button['payload'] = payload
self.template['attachment']['payload']['buttons'].append(postback_button)
def set_text(self, text=''):
self.text = text
def get_message(self):
self.template['attachment']['payload']['text'] = self.text
return self.template
|
<commit_before>from copy import deepcopy as copy
from text import TextTemplate
TextTemplate.get_text = lambda self: self.get_message()['text']
TEXT_CHARACTER_LIMIT = 320
template = {
'template_type': 'button',
'value': {
'attachment': {
'type': 'template',
'payload': {
'template_type': 'button',
'text': '',
'buttons': []
}
}
}
}
class ButtonTemplate:
def __init__(self, text=''):
self.template = copy(template['value'])
self.text = text
def add_web_url(self, title='', url=''):
web_url_button = {}
web_url_button['type'] = 'web_url'
web_url_button['title'] = title
web_url_button['url'] = url
self.template['attachment']['payload']['buttons'].append(web_url_button)
def add_postback(self, title='', payload=''):
postback_button = {}
postback_button['type'] = 'postback'
postback_button['title'] = title
postback_button['payload'] = payload
self.template['attachment']['payload']['buttons'].append(postback_button)
def set_text(self, text=''):
self.text = text
def get_message(self):
self.template['attachment']['payload']['text'] = self.text
return self.template
<commit_msg>Update TEXT_CHARACTER_LIMIT from 320 to 640 in ButtonTemplate<commit_after>from copy import deepcopy as copy
from text import TextTemplate
TextTemplate.get_text = lambda self: self.get_message()['text']
TEXT_CHARACTER_LIMIT = 640
template = {
'template_type': 'button',
'value': {
'attachment': {
'type': 'template',
'payload': {
'template_type': 'button',
'text': '',
'buttons': []
}
}
}
}
class ButtonTemplate:
def __init__(self, text=''):
self.template = copy(template['value'])
self.text = text
def add_web_url(self, title='', url=''):
web_url_button = {}
web_url_button['type'] = 'web_url'
web_url_button['title'] = title
web_url_button['url'] = url
self.template['attachment']['payload']['buttons'].append(web_url_button)
def add_postback(self, title='', payload=''):
postback_button = {}
postback_button['type'] = 'postback'
postback_button['title'] = title
postback_button['payload'] = payload
self.template['attachment']['payload']['buttons'].append(postback_button)
def set_text(self, text=''):
self.text = text
def get_message(self):
self.template['attachment']['payload']['text'] = self.text
return self.template
|
2fa0c333cb92557b5ba39e91db41327ae381b6a7
|
Tools/px4params/xmlout.py
|
Tools/px4params/xmlout.py
|
from xml.dom.minidom import getDOMImplementation
import codecs
class XMLOutput():
    """Assemble a DOM document describing parameter groups.

    Shape: <parameters><group name="..."><parameter><code>value</code>
    ...</parameter></group></parameters>.
    """

    def __init__(self, groups):
        doc = getDOMImplementation().createDocument(None, "parameters", None)
        root = doc.documentElement
        for group in groups:
            group_node = doc.createElement("group")
            group_node.setAttribute("name", group.GetName())
            root.appendChild(group_node)
            for param in group.GetParams():
                param_node = doc.createElement("parameter")
                group_node.appendChild(param_node)
                # One child element per field, named after the field code,
                # containing the field value as a text node.
                for code in param.GetFieldCodes():
                    field_node = doc.createElement(code)
                    field_node.appendChild(doc.createTextNode(param.GetFieldValue(code)))
                    param_node.appendChild(field_node)
        self.xml_document = doc

    def Save(self, filename):
        """Write the document to *filename* as pretty-printed UTF-8 XML."""
        with codecs.open(filename, 'w', 'utf-8') as f:
            self.xml_document.writexml(f, indent=" ", addindent=" ", newl="\n")
|
from xml.dom.minidom import getDOMImplementation
import codecs
class XMLOutput():
    """Assemble a DOM document describing parameter groups.

    Shape: <parameters><version>1</version><group name="...">
    <parameter><code>value</code>...</parameter></group></parameters>.
    """
    def __init__(self, groups):
        impl = getDOMImplementation()
        xml_document = impl.createDocument(None, "parameters", None)
        xml_parameters = xml_document.documentElement
        # Embed a schema version element so consumers can detect format changes.
        xml_version = xml_document.createElement("version")
        xml_parameters.appendChild(xml_version)
        xml_version_value = xml_document.createTextNode("1")
        xml_version.appendChild(xml_version_value)
        for group in groups:
            xml_group = xml_document.createElement("group")
            xml_group.setAttribute("name", group.GetName())
            xml_parameters.appendChild(xml_group)
            for param in group.GetParams():
                xml_param = xml_document.createElement("parameter")
                xml_group.appendChild(xml_param)
                # One child element per field, named after the field code,
                # containing the field value as a text node.
                for code in param.GetFieldCodes():
                    value = param.GetFieldValue(code)
                    xml_field = xml_document.createElement(code)
                    xml_param.appendChild(xml_field)
                    xml_value = xml_document.createTextNode(value)
                    xml_field.appendChild(xml_value)
        self.xml_document = xml_document

    def Save(self, filename):
        """Write the document to *filename* as pretty-printed UTF-8 XML."""
        with codecs.open(filename, 'w', 'utf-8') as f:
            self.xml_document.writexml(f, indent=" ", addindent=" ", newl="\n")
|
Add version number to parameter meta data
|
Add version number to parameter meta data
|
Python
|
mit
|
darknight-007/Firmware,Aerotenna/Firmware,mcgill-robotics/Firmware,PX4/Firmware,PX4/Firmware,acfloria/Firmware,acfloria/Firmware,mcgill-robotics/Firmware,mcgill-robotics/Firmware,jlecoeur/Firmware,dagar/Firmware,PX4/Firmware,acfloria/Firmware,mje-nz/PX4-Firmware,dagar/Firmware,darknight-007/Firmware,PX4/Firmware,darknight-007/Firmware,dagar/Firmware,Aerotenna/Firmware,mcgill-robotics/Firmware,mje-nz/PX4-Firmware,krbeverx/Firmware,krbeverx/Firmware,PX4/Firmware,mcgill-robotics/Firmware,Aerotenna/Firmware,acfloria/Firmware,acfloria/Firmware,jlecoeur/Firmware,dagar/Firmware,krbeverx/Firmware,mje-nz/PX4-Firmware,Aerotenna/Firmware,Aerotenna/Firmware,darknight-007/Firmware,dagar/Firmware,dagar/Firmware,jlecoeur/Firmware,jlecoeur/Firmware,jlecoeur/Firmware,mje-nz/PX4-Firmware,Aerotenna/Firmware,acfloria/Firmware,Aerotenna/Firmware,acfloria/Firmware,PX4/Firmware,dagar/Firmware,jlecoeur/Firmware,krbeverx/Firmware,jlecoeur/Firmware,darknight-007/Firmware,mje-nz/PX4-Firmware,krbeverx/Firmware,mcgill-robotics/Firmware,mje-nz/PX4-Firmware,mje-nz/PX4-Firmware,krbeverx/Firmware,mcgill-robotics/Firmware,krbeverx/Firmware,PX4/Firmware,jlecoeur/Firmware
|
from xml.dom.minidom import getDOMImplementation
import codecs
class XMLOutput():
def __init__(self, groups):
impl = getDOMImplementation()
xml_document = impl.createDocument(None, "parameters", None)
xml_parameters = xml_document.documentElement
for group in groups:
xml_group = xml_document.createElement("group")
xml_group.setAttribute("name", group.GetName())
xml_parameters.appendChild(xml_group)
for param in group.GetParams():
xml_param = xml_document.createElement("parameter")
xml_group.appendChild(xml_param)
for code in param.GetFieldCodes():
value = param.GetFieldValue(code)
xml_field = xml_document.createElement(code)
xml_param.appendChild(xml_field)
xml_value = xml_document.createTextNode(value)
xml_field.appendChild(xml_value)
self.xml_document = xml_document
def Save(self, filename):
with codecs.open(filename, 'w', 'utf-8') as f:
self.xml_document.writexml(f, indent=" ", addindent=" ", newl="\n")
Add version number to parameter meta data
|
from xml.dom.minidom import getDOMImplementation
import codecs
class XMLOutput():
def __init__(self, groups):
impl = getDOMImplementation()
xml_document = impl.createDocument(None, "parameters", None)
xml_parameters = xml_document.documentElement
xml_version = xml_document.createElement("version")
xml_parameters.appendChild(xml_version)
xml_version_value = xml_document.createTextNode("1")
xml_version.appendChild(xml_version_value)
for group in groups:
xml_group = xml_document.createElement("group")
xml_group.setAttribute("name", group.GetName())
xml_parameters.appendChild(xml_group)
for param in group.GetParams():
xml_param = xml_document.createElement("parameter")
xml_group.appendChild(xml_param)
for code in param.GetFieldCodes():
value = param.GetFieldValue(code)
xml_field = xml_document.createElement(code)
xml_param.appendChild(xml_field)
xml_value = xml_document.createTextNode(value)
xml_field.appendChild(xml_value)
self.xml_document = xml_document
def Save(self, filename):
with codecs.open(filename, 'w', 'utf-8') as f:
self.xml_document.writexml(f, indent=" ", addindent=" ", newl="\n")
|
<commit_before>from xml.dom.minidom import getDOMImplementation
import codecs
class XMLOutput():
def __init__(self, groups):
impl = getDOMImplementation()
xml_document = impl.createDocument(None, "parameters", None)
xml_parameters = xml_document.documentElement
for group in groups:
xml_group = xml_document.createElement("group")
xml_group.setAttribute("name", group.GetName())
xml_parameters.appendChild(xml_group)
for param in group.GetParams():
xml_param = xml_document.createElement("parameter")
xml_group.appendChild(xml_param)
for code in param.GetFieldCodes():
value = param.GetFieldValue(code)
xml_field = xml_document.createElement(code)
xml_param.appendChild(xml_field)
xml_value = xml_document.createTextNode(value)
xml_field.appendChild(xml_value)
self.xml_document = xml_document
def Save(self, filename):
with codecs.open(filename, 'w', 'utf-8') as f:
self.xml_document.writexml(f, indent=" ", addindent=" ", newl="\n")
<commit_msg>Add version number to parameter meta data<commit_after>
|
from xml.dom.minidom import getDOMImplementation
import codecs
class XMLOutput():
def __init__(self, groups):
impl = getDOMImplementation()
xml_document = impl.createDocument(None, "parameters", None)
xml_parameters = xml_document.documentElement
xml_version = xml_document.createElement("version")
xml_parameters.appendChild(xml_version)
xml_version_value = xml_document.createTextNode("1")
xml_version.appendChild(xml_version_value)
for group in groups:
xml_group = xml_document.createElement("group")
xml_group.setAttribute("name", group.GetName())
xml_parameters.appendChild(xml_group)
for param in group.GetParams():
xml_param = xml_document.createElement("parameter")
xml_group.appendChild(xml_param)
for code in param.GetFieldCodes():
value = param.GetFieldValue(code)
xml_field = xml_document.createElement(code)
xml_param.appendChild(xml_field)
xml_value = xml_document.createTextNode(value)
xml_field.appendChild(xml_value)
self.xml_document = xml_document
def Save(self, filename):
with codecs.open(filename, 'w', 'utf-8') as f:
self.xml_document.writexml(f, indent=" ", addindent=" ", newl="\n")
|
from xml.dom.minidom import getDOMImplementation
import codecs
class XMLOutput():
def __init__(self, groups):
impl = getDOMImplementation()
xml_document = impl.createDocument(None, "parameters", None)
xml_parameters = xml_document.documentElement
for group in groups:
xml_group = xml_document.createElement("group")
xml_group.setAttribute("name", group.GetName())
xml_parameters.appendChild(xml_group)
for param in group.GetParams():
xml_param = xml_document.createElement("parameter")
xml_group.appendChild(xml_param)
for code in param.GetFieldCodes():
value = param.GetFieldValue(code)
xml_field = xml_document.createElement(code)
xml_param.appendChild(xml_field)
xml_value = xml_document.createTextNode(value)
xml_field.appendChild(xml_value)
self.xml_document = xml_document
def Save(self, filename):
with codecs.open(filename, 'w', 'utf-8') as f:
self.xml_document.writexml(f, indent=" ", addindent=" ", newl="\n")
Add version number to parameter meta datafrom xml.dom.minidom import getDOMImplementation
import codecs
class XMLOutput():
def __init__(self, groups):
impl = getDOMImplementation()
xml_document = impl.createDocument(None, "parameters", None)
xml_parameters = xml_document.documentElement
xml_version = xml_document.createElement("version")
xml_parameters.appendChild(xml_version)
xml_version_value = xml_document.createTextNode("1")
xml_version.appendChild(xml_version_value)
for group in groups:
xml_group = xml_document.createElement("group")
xml_group.setAttribute("name", group.GetName())
xml_parameters.appendChild(xml_group)
for param in group.GetParams():
xml_param = xml_document.createElement("parameter")
xml_group.appendChild(xml_param)
for code in param.GetFieldCodes():
value = param.GetFieldValue(code)
xml_field = xml_document.createElement(code)
xml_param.appendChild(xml_field)
xml_value = xml_document.createTextNode(value)
xml_field.appendChild(xml_value)
self.xml_document = xml_document
def Save(self, filename):
with codecs.open(filename, 'w', 'utf-8') as f:
self.xml_document.writexml(f, indent=" ", addindent=" ", newl="\n")
|
<commit_before>from xml.dom.minidom import getDOMImplementation
import codecs
class XMLOutput():
def __init__(self, groups):
impl = getDOMImplementation()
xml_document = impl.createDocument(None, "parameters", None)
xml_parameters = xml_document.documentElement
for group in groups:
xml_group = xml_document.createElement("group")
xml_group.setAttribute("name", group.GetName())
xml_parameters.appendChild(xml_group)
for param in group.GetParams():
xml_param = xml_document.createElement("parameter")
xml_group.appendChild(xml_param)
for code in param.GetFieldCodes():
value = param.GetFieldValue(code)
xml_field = xml_document.createElement(code)
xml_param.appendChild(xml_field)
xml_value = xml_document.createTextNode(value)
xml_field.appendChild(xml_value)
self.xml_document = xml_document
def Save(self, filename):
with codecs.open(filename, 'w', 'utf-8') as f:
self.xml_document.writexml(f, indent=" ", addindent=" ", newl="\n")
<commit_msg>Add version number to parameter meta data<commit_after>from xml.dom.minidom import getDOMImplementation
import codecs
class XMLOutput():
def __init__(self, groups):
impl = getDOMImplementation()
xml_document = impl.createDocument(None, "parameters", None)
xml_parameters = xml_document.documentElement
xml_version = xml_document.createElement("version")
xml_parameters.appendChild(xml_version)
xml_version_value = xml_document.createTextNode("1")
xml_version.appendChild(xml_version_value)
for group in groups:
xml_group = xml_document.createElement("group")
xml_group.setAttribute("name", group.GetName())
xml_parameters.appendChild(xml_group)
for param in group.GetParams():
xml_param = xml_document.createElement("parameter")
xml_group.appendChild(xml_param)
for code in param.GetFieldCodes():
value = param.GetFieldValue(code)
xml_field = xml_document.createElement(code)
xml_param.appendChild(xml_field)
xml_value = xml_document.createTextNode(value)
xml_field.appendChild(xml_value)
self.xml_document = xml_document
def Save(self, filename):
with codecs.open(filename, 'w', 'utf-8') as f:
self.xml_document.writexml(f, indent=" ", addindent=" ", newl="\n")
|
04703b3d13d512c1a4d1c24f6e8a02c6164f5d53
|
tests/test_utils.py
|
tests/test_utils.py
|
import unittest
from app import create_app, db
from app.utils import get_or_create
from app.models import User
class TestUtils(unittest.TestCase):
    """Tests for app.utils.get_or_create."""

    def setUp(self):
        # Fresh application context and database schema for every test.
        self.app = create_app("testing")
        self.app_ctx = self.app.app_context()
        self.app_ctx.push()
        db.create_all()

    def tearDown(self):
        db.session.remove()
        db.drop_all()
        self.app_ctx.pop()

    def test_get_or_create(self):
        # First call creates the row; a second identical call must return
        # the same row and report that nothing new was created.
        first, was_created = get_or_create(User, name="foo", social_id="bar")
        db.session.add(first)
        db.session.commit()
        second, was_created_again = get_or_create(User, name="foo", social_id="bar")
        assert was_created
        assert not was_created_again
        assert first == second
|
import unittest
from app import create_app, db
from app.utils import get_or_create
from app.models import User
class TestUtils(unittest.TestCase):
    """Tests for app.utils.get_or_create."""

    def setUp(self):
        # Fresh application context and database schema for every test.
        self.app = create_app("testing")
        self.app_ctx = self.app.app_context()
        self.app_ctx.push()
        db.create_all()

    def tearDown(self):
        db.session.remove()
        db.drop_all()
        self.app_ctx.pop()

    def test_get_or_create(self):
        # First call creates the row; a second identical call must return
        # the same row and report that nothing new was created.
        user1, created1 = get_or_create(User, name="foo", social_id="bar")
        db.session.add(user1)
        db.session.commit()
        user2, created2 = get_or_create(User, name="foo", social_id="bar")
        self.assertTrue(created1)
        self.assertFalse(created2)
        # assertEquals is a deprecated alias (removed in Python 3.12);
        # use the canonical assertEqual.
        self.assertEqual(user1, user2)
|
Use class methods for unittests
|
Use class methods for unittests
|
Python
|
mit
|
Encrylize/MyDictionary,Encrylize/MyDictionary,Encrylize/MyDictionary
|
import unittest
from app import create_app, db
from app.utils import get_or_create
from app.models import User
class TestUtils(unittest.TestCase):
def setUp(self):
self.app = create_app("testing")
self.app_ctx = self.app.app_context()
self.app_ctx.push()
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_ctx.pop()
def test_get_or_create(self):
user1, created1 = get_or_create(User, name="foo", social_id="bar")
db.session.add(user1)
db.session.commit()
user2, created2 = get_or_create(User, name="foo", social_id="bar")
assert created1
assert not created2
assert user1 == user2Use class methods for unittests
|
import unittest
from app import create_app, db
from app.utils import get_or_create
from app.models import User
class TestUtils(unittest.TestCase):
def setUp(self):
self.app = create_app("testing")
self.app_ctx = self.app.app_context()
self.app_ctx.push()
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_ctx.pop()
def test_get_or_create(self):
user1, created1 = get_or_create(User, name="foo", social_id="bar")
db.session.add(user1)
db.session.commit()
user2, created2 = get_or_create(User, name="foo", social_id="bar")
self.assertTrue(created1)
self.assertFalse(created2)
self.assertEquals(user1, user2)
|
<commit_before>import unittest
from app import create_app, db
from app.utils import get_or_create
from app.models import User
class TestUtils(unittest.TestCase):
def setUp(self):
self.app = create_app("testing")
self.app_ctx = self.app.app_context()
self.app_ctx.push()
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_ctx.pop()
def test_get_or_create(self):
user1, created1 = get_or_create(User, name="foo", social_id="bar")
db.session.add(user1)
db.session.commit()
user2, created2 = get_or_create(User, name="foo", social_id="bar")
assert created1
assert not created2
assert user1 == user2<commit_msg>Use class methods for unittests<commit_after>
|
import unittest
from app import create_app, db
from app.utils import get_or_create
from app.models import User
class TestUtils(unittest.TestCase):
def setUp(self):
self.app = create_app("testing")
self.app_ctx = self.app.app_context()
self.app_ctx.push()
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_ctx.pop()
def test_get_or_create(self):
user1, created1 = get_or_create(User, name="foo", social_id="bar")
db.session.add(user1)
db.session.commit()
user2, created2 = get_or_create(User, name="foo", social_id="bar")
self.assertTrue(created1)
self.assertFalse(created2)
self.assertEquals(user1, user2)
|
import unittest
from app import create_app, db
from app.utils import get_or_create
from app.models import User
class TestUtils(unittest.TestCase):
def setUp(self):
self.app = create_app("testing")
self.app_ctx = self.app.app_context()
self.app_ctx.push()
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_ctx.pop()
def test_get_or_create(self):
user1, created1 = get_or_create(User, name="foo", social_id="bar")
db.session.add(user1)
db.session.commit()
user2, created2 = get_or_create(User, name="foo", social_id="bar")
assert created1
assert not created2
assert user1 == user2Use class methods for unittestsimport unittest
from app import create_app, db
from app.utils import get_or_create
from app.models import User
class TestUtils(unittest.TestCase):
def setUp(self):
self.app = create_app("testing")
self.app_ctx = self.app.app_context()
self.app_ctx.push()
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_ctx.pop()
def test_get_or_create(self):
user1, created1 = get_or_create(User, name="foo", social_id="bar")
db.session.add(user1)
db.session.commit()
user2, created2 = get_or_create(User, name="foo", social_id="bar")
self.assertTrue(created1)
self.assertFalse(created2)
self.assertEquals(user1, user2)
|
<commit_before>import unittest
from app import create_app, db
from app.utils import get_or_create
from app.models import User
class TestUtils(unittest.TestCase):
def setUp(self):
self.app = create_app("testing")
self.app_ctx = self.app.app_context()
self.app_ctx.push()
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_ctx.pop()
def test_get_or_create(self):
user1, created1 = get_or_create(User, name="foo", social_id="bar")
db.session.add(user1)
db.session.commit()
user2, created2 = get_or_create(User, name="foo", social_id="bar")
assert created1
assert not created2
assert user1 == user2<commit_msg>Use class methods for unittests<commit_after>import unittest
from app import create_app, db
from app.utils import get_or_create
from app.models import User
class TestUtils(unittest.TestCase):
def setUp(self):
self.app = create_app("testing")
self.app_ctx = self.app.app_context()
self.app_ctx.push()
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_ctx.pop()
def test_get_or_create(self):
user1, created1 = get_or_create(User, name="foo", social_id="bar")
db.session.add(user1)
db.session.commit()
user2, created2 = get_or_create(User, name="foo", social_id="bar")
self.assertTrue(created1)
self.assertFalse(created2)
self.assertEquals(user1, user2)
|
cadd4aa2bed67cad605937788d58e598ab1cdfc8
|
tistory/__init__.py
|
tistory/__init__.py
|
#!/usr/bin/env python3
import json
import os
from shlex import quote as shlex_quote
from tistory.api import Tistory, TistoryError
from tistory.auth import TistoryOAuth2
def load_config(fname):
fname = shlex_quote(fname)
cf_path = os.path.dirname(os.path.realpath(__file__))
abspath = os.path.abspath(os.path.join(cf_path, '../config/', fname))
with open(abspath) as data:
config = json.loads(data.read())
return config
if __name__ == "__main__":
pass
|
#!/usr/bin/env python3
from tistory.api import Tistory, TistoryError
from tistory.auth import TistoryOAuth2
if __name__ == "__main__":
pass
|
Remove load_config() from the tistory module
|
Remove load_config() from the tistory module
|
Python
|
mit
|
kastden/tistory
|
#!/usr/bin/env python3
import json
import os
from shlex import quote as shlex_quote
from tistory.api import Tistory, TistoryError
from tistory.auth import TistoryOAuth2
def load_config(fname):
fname = shlex_quote(fname)
cf_path = os.path.dirname(os.path.realpath(__file__))
abspath = os.path.abspath(os.path.join(cf_path, '../config/', fname))
with open(abspath) as data:
config = json.loads(data.read())
return config
if __name__ == "__main__":
pass
Remove load_config() from the tistory module
|
#!/usr/bin/env python3
from tistory.api import Tistory, TistoryError
from tistory.auth import TistoryOAuth2
if __name__ == "__main__":
pass
|
<commit_before>#!/usr/bin/env python3
import json
import os
from shlex import quote as shlex_quote
from tistory.api import Tistory, TistoryError
from tistory.auth import TistoryOAuth2
def load_config(fname):
fname = shlex_quote(fname)
cf_path = os.path.dirname(os.path.realpath(__file__))
abspath = os.path.abspath(os.path.join(cf_path, '../config/', fname))
with open(abspath) as data:
config = json.loads(data.read())
return config
if __name__ == "__main__":
pass
<commit_msg>Remove load_config() from the tistory module<commit_after>
|
#!/usr/bin/env python3
from tistory.api import Tistory, TistoryError
from tistory.auth import TistoryOAuth2
if __name__ == "__main__":
pass
|
#!/usr/bin/env python3
import json
import os
from shlex import quote as shlex_quote
from tistory.api import Tistory, TistoryError
from tistory.auth import TistoryOAuth2
def load_config(fname):
fname = shlex_quote(fname)
cf_path = os.path.dirname(os.path.realpath(__file__))
abspath = os.path.abspath(os.path.join(cf_path, '../config/', fname))
with open(abspath) as data:
config = json.loads(data.read())
return config
if __name__ == "__main__":
pass
Remove load_config() from the tistory module#!/usr/bin/env python3
from tistory.api import Tistory, TistoryError
from tistory.auth import TistoryOAuth2
if __name__ == "__main__":
pass
|
<commit_before>#!/usr/bin/env python3
import json
import os
from shlex import quote as shlex_quote
from tistory.api import Tistory, TistoryError
from tistory.auth import TistoryOAuth2
def load_config(fname):
fname = shlex_quote(fname)
cf_path = os.path.dirname(os.path.realpath(__file__))
abspath = os.path.abspath(os.path.join(cf_path, '../config/', fname))
with open(abspath) as data:
config = json.loads(data.read())
return config
if __name__ == "__main__":
pass
<commit_msg>Remove load_config() from the tistory module<commit_after>#!/usr/bin/env python3
from tistory.api import Tistory, TistoryError
from tistory.auth import TistoryOAuth2
if __name__ == "__main__":
pass
|
ec7e03b778c8f6b47af4647d440b4838221a4e33
|
jose/constants.py
|
jose/constants.py
|
import hashlib
class Algorithms(object):
NONE = 'none'
HS256 = 'HS256'
HS384 = 'HS384'
HS512 = 'HS512'
RS256 = 'RS256'
RS384 = 'RS384'
RS512 = 'RS512'
ES256 = 'ES256'
ES384 = 'ES384'
ES512 = 'ES512'
HMAC = set([HS256, HS384, HS512])
RSA = set([RS256, RS384, RS512])
EC = set([ES256, ES384, ES512])
SUPPORTED = HMAC.union(RSA).union(EC)
ALL = SUPPORTED.union([NONE])
HASHES = {
HS256: hashlib.sha256,
HS384: hashlib.sha384,
HS512: hashlib.sha512,
RS256: hashlib.sha256,
RS384: hashlib.sha384,
RS512: hashlib.sha512,
ES256: hashlib.sha256,
ES384: hashlib.sha384,
ES512: hashlib.sha512,
}
KEYS = {}
ALGORITHMS = Algorithms()
|
import hashlib
class Algorithms(object):
NONE = 'none'
HS256 = 'HS256'
HS384 = 'HS384'
HS512 = 'HS512'
RS256 = 'RS256'
RS384 = 'RS384'
RS512 = 'RS512'
ES256 = 'ES256'
ES384 = 'ES384'
ES512 = 'ES512'
HMAC = {HS256, HS384, HS512}
RSA = {RS256, RS384, RS512}
EC = {ES256, ES384, ES512}
SUPPORTED = HMAC.union(RSA).union(EC)
ALL = SUPPORTED.union([NONE])
HASHES = {
HS256: hashlib.sha256,
HS384: hashlib.sha384,
HS512: hashlib.sha512,
RS256: hashlib.sha256,
RS384: hashlib.sha384,
RS512: hashlib.sha512,
ES256: hashlib.sha256,
ES384: hashlib.sha384,
ES512: hashlib.sha512,
}
KEYS = {}
ALGORITHMS = Algorithms()
|
Replace function calls with set literals
|
Replace function calls with set literals
|
Python
|
mit
|
mpdavis/python-jose
|
import hashlib
class Algorithms(object):
NONE = 'none'
HS256 = 'HS256'
HS384 = 'HS384'
HS512 = 'HS512'
RS256 = 'RS256'
RS384 = 'RS384'
RS512 = 'RS512'
ES256 = 'ES256'
ES384 = 'ES384'
ES512 = 'ES512'
HMAC = set([HS256, HS384, HS512])
RSA = set([RS256, RS384, RS512])
EC = set([ES256, ES384, ES512])
SUPPORTED = HMAC.union(RSA).union(EC)
ALL = SUPPORTED.union([NONE])
HASHES = {
HS256: hashlib.sha256,
HS384: hashlib.sha384,
HS512: hashlib.sha512,
RS256: hashlib.sha256,
RS384: hashlib.sha384,
RS512: hashlib.sha512,
ES256: hashlib.sha256,
ES384: hashlib.sha384,
ES512: hashlib.sha512,
}
KEYS = {}
ALGORITHMS = Algorithms()
Replace function calls with set literals
|
import hashlib
class Algorithms(object):
NONE = 'none'
HS256 = 'HS256'
HS384 = 'HS384'
HS512 = 'HS512'
RS256 = 'RS256'
RS384 = 'RS384'
RS512 = 'RS512'
ES256 = 'ES256'
ES384 = 'ES384'
ES512 = 'ES512'
HMAC = {HS256, HS384, HS512}
RSA = {RS256, RS384, RS512}
EC = {ES256, ES384, ES512}
SUPPORTED = HMAC.union(RSA).union(EC)
ALL = SUPPORTED.union([NONE])
HASHES = {
HS256: hashlib.sha256,
HS384: hashlib.sha384,
HS512: hashlib.sha512,
RS256: hashlib.sha256,
RS384: hashlib.sha384,
RS512: hashlib.sha512,
ES256: hashlib.sha256,
ES384: hashlib.sha384,
ES512: hashlib.sha512,
}
KEYS = {}
ALGORITHMS = Algorithms()
|
<commit_before>import hashlib
class Algorithms(object):
NONE = 'none'
HS256 = 'HS256'
HS384 = 'HS384'
HS512 = 'HS512'
RS256 = 'RS256'
RS384 = 'RS384'
RS512 = 'RS512'
ES256 = 'ES256'
ES384 = 'ES384'
ES512 = 'ES512'
HMAC = set([HS256, HS384, HS512])
RSA = set([RS256, RS384, RS512])
EC = set([ES256, ES384, ES512])
SUPPORTED = HMAC.union(RSA).union(EC)
ALL = SUPPORTED.union([NONE])
HASHES = {
HS256: hashlib.sha256,
HS384: hashlib.sha384,
HS512: hashlib.sha512,
RS256: hashlib.sha256,
RS384: hashlib.sha384,
RS512: hashlib.sha512,
ES256: hashlib.sha256,
ES384: hashlib.sha384,
ES512: hashlib.sha512,
}
KEYS = {}
ALGORITHMS = Algorithms()
<commit_msg>Replace function calls with set literals<commit_after>
|
import hashlib
class Algorithms(object):
NONE = 'none'
HS256 = 'HS256'
HS384 = 'HS384'
HS512 = 'HS512'
RS256 = 'RS256'
RS384 = 'RS384'
RS512 = 'RS512'
ES256 = 'ES256'
ES384 = 'ES384'
ES512 = 'ES512'
HMAC = {HS256, HS384, HS512}
RSA = {RS256, RS384, RS512}
EC = {ES256, ES384, ES512}
SUPPORTED = HMAC.union(RSA).union(EC)
ALL = SUPPORTED.union([NONE])
HASHES = {
HS256: hashlib.sha256,
HS384: hashlib.sha384,
HS512: hashlib.sha512,
RS256: hashlib.sha256,
RS384: hashlib.sha384,
RS512: hashlib.sha512,
ES256: hashlib.sha256,
ES384: hashlib.sha384,
ES512: hashlib.sha512,
}
KEYS = {}
ALGORITHMS = Algorithms()
|
import hashlib
class Algorithms(object):
NONE = 'none'
HS256 = 'HS256'
HS384 = 'HS384'
HS512 = 'HS512'
RS256 = 'RS256'
RS384 = 'RS384'
RS512 = 'RS512'
ES256 = 'ES256'
ES384 = 'ES384'
ES512 = 'ES512'
HMAC = set([HS256, HS384, HS512])
RSA = set([RS256, RS384, RS512])
EC = set([ES256, ES384, ES512])
SUPPORTED = HMAC.union(RSA).union(EC)
ALL = SUPPORTED.union([NONE])
HASHES = {
HS256: hashlib.sha256,
HS384: hashlib.sha384,
HS512: hashlib.sha512,
RS256: hashlib.sha256,
RS384: hashlib.sha384,
RS512: hashlib.sha512,
ES256: hashlib.sha256,
ES384: hashlib.sha384,
ES512: hashlib.sha512,
}
KEYS = {}
ALGORITHMS = Algorithms()
Replace function calls with set literalsimport hashlib
class Algorithms(object):
NONE = 'none'
HS256 = 'HS256'
HS384 = 'HS384'
HS512 = 'HS512'
RS256 = 'RS256'
RS384 = 'RS384'
RS512 = 'RS512'
ES256 = 'ES256'
ES384 = 'ES384'
ES512 = 'ES512'
HMAC = {HS256, HS384, HS512}
RSA = {RS256, RS384, RS512}
EC = {ES256, ES384, ES512}
SUPPORTED = HMAC.union(RSA).union(EC)
ALL = SUPPORTED.union([NONE])
HASHES = {
HS256: hashlib.sha256,
HS384: hashlib.sha384,
HS512: hashlib.sha512,
RS256: hashlib.sha256,
RS384: hashlib.sha384,
RS512: hashlib.sha512,
ES256: hashlib.sha256,
ES384: hashlib.sha384,
ES512: hashlib.sha512,
}
KEYS = {}
ALGORITHMS = Algorithms()
|
<commit_before>import hashlib
class Algorithms(object):
NONE = 'none'
HS256 = 'HS256'
HS384 = 'HS384'
HS512 = 'HS512'
RS256 = 'RS256'
RS384 = 'RS384'
RS512 = 'RS512'
ES256 = 'ES256'
ES384 = 'ES384'
ES512 = 'ES512'
HMAC = set([HS256, HS384, HS512])
RSA = set([RS256, RS384, RS512])
EC = set([ES256, ES384, ES512])
SUPPORTED = HMAC.union(RSA).union(EC)
ALL = SUPPORTED.union([NONE])
HASHES = {
HS256: hashlib.sha256,
HS384: hashlib.sha384,
HS512: hashlib.sha512,
RS256: hashlib.sha256,
RS384: hashlib.sha384,
RS512: hashlib.sha512,
ES256: hashlib.sha256,
ES384: hashlib.sha384,
ES512: hashlib.sha512,
}
KEYS = {}
ALGORITHMS = Algorithms()
<commit_msg>Replace function calls with set literals<commit_after>import hashlib
class Algorithms(object):
NONE = 'none'
HS256 = 'HS256'
HS384 = 'HS384'
HS512 = 'HS512'
RS256 = 'RS256'
RS384 = 'RS384'
RS512 = 'RS512'
ES256 = 'ES256'
ES384 = 'ES384'
ES512 = 'ES512'
HMAC = {HS256, HS384, HS512}
RSA = {RS256, RS384, RS512}
EC = {ES256, ES384, ES512}
SUPPORTED = HMAC.union(RSA).union(EC)
ALL = SUPPORTED.union([NONE])
HASHES = {
HS256: hashlib.sha256,
HS384: hashlib.sha384,
HS512: hashlib.sha512,
RS256: hashlib.sha256,
RS384: hashlib.sha384,
RS512: hashlib.sha512,
ES256: hashlib.sha256,
ES384: hashlib.sha384,
ES512: hashlib.sha512,
}
KEYS = {}
ALGORITHMS = Algorithms()
|
4db4eb6ce512b3356559fe3efc988627c3324838
|
nonbias_weight_decay.py
|
nonbias_weight_decay.py
|
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from builtins import int
from future import standard_library
standard_library.install_aliases()
from chainer import cuda
class NonbiasWeightDecay(object):
"""Optimizer hook function for weight decay regularization.
"""
name = 'NonbiasWeightDecay'
def __init__(self, rate):
self.rate = rate
def __call__(self, opt):
if cuda.available:
kernel = cuda.elementwise(
'T p, T decay', 'T g', 'g += decay * p', 'weight_decay')
rate = self.rate
for name, param in opt.target.namedparams():
if name == 'b' or name.endswith('/b'):
continue
p, g = param.data, param.grad
with cuda.get_device(p) as dev:
if int(dev) == -1:
g += rate * p
else:
kernel(p, rate, g)
|
# This caused an error in py2 because cupy expect non-unicode str
# from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from builtins import int
from future import standard_library
standard_library.install_aliases()
from chainer import cuda
class NonbiasWeightDecay(object):
"""Optimizer hook function for weight decay regularization.
"""
name = 'NonbiasWeightDecay'
def __init__(self, rate):
self.rate = rate
def __call__(self, opt):
if cuda.available:
kernel = cuda.elementwise(
'T p, T decay', 'T g', 'g += decay * p', 'weight_decay')
rate = self.rate
for name, param in opt.target.namedparams():
if name == 'b' or name.endswith('/b'):
continue
p, g = param.data, param.grad
with cuda.get_device(p) as dev:
if int(dev) == -1:
g += rate * p
else:
kernel(p, rate, g)
|
Fix an error of passing unicode literals to cupy
|
Fix an error of passing unicode literals to cupy
|
Python
|
mit
|
toslunar/chainerrl,toslunar/chainerrl
|
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from builtins import int
from future import standard_library
standard_library.install_aliases()
from chainer import cuda
class NonbiasWeightDecay(object):
"""Optimizer hook function for weight decay regularization.
"""
name = 'NonbiasWeightDecay'
def __init__(self, rate):
self.rate = rate
def __call__(self, opt):
if cuda.available:
kernel = cuda.elementwise(
'T p, T decay', 'T g', 'g += decay * p', 'weight_decay')
rate = self.rate
for name, param in opt.target.namedparams():
if name == 'b' or name.endswith('/b'):
continue
p, g = param.data, param.grad
with cuda.get_device(p) as dev:
if int(dev) == -1:
g += rate * p
else:
kernel(p, rate, g)
Fix an error of passing unicode literals to cupy
|
# This caused an error in py2 because cupy expect non-unicode str
# from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from builtins import int
from future import standard_library
standard_library.install_aliases()
from chainer import cuda
class NonbiasWeightDecay(object):
"""Optimizer hook function for weight decay regularization.
"""
name = 'NonbiasWeightDecay'
def __init__(self, rate):
self.rate = rate
def __call__(self, opt):
if cuda.available:
kernel = cuda.elementwise(
'T p, T decay', 'T g', 'g += decay * p', 'weight_decay')
rate = self.rate
for name, param in opt.target.namedparams():
if name == 'b' or name.endswith('/b'):
continue
p, g = param.data, param.grad
with cuda.get_device(p) as dev:
if int(dev) == -1:
g += rate * p
else:
kernel(p, rate, g)
|
<commit_before>from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from builtins import int
from future import standard_library
standard_library.install_aliases()
from chainer import cuda
class NonbiasWeightDecay(object):
"""Optimizer hook function for weight decay regularization.
"""
name = 'NonbiasWeightDecay'
def __init__(self, rate):
self.rate = rate
def __call__(self, opt):
if cuda.available:
kernel = cuda.elementwise(
'T p, T decay', 'T g', 'g += decay * p', 'weight_decay')
rate = self.rate
for name, param in opt.target.namedparams():
if name == 'b' or name.endswith('/b'):
continue
p, g = param.data, param.grad
with cuda.get_device(p) as dev:
if int(dev) == -1:
g += rate * p
else:
kernel(p, rate, g)
<commit_msg>Fix an error of passing unicode literals to cupy<commit_after>
|
# This caused an error in py2 because cupy expect non-unicode str
# from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from builtins import int
from future import standard_library
standard_library.install_aliases()
from chainer import cuda
class NonbiasWeightDecay(object):
"""Optimizer hook function for weight decay regularization.
"""
name = 'NonbiasWeightDecay'
def __init__(self, rate):
self.rate = rate
def __call__(self, opt):
if cuda.available:
kernel = cuda.elementwise(
'T p, T decay', 'T g', 'g += decay * p', 'weight_decay')
rate = self.rate
for name, param in opt.target.namedparams():
if name == 'b' or name.endswith('/b'):
continue
p, g = param.data, param.grad
with cuda.get_device(p) as dev:
if int(dev) == -1:
g += rate * p
else:
kernel(p, rate, g)
|
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from builtins import int
from future import standard_library
standard_library.install_aliases()
from chainer import cuda
class NonbiasWeightDecay(object):
"""Optimizer hook function for weight decay regularization.
"""
name = 'NonbiasWeightDecay'
def __init__(self, rate):
self.rate = rate
def __call__(self, opt):
if cuda.available:
kernel = cuda.elementwise(
'T p, T decay', 'T g', 'g += decay * p', 'weight_decay')
rate = self.rate
for name, param in opt.target.namedparams():
if name == 'b' or name.endswith('/b'):
continue
p, g = param.data, param.grad
with cuda.get_device(p) as dev:
if int(dev) == -1:
g += rate * p
else:
kernel(p, rate, g)
Fix an error of passing unicode literals to cupy# This caused an error in py2 because cupy expect non-unicode str
# from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from builtins import int
from future import standard_library
standard_library.install_aliases()
from chainer import cuda
class NonbiasWeightDecay(object):
"""Optimizer hook function for weight decay regularization.
"""
name = 'NonbiasWeightDecay'
def __init__(self, rate):
self.rate = rate
def __call__(self, opt):
if cuda.available:
kernel = cuda.elementwise(
'T p, T decay', 'T g', 'g += decay * p', 'weight_decay')
rate = self.rate
for name, param in opt.target.namedparams():
if name == 'b' or name.endswith('/b'):
continue
p, g = param.data, param.grad
with cuda.get_device(p) as dev:
if int(dev) == -1:
g += rate * p
else:
kernel(p, rate, g)
|
<commit_before>from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from builtins import int
from future import standard_library
standard_library.install_aliases()
from chainer import cuda
class NonbiasWeightDecay(object):
"""Optimizer hook function for weight decay regularization.
"""
name = 'NonbiasWeightDecay'
def __init__(self, rate):
self.rate = rate
def __call__(self, opt):
if cuda.available:
kernel = cuda.elementwise(
'T p, T decay', 'T g', 'g += decay * p', 'weight_decay')
rate = self.rate
for name, param in opt.target.namedparams():
if name == 'b' or name.endswith('/b'):
continue
p, g = param.data, param.grad
with cuda.get_device(p) as dev:
if int(dev) == -1:
g += rate * p
else:
kernel(p, rate, g)
<commit_msg>Fix an error of passing unicode literals to cupy<commit_after># This caused an error in py2 because cupy expect non-unicode str
# from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from builtins import int
from future import standard_library
standard_library.install_aliases()
from chainer import cuda
class NonbiasWeightDecay(object):
"""Optimizer hook function for weight decay regularization.
"""
name = 'NonbiasWeightDecay'
def __init__(self, rate):
self.rate = rate
def __call__(self, opt):
if cuda.available:
kernel = cuda.elementwise(
'T p, T decay', 'T g', 'g += decay * p', 'weight_decay')
rate = self.rate
for name, param in opt.target.namedparams():
if name == 'b' or name.endswith('/b'):
continue
p, g = param.data, param.grad
with cuda.get_device(p) as dev:
if int(dev) == -1:
g += rate * p
else:
kernel(p, rate, g)
|
570a1468796c6afdcbd77052227d9a155601e710
|
app/__init__.py
|
app/__init__.py
|
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_oauthlib.client import OAuth
from config import config
db = SQLAlchemy()
oa = OAuth()
lm = LoginManager()
lm.login_view = "main.login"
from app.models import User
@lm.user_loader
def load_user(id):
return User.query.get(int(id))
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
db.init_app(app)
lm.init_app(app)
oa.init_app(app)
from app.views.main import main
from app.views.oauth import oauth
app.register_blueprint(main)
app.register_blueprint(oauth)
return app
|
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_oauthlib.client import OAuth
from config import config
db = SQLAlchemy()
oa = OAuth()
lm = LoginManager()
lm.login_view = "main.login"
from app.models import User
@lm.user_loader
def load_user(id):
return User.query.get(int(id))
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
db.init_app(app)
lm.init_app(app)
oa.init_app(app)
from app.views.main import main
from app.views.oauth import oauth
app.register_blueprint(main)
app.register_blueprint(oauth)
from app.converters import WordClassConverter
app.url_map.converters["word_class"] = WordClassConverter
return app
|
Add word class converter to URL map
|
Add word class converter to URL map
|
Python
|
mit
|
Encrylize/MyDictionary,Encrylize/MyDictionary,Encrylize/MyDictionary
|
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_oauthlib.client import OAuth
from config import config
db = SQLAlchemy()
oa = OAuth()
lm = LoginManager()
lm.login_view = "main.login"
from app.models import User
@lm.user_loader
def load_user(id):
return User.query.get(int(id))
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
db.init_app(app)
lm.init_app(app)
oa.init_app(app)
from app.views.main import main
from app.views.oauth import oauth
app.register_blueprint(main)
app.register_blueprint(oauth)
return appAdd word class converter to URL map
|
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_oauthlib.client import OAuth
from config import config
db = SQLAlchemy()
oa = OAuth()
lm = LoginManager()
lm.login_view = "main.login"
from app.models import User
@lm.user_loader
def load_user(id):
return User.query.get(int(id))
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
db.init_app(app)
lm.init_app(app)
oa.init_app(app)
from app.views.main import main
from app.views.oauth import oauth
app.register_blueprint(main)
app.register_blueprint(oauth)
from app.converters import WordClassConverter
app.url_map.converters["word_class"] = WordClassConverter
return app
|
<commit_before>from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_oauthlib.client import OAuth
from config import config
db = SQLAlchemy()
oa = OAuth()
lm = LoginManager()
lm.login_view = "main.login"
from app.models import User
@lm.user_loader
def load_user(id):
return User.query.get(int(id))
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
db.init_app(app)
lm.init_app(app)
oa.init_app(app)
from app.views.main import main
from app.views.oauth import oauth
app.register_blueprint(main)
app.register_blueprint(oauth)
return app<commit_msg>Add word class converter to URL map<commit_after>
|
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_oauthlib.client import OAuth
from config import config
db = SQLAlchemy()
oa = OAuth()
lm = LoginManager()
lm.login_view = "main.login"
from app.models import User
@lm.user_loader
def load_user(id):
return User.query.get(int(id))
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
db.init_app(app)
lm.init_app(app)
oa.init_app(app)
from app.views.main import main
from app.views.oauth import oauth
app.register_blueprint(main)
app.register_blueprint(oauth)
from app.converters import WordClassConverter
app.url_map.converters["word_class"] = WordClassConverter
return app
|
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_oauthlib.client import OAuth
from config import config
db = SQLAlchemy()
oa = OAuth()
lm = LoginManager()
lm.login_view = "main.login"
from app.models import User
@lm.user_loader
def load_user(id):
return User.query.get(int(id))
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
db.init_app(app)
lm.init_app(app)
oa.init_app(app)
from app.views.main import main
from app.views.oauth import oauth
app.register_blueprint(main)
app.register_blueprint(oauth)
return appAdd word class converter to URL mapfrom flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_oauthlib.client import OAuth
from config import config
db = SQLAlchemy()
oa = OAuth()
lm = LoginManager()
lm.login_view = "main.login"
from app.models import User
@lm.user_loader
def load_user(id):
return User.query.get(int(id))
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
db.init_app(app)
lm.init_app(app)
oa.init_app(app)
from app.views.main import main
from app.views.oauth import oauth
app.register_blueprint(main)
app.register_blueprint(oauth)
from app.converters import WordClassConverter
app.url_map.converters["word_class"] = WordClassConverter
return app
|
<commit_before>from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_oauthlib.client import OAuth
from config import config
db = SQLAlchemy()
oa = OAuth()
lm = LoginManager()
lm.login_view = "main.login"
from app.models import User
@lm.user_loader
def load_user(id):
return User.query.get(int(id))
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
db.init_app(app)
lm.init_app(app)
oa.init_app(app)
from app.views.main import main
from app.views.oauth import oauth
app.register_blueprint(main)
app.register_blueprint(oauth)
return app<commit_msg>Add word class converter to URL map<commit_after>from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_oauthlib.client import OAuth
from config import config
db = SQLAlchemy()
oa = OAuth()
lm = LoginManager()
lm.login_view = "main.login"
from app.models import User
@lm.user_loader
def load_user(id):
return User.query.get(int(id))
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
db.init_app(app)
lm.init_app(app)
oa.init_app(app)
from app.views.main import main
from app.views.oauth import oauth
app.register_blueprint(main)
app.register_blueprint(oauth)
from app.converters import WordClassConverter
app.url_map.converters["word_class"] = WordClassConverter
return app
|
4de5994b977ca7a0cb9086def22432d72fec0f34
|
app/settings.py
|
app/settings.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Flask application default config:
# http://flask.pocoo.org/docs/config/#configuring-from-files
# https://github.com/mbr/flask-appconfig
project_name = u'Skeleton \u2620'
class Default(object):
DEBUG = False
TESTING = False
JS_LOG_LEVEL = 3 # log (1) < debug (2) < info (3) < warn (4) < error (5)
APP_NAME = project_name
# Servers and URLs
SERVER_NAME = 'localhost:5000'
# Authentication etc
SECRET_KEY = 'some-secret-key'
CSRF_ENABLED = True
# API
API_SERVER = 'localhost:5000'
API_TOKEN = 'some-api-token'
class Dev(Default):
DEBUG = True
APP_NAME = project_name + ' dev'
class Testing(Default):
TESTING = True
CSRF_ENABLED = False
class Production(Default):
pass
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Flask application default config:
# http://flask.pocoo.org/docs/config/#configuring-from-files
# https://github.com/mbr/flask-appconfig
project_name = u'Skeleton'
class Default(object):
    """Base configuration shared by every environment."""
    DEBUG = False
    TESTING = False
    JS_LOG_LEVEL = 3  # log (1) < debug (2) < info (3) < warn (4) < error (5)
    APP_NAME = project_name
    # Servers and URLs
    SERVER_NAME = 'localhost:5000'
    # Authentication etc
    # NOTE(review): placeholder secret — must be overridden outside dev.
    SECRET_KEY = 'some-secret-key'
    CSRF_ENABLED = True
    # API
    API_SERVER = 'localhost:5000'
    API_TOKEN = 'some-api-token'
class Dev(Default):
    """Development configuration: debug on, app name suffixed with ' dev'."""
    DEBUG = True
    APP_NAME = project_name + ' dev'
class Testing(Default):
    """Test configuration: CSRF disabled so test clients can POST freely."""
    TESTING = True
    CSRF_ENABLED = False
class Production(Default):
    """Production configuration: inherits every Default value unchanged."""
    pass
|
Drop the unicode skull-and-crossbones from the project name. Causes problems too easily.
|
Drop the unicode skull-and-crossbones from the project name. Causes problems too easily.
|
Python
|
mit
|
peterhil/skeleton,peterhil/ninhursag,peterhil/skeleton,peterhil/ninhursag,peterhil/ninhursag,peterhil/skeleton,peterhil/ninhursag
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Flask application default config:
# http://flask.pocoo.org/docs/config/#configuring-from-files
# https://github.com/mbr/flask-appconfig
project_name = u'Skeleton \u2620'
class Default(object):
DEBUG = False
TESTING = False
JS_LOG_LEVEL = 3 # log (1) < debug (2) < info (3) < warn (4) < error (5)
APP_NAME = project_name
# Servers and URLs
SERVER_NAME = 'localhost:5000'
# Authentication etc
SECRET_KEY = 'some-secret-key'
CSRF_ENABLED = True
# API
API_SERVER = 'localhost:5000'
API_TOKEN = 'some-api-token'
class Dev(Default):
DEBUG = True
APP_NAME = project_name + ' dev'
class Testing(Default):
TESTING = True
CSRF_ENABLED = False
class Production(Default):
pass
Drop the unicode skull-and-crossbones from the project name. Causes problems too easily.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Flask application default config:
# http://flask.pocoo.org/docs/config/#configuring-from-files
# https://github.com/mbr/flask-appconfig
project_name = u'Skeleton'
class Default(object):
DEBUG = False
TESTING = False
JS_LOG_LEVEL = 3 # log (1) < debug (2) < info (3) < warn (4) < error (5)
APP_NAME = project_name
# Servers and URLs
SERVER_NAME = 'localhost:5000'
# Authentication etc
SECRET_KEY = 'some-secret-key'
CSRF_ENABLED = True
# API
API_SERVER = 'localhost:5000'
API_TOKEN = 'some-api-token'
class Dev(Default):
DEBUG = True
APP_NAME = project_name + ' dev'
class Testing(Default):
TESTING = True
CSRF_ENABLED = False
class Production(Default):
pass
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Flask application default config:
# http://flask.pocoo.org/docs/config/#configuring-from-files
# https://github.com/mbr/flask-appconfig
project_name = u'Skeleton \u2620'
class Default(object):
DEBUG = False
TESTING = False
JS_LOG_LEVEL = 3 # log (1) < debug (2) < info (3) < warn (4) < error (5)
APP_NAME = project_name
# Servers and URLs
SERVER_NAME = 'localhost:5000'
# Authentication etc
SECRET_KEY = 'some-secret-key'
CSRF_ENABLED = True
# API
API_SERVER = 'localhost:5000'
API_TOKEN = 'some-api-token'
class Dev(Default):
DEBUG = True
APP_NAME = project_name + ' dev'
class Testing(Default):
TESTING = True
CSRF_ENABLED = False
class Production(Default):
pass
<commit_msg>Drop the unicode skull-and-crossbones from the project name. Causes problems too easily.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Flask application default config:
# http://flask.pocoo.org/docs/config/#configuring-from-files
# https://github.com/mbr/flask-appconfig
project_name = u'Skeleton'
class Default(object):
DEBUG = False
TESTING = False
JS_LOG_LEVEL = 3 # log (1) < debug (2) < info (3) < warn (4) < error (5)
APP_NAME = project_name
# Servers and URLs
SERVER_NAME = 'localhost:5000'
# Authentication etc
SECRET_KEY = 'some-secret-key'
CSRF_ENABLED = True
# API
API_SERVER = 'localhost:5000'
API_TOKEN = 'some-api-token'
class Dev(Default):
DEBUG = True
APP_NAME = project_name + ' dev'
class Testing(Default):
TESTING = True
CSRF_ENABLED = False
class Production(Default):
pass
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Flask application default config:
# http://flask.pocoo.org/docs/config/#configuring-from-files
# https://github.com/mbr/flask-appconfig
project_name = u'Skeleton \u2620'
class Default(object):
DEBUG = False
TESTING = False
JS_LOG_LEVEL = 3 # log (1) < debug (2) < info (3) < warn (4) < error (5)
APP_NAME = project_name
# Servers and URLs
SERVER_NAME = 'localhost:5000'
# Authentication etc
SECRET_KEY = 'some-secret-key'
CSRF_ENABLED = True
# API
API_SERVER = 'localhost:5000'
API_TOKEN = 'some-api-token'
class Dev(Default):
DEBUG = True
APP_NAME = project_name + ' dev'
class Testing(Default):
TESTING = True
CSRF_ENABLED = False
class Production(Default):
pass
Drop the unicode skull-and-crossbones from the project name. Causes problems too easily.#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Flask application default config:
# http://flask.pocoo.org/docs/config/#configuring-from-files
# https://github.com/mbr/flask-appconfig
project_name = u'Skeleton'
class Default(object):
DEBUG = False
TESTING = False
JS_LOG_LEVEL = 3 # log (1) < debug (2) < info (3) < warn (4) < error (5)
APP_NAME = project_name
# Servers and URLs
SERVER_NAME = 'localhost:5000'
# Authentication etc
SECRET_KEY = 'some-secret-key'
CSRF_ENABLED = True
# API
API_SERVER = 'localhost:5000'
API_TOKEN = 'some-api-token'
class Dev(Default):
DEBUG = True
APP_NAME = project_name + ' dev'
class Testing(Default):
TESTING = True
CSRF_ENABLED = False
class Production(Default):
pass
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Flask application default config:
# http://flask.pocoo.org/docs/config/#configuring-from-files
# https://github.com/mbr/flask-appconfig
project_name = u'Skeleton \u2620'
class Default(object):
DEBUG = False
TESTING = False
JS_LOG_LEVEL = 3 # log (1) < debug (2) < info (3) < warn (4) < error (5)
APP_NAME = project_name
# Servers and URLs
SERVER_NAME = 'localhost:5000'
# Authentication etc
SECRET_KEY = 'some-secret-key'
CSRF_ENABLED = True
# API
API_SERVER = 'localhost:5000'
API_TOKEN = 'some-api-token'
class Dev(Default):
DEBUG = True
APP_NAME = project_name + ' dev'
class Testing(Default):
TESTING = True
CSRF_ENABLED = False
class Production(Default):
pass
<commit_msg>Drop the unicode skull-and-crossbones from the project name. Causes problems too easily.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Flask application default config:
# http://flask.pocoo.org/docs/config/#configuring-from-files
# https://github.com/mbr/flask-appconfig
project_name = u'Skeleton'
class Default(object):
DEBUG = False
TESTING = False
JS_LOG_LEVEL = 3 # log (1) < debug (2) < info (3) < warn (4) < error (5)
APP_NAME = project_name
# Servers and URLs
SERVER_NAME = 'localhost:5000'
# Authentication etc
SECRET_KEY = 'some-secret-key'
CSRF_ENABLED = True
# API
API_SERVER = 'localhost:5000'
API_TOKEN = 'some-api-token'
class Dev(Default):
DEBUG = True
APP_NAME = project_name + ' dev'
class Testing(Default):
TESTING = True
CSRF_ENABLED = False
class Production(Default):
pass
|
2e8373019f76a51da482df6415bbecd8b2f821ac
|
zephyrus/message.py
|
zephyrus/message.py
|
import json
class Message:
def __init__(self, sender, message_type, content=None):
self.message = {
'sender': sender,
'type': message_type,
'content': content
}
def __str__(self):
return json.dumps(self.message)
def __repr__(self):
return "Message: %s" % self
@property
def sender(self):
return self.message['sender']
@property
def type(self):
return self.message['type']
@property
def content(self):
return self.message['content']
class MessengerMeta(type):
def __new__(cls, clsname, supercls, attr_dict):
clsobj = super().__new__(cls, clsname, supercls, attr_dict)
if 'no_parameter_messages' not in attr_dict:
raise AttributeError("no_parameter_messages attribute must be defined")
for name, content in attr_dict['no_parameter_messages'].items():
fullname, body = MessengerMeta.get_method(name, content)
setattr(clsobj, fullname, body)
return clsobj
@staticmethod
def get_method(name, content):
def method(self):
return Message(self.sender, content)
return 'build_{}_message'.format(name), method
class Messenger(metaclass=MessengerMeta):
no_parameter_messages = {}
def __init__(self, sender: str):
self.sender = sender
|
import json
class Message:
    """Envelope for inter-agent messages.

    A message is a dict with 'sender', 'type' and 'content' keys;
    ``str()`` yields its JSON serialization (the wire format).
    """

    def __init__(self, sender, message_type, content=None):
        self.message = {
            'sender': sender,
            'type': message_type,
            'content': content
        }

    def __str__(self):
        return json.dumps(self.message)

    def __repr__(self):
        return "Message: %s" % self

    @classmethod
    def from_json(cls, json_dict):
        """Build a message from an already-parsed JSON dict.

        Fix: the first parameter was named ``self`` and the body
        hard-coded ``Message(...)``; using ``cls`` keeps the alternate
        constructor working for subclasses.
        """
        # TODO: missing attributes parsing
        return cls(json_dict['sender'], json_dict['type'], json_dict['content'])

    @classmethod
    def from_string(cls, json_str):
        """Build a message from its JSON string serialization."""
        return cls.from_json(json.loads(json_str))

    @property
    def sender(self):
        return self.message['sender']

    @property
    def type(self):
        return self.message['type']

    @property
    def content(self):
        return self.message['content']
class MessengerMeta(type):
    """Metaclass that auto-generates ``build_<name>_message()`` methods.

    Each class created with this metaclass must define a
    ``no_parameter_messages`` mapping; for every (name, content) entry a
    zero-argument builder method is attached to the new class.
    """

    def __new__(cls, clsname, supercls, attr_dict):
        clsobj = super().__new__(cls, clsname, supercls, attr_dict)
        # The check is against attr_dict, not the finished class: every
        # class (including subclasses) must declare its own mapping.
        if 'no_parameter_messages' not in attr_dict:
            raise AttributeError("no_parameter_messages attribute must be defined")
        for name, content in attr_dict['no_parameter_messages'].items():
            fullname, body = MessengerMeta.get_method(name, content)
            setattr(clsobj, fullname, body)
        return clsobj

    @staticmethod
    def get_method(name, content):
        """Return (method_name, function) for one generated builder."""
        # 'content' is captured per call to this factory, so each
        # generated method closes over its own message content.
        def method(self):
            return Message(self.sender, content)
        return 'build_{}_message'.format(name), method
class Messenger(metaclass=MessengerMeta):
    """Base class for objects that emit Messages.

    Subclasses override ``no_parameter_messages`` to get builder
    methods generated by MessengerMeta.
    """
    # Empty mapping satisfies the metaclass requirement for the base class.
    no_parameter_messages = {}

    def __init__(self, sender: str):
        # Identifier stamped into every message built by this messenger.
        self.sender = sender
|
Add object creation methods for Message
|
Add object creation methods for Message
|
Python
|
mit
|
wairton/zephyrus-mas
|
import json
class Message:
def __init__(self, sender, message_type, content=None):
self.message = {
'sender': sender,
'type': message_type,
'content': content
}
def __str__(self):
return json.dumps(self.message)
def __repr__(self):
return "Message: %s" % self
@property
def sender(self):
return self.message['sender']
@property
def type(self):
return self.message['type']
@property
def content(self):
return self.message['content']
class MessengerMeta(type):
def __new__(cls, clsname, supercls, attr_dict):
clsobj = super().__new__(cls, clsname, supercls, attr_dict)
if 'no_parameter_messages' not in attr_dict:
raise AttributeError("no_parameter_messages attribute must be defined")
for name, content in attr_dict['no_parameter_messages'].items():
fullname, body = MessengerMeta.get_method(name, content)
setattr(clsobj, fullname, body)
return clsobj
@staticmethod
def get_method(name, content):
def method(self):
return Message(self.sender, content)
return 'build_{}_message'.format(name), method
class Messenger(metaclass=MessengerMeta):
no_parameter_messages = {}
def __init__(self, sender: str):
self.sender = sender
Add object creation methods for Message
|
import json
class Message:
def __init__(self, sender, message_type, content=None):
self.message = {
'sender': sender,
'type': message_type,
'content': content
}
def __str__(self):
return json.dumps(self.message)
def __repr__(self):
return "Message: %s" % self
@classmethod
def from_json(self, json_dict):
# TODO: missing attributes parsing
return Message(json_dict['sender'], json_dict['type'], json_dict['content'])
@classmethod
def from_string(self, json_str):
return Message.from_json(json.loads(json_str))
@property
def sender(self):
return self.message['sender']
@property
def type(self):
return self.message['type']
@property
def content(self):
return self.message['content']
class MessengerMeta(type):
def __new__(cls, clsname, supercls, attr_dict):
clsobj = super().__new__(cls, clsname, supercls, attr_dict)
if 'no_parameter_messages' not in attr_dict:
raise AttributeError("no_parameter_messages attribute must be defined")
for name, content in attr_dict['no_parameter_messages'].items():
fullname, body = MessengerMeta.get_method(name, content)
setattr(clsobj, fullname, body)
return clsobj
@staticmethod
def get_method(name, content):
def method(self):
return Message(self.sender, content)
return 'build_{}_message'.format(name), method
class Messenger(metaclass=MessengerMeta):
no_parameter_messages = {}
def __init__(self, sender: str):
self.sender = sender
|
<commit_before>import json
class Message:
def __init__(self, sender, message_type, content=None):
self.message = {
'sender': sender,
'type': message_type,
'content': content
}
def __str__(self):
return json.dumps(self.message)
def __repr__(self):
return "Message: %s" % self
@property
def sender(self):
return self.message['sender']
@property
def type(self):
return self.message['type']
@property
def content(self):
return self.message['content']
class MessengerMeta(type):
def __new__(cls, clsname, supercls, attr_dict):
clsobj = super().__new__(cls, clsname, supercls, attr_dict)
if 'no_parameter_messages' not in attr_dict:
raise AttributeError("no_parameter_messages attribute must be defined")
for name, content in attr_dict['no_parameter_messages'].items():
fullname, body = MessengerMeta.get_method(name, content)
setattr(clsobj, fullname, body)
return clsobj
@staticmethod
def get_method(name, content):
def method(self):
return Message(self.sender, content)
return 'build_{}_message'.format(name), method
class Messenger(metaclass=MessengerMeta):
no_parameter_messages = {}
def __init__(self, sender: str):
self.sender = sender
<commit_msg>Add object creation methods for Message<commit_after>
|
import json
class Message:
def __init__(self, sender, message_type, content=None):
self.message = {
'sender': sender,
'type': message_type,
'content': content
}
def __str__(self):
return json.dumps(self.message)
def __repr__(self):
return "Message: %s" % self
@classmethod
def from_json(self, json_dict):
# TODO: missing attributes parsing
return Message(json_dict['sender'], json_dict['type'], json_dict['content'])
@classmethod
def from_string(self, json_str):
return Message.from_json(json.loads(json_str))
@property
def sender(self):
return self.message['sender']
@property
def type(self):
return self.message['type']
@property
def content(self):
return self.message['content']
class MessengerMeta(type):
def __new__(cls, clsname, supercls, attr_dict):
clsobj = super().__new__(cls, clsname, supercls, attr_dict)
if 'no_parameter_messages' not in attr_dict:
raise AttributeError("no_parameter_messages attribute must be defined")
for name, content in attr_dict['no_parameter_messages'].items():
fullname, body = MessengerMeta.get_method(name, content)
setattr(clsobj, fullname, body)
return clsobj
@staticmethod
def get_method(name, content):
def method(self):
return Message(self.sender, content)
return 'build_{}_message'.format(name), method
class Messenger(metaclass=MessengerMeta):
no_parameter_messages = {}
def __init__(self, sender: str):
self.sender = sender
|
import json
class Message:
def __init__(self, sender, message_type, content=None):
self.message = {
'sender': sender,
'type': message_type,
'content': content
}
def __str__(self):
return json.dumps(self.message)
def __repr__(self):
return "Message: %s" % self
@property
def sender(self):
return self.message['sender']
@property
def type(self):
return self.message['type']
@property
def content(self):
return self.message['content']
class MessengerMeta(type):
def __new__(cls, clsname, supercls, attr_dict):
clsobj = super().__new__(cls, clsname, supercls, attr_dict)
if 'no_parameter_messages' not in attr_dict:
raise AttributeError("no_parameter_messages attribute must be defined")
for name, content in attr_dict['no_parameter_messages'].items():
fullname, body = MessengerMeta.get_method(name, content)
setattr(clsobj, fullname, body)
return clsobj
@staticmethod
def get_method(name, content):
def method(self):
return Message(self.sender, content)
return 'build_{}_message'.format(name), method
class Messenger(metaclass=MessengerMeta):
no_parameter_messages = {}
def __init__(self, sender: str):
self.sender = sender
Add object creation methods for Messageimport json
class Message:
def __init__(self, sender, message_type, content=None):
self.message = {
'sender': sender,
'type': message_type,
'content': content
}
def __str__(self):
return json.dumps(self.message)
def __repr__(self):
return "Message: %s" % self
@classmethod
def from_json(self, json_dict):
# TODO: missing attributes parsing
return Message(json_dict['sender'], json_dict['type'], json_dict['content'])
@classmethod
def from_string(self, json_str):
return Message.from_json(json.loads(json_str))
@property
def sender(self):
return self.message['sender']
@property
def type(self):
return self.message['type']
@property
def content(self):
return self.message['content']
class MessengerMeta(type):
def __new__(cls, clsname, supercls, attr_dict):
clsobj = super().__new__(cls, clsname, supercls, attr_dict)
if 'no_parameter_messages' not in attr_dict:
raise AttributeError("no_parameter_messages attribute must be defined")
for name, content in attr_dict['no_parameter_messages'].items():
fullname, body = MessengerMeta.get_method(name, content)
setattr(clsobj, fullname, body)
return clsobj
@staticmethod
def get_method(name, content):
def method(self):
return Message(self.sender, content)
return 'build_{}_message'.format(name), method
class Messenger(metaclass=MessengerMeta):
no_parameter_messages = {}
def __init__(self, sender: str):
self.sender = sender
|
<commit_before>import json
class Message:
def __init__(self, sender, message_type, content=None):
self.message = {
'sender': sender,
'type': message_type,
'content': content
}
def __str__(self):
return json.dumps(self.message)
def __repr__(self):
return "Message: %s" % self
@property
def sender(self):
return self.message['sender']
@property
def type(self):
return self.message['type']
@property
def content(self):
return self.message['content']
class MessengerMeta(type):
def __new__(cls, clsname, supercls, attr_dict):
clsobj = super().__new__(cls, clsname, supercls, attr_dict)
if 'no_parameter_messages' not in attr_dict:
raise AttributeError("no_parameter_messages attribute must be defined")
for name, content in attr_dict['no_parameter_messages'].items():
fullname, body = MessengerMeta.get_method(name, content)
setattr(clsobj, fullname, body)
return clsobj
@staticmethod
def get_method(name, content):
def method(self):
return Message(self.sender, content)
return 'build_{}_message'.format(name), method
class Messenger(metaclass=MessengerMeta):
no_parameter_messages = {}
def __init__(self, sender: str):
self.sender = sender
<commit_msg>Add object creation methods for Message<commit_after>import json
class Message:
def __init__(self, sender, message_type, content=None):
self.message = {
'sender': sender,
'type': message_type,
'content': content
}
def __str__(self):
return json.dumps(self.message)
def __repr__(self):
return "Message: %s" % self
@classmethod
def from_json(self, json_dict):
# TODO: missing attributes parsing
return Message(json_dict['sender'], json_dict['type'], json_dict['content'])
@classmethod
def from_string(self, json_str):
return Message.from_json(json.loads(json_str))
@property
def sender(self):
return self.message['sender']
@property
def type(self):
return self.message['type']
@property
def content(self):
return self.message['content']
class MessengerMeta(type):
def __new__(cls, clsname, supercls, attr_dict):
clsobj = super().__new__(cls, clsname, supercls, attr_dict)
if 'no_parameter_messages' not in attr_dict:
raise AttributeError("no_parameter_messages attribute must be defined")
for name, content in attr_dict['no_parameter_messages'].items():
fullname, body = MessengerMeta.get_method(name, content)
setattr(clsobj, fullname, body)
return clsobj
@staticmethod
def get_method(name, content):
def method(self):
return Message(self.sender, content)
return 'build_{}_message'.format(name), method
class Messenger(metaclass=MessengerMeta):
no_parameter_messages = {}
def __init__(self, sender: str):
self.sender = sender
|
cabc7da28989f1cc000f7219845222992846631a
|
datasets/templatetags/general_templatetags.py
|
datasets/templatetags/general_templatetags.py
|
from django import template
from django.core import urlresolvers
import datetime
import time
register = template.Library()
# Adapted from: http://blog.scur.pl/2012/09/highlighting-current-active-page-django/
@register.simple_tag(takes_context=True)
def current(context, url_name, return_value=' current', **kwargs):
matches = current_url_equals(context, url_name, **kwargs)
return return_value if matches else ''
# Adapted from: http://blog.scur.pl/2012/09/highlighting-current-active-page-django/
def current_url_equals(context, url_name, **kwargs):
resolved = False
try:
resolved = urlresolvers.resolve(context.get('request').path)
except:
pass
matches = resolved and resolved.url_name == url_name
if matches and kwargs:
for key in kwargs:
kwarg = kwargs.get(key)
resolved_kwarg = resolved.kwargs.get(key)
if not resolved_kwarg or kwarg != resolved_kwarg:
return False
return matches
@register.simple_tag(takes_context=True)
def active_if_current(context, url_name, return_value=' current', **kwargs):
if current(context, url_name, return_value, **kwargs):
return 'active '
else:
return ''
@register.filter()
def timestamp_to_datetime(value):
return datetime.datetime.fromtimestamp(value)
|
from django import template
from django.core import urlresolvers
import datetime
import time
register = template.Library()
# Adapted from: http://blog.scur.pl/2012/09/highlighting-current-active-page-django/
@register.simple_tag(takes_context=True)
def current(context, url_name, return_value=' current', **kwargs):
matches = current_url_equals(context, url_name, **kwargs)
return return_value if matches else ''
# Adapted from: http://blog.scur.pl/2012/09/highlighting-current-active-page-django/
def current_url_equals(context, url_name, **kwargs):
resolved = False
try:
resolved = urlresolvers.resolve(context.get('request').path)
except:
pass
matches = resolved and resolved.url_name == url_name
if matches and kwargs:
for key in kwargs:
kwarg = kwargs.get(key)
resolved_kwarg = resolved.kwargs.get(key)
if not resolved_kwarg or kwarg != resolved_kwarg:
return False
return matches
@register.simple_tag(takes_context=True)
def active_if_current(context, url_name, return_value=' current', **kwargs):
if current(context, url_name, return_value, **kwargs):
return 'active '
else:
return ''
@register.filter()
def timestamp_to_datetime(value):
if not value or value is None:
return None
return datetime.datetime.fromtimestamp(value)
|
Fix bug with timestamp_to_datetime when value is not a number
|
Fix bug with timestamp_to_datetime when value is not a number
|
Python
|
agpl-3.0
|
MTG/freesound-datasets,MTG/freesound-datasets,MTG/freesound-datasets,MTG/freesound-datasets
|
from django import template
from django.core import urlresolvers
import datetime
import time
register = template.Library()
# Adapted from: http://blog.scur.pl/2012/09/highlighting-current-active-page-django/
@register.simple_tag(takes_context=True)
def current(context, url_name, return_value=' current', **kwargs):
matches = current_url_equals(context, url_name, **kwargs)
return return_value if matches else ''
# Adapted from: http://blog.scur.pl/2012/09/highlighting-current-active-page-django/
def current_url_equals(context, url_name, **kwargs):
resolved = False
try:
resolved = urlresolvers.resolve(context.get('request').path)
except:
pass
matches = resolved and resolved.url_name == url_name
if matches and kwargs:
for key in kwargs:
kwarg = kwargs.get(key)
resolved_kwarg = resolved.kwargs.get(key)
if not resolved_kwarg or kwarg != resolved_kwarg:
return False
return matches
@register.simple_tag(takes_context=True)
def active_if_current(context, url_name, return_value=' current', **kwargs):
if current(context, url_name, return_value, **kwargs):
return 'active '
else:
return ''
@register.filter()
def timestamp_to_datetime(value):
return datetime.datetime.fromtimestamp(value)
Fix bug with timestamp_to_datetime when value is not a number
|
from django import template
from django.core import urlresolvers
import datetime
import time
register = template.Library()
# Adapted from: http://blog.scur.pl/2012/09/highlighting-current-active-page-django/
@register.simple_tag(takes_context=True)
def current(context, url_name, return_value=' current', **kwargs):
matches = current_url_equals(context, url_name, **kwargs)
return return_value if matches else ''
# Adapted from: http://blog.scur.pl/2012/09/highlighting-current-active-page-django/
def current_url_equals(context, url_name, **kwargs):
resolved = False
try:
resolved = urlresolvers.resolve(context.get('request').path)
except:
pass
matches = resolved and resolved.url_name == url_name
if matches and kwargs:
for key in kwargs:
kwarg = kwargs.get(key)
resolved_kwarg = resolved.kwargs.get(key)
if not resolved_kwarg or kwarg != resolved_kwarg:
return False
return matches
@register.simple_tag(takes_context=True)
def active_if_current(context, url_name, return_value=' current', **kwargs):
if current(context, url_name, return_value, **kwargs):
return 'active '
else:
return ''
@register.filter()
def timestamp_to_datetime(value):
if not value or value is None:
return None
return datetime.datetime.fromtimestamp(value)
|
<commit_before>from django import template
from django.core import urlresolvers
import datetime
import time
register = template.Library()
# Adapted from: http://blog.scur.pl/2012/09/highlighting-current-active-page-django/
@register.simple_tag(takes_context=True)
def current(context, url_name, return_value=' current', **kwargs):
matches = current_url_equals(context, url_name, **kwargs)
return return_value if matches else ''
# Adapted from: http://blog.scur.pl/2012/09/highlighting-current-active-page-django/
def current_url_equals(context, url_name, **kwargs):
resolved = False
try:
resolved = urlresolvers.resolve(context.get('request').path)
except:
pass
matches = resolved and resolved.url_name == url_name
if matches and kwargs:
for key in kwargs:
kwarg = kwargs.get(key)
resolved_kwarg = resolved.kwargs.get(key)
if not resolved_kwarg or kwarg != resolved_kwarg:
return False
return matches
@register.simple_tag(takes_context=True)
def active_if_current(context, url_name, return_value=' current', **kwargs):
if current(context, url_name, return_value, **kwargs):
return 'active '
else:
return ''
@register.filter()
def timestamp_to_datetime(value):
return datetime.datetime.fromtimestamp(value)
<commit_msg>Fix bug with timestamp_to_datetime when value is not a number<commit_after>
|
from django import template
from django.core import urlresolvers
import datetime
import time
register = template.Library()
# Adapted from: http://blog.scur.pl/2012/09/highlighting-current-active-page-django/
@register.simple_tag(takes_context=True)
def current(context, url_name, return_value=' current', **kwargs):
    """Return *return_value* when the request resolves to *url_name*, else ''."""
    # Adapted from: http://blog.scur.pl/2012/09/highlighting-current-active-page-django/
    if current_url_equals(context, url_name, **kwargs):
        return return_value
    return ''
# Adapted from: http://blog.scur.pl/2012/09/highlighting-current-active-page-django/
def current_url_equals(context, url_name, **kwargs):
    """Check whether the current request path resolves to *url_name*.

    Any keyword arguments given must also match the resolved URL's
    captured kwargs. Returns a falsy value when the path does not
    resolve or the context carries no request.
    """
    resolved = False
    try:
        resolved = urlresolvers.resolve(context.get('request').path)
    except Exception:
        # Fix: was a bare ``except:`` which also swallowed SystemExit and
        # KeyboardInterrupt. A missing request (AttributeError) or an
        # unresolvable path simply means "not current".
        pass
    matches = resolved and resolved.url_name == url_name
    if matches and kwargs:
        for key in kwargs:
            kwarg = kwargs.get(key)
            resolved_kwarg = resolved.kwargs.get(key)
            if not resolved_kwarg or kwarg != resolved_kwarg:
                return False
    return matches
@register.simple_tag(takes_context=True)
def active_if_current(context, url_name, return_value=' current', **kwargs):
    """Return 'active ' when the current page matches *url_name*, else ''."""
    matched = current(context, url_name, return_value, **kwargs)
    return 'active ' if matched else ''
@register.filter()
def timestamp_to_datetime(value):
    """Convert a Unix timestamp to a naive local datetime.

    Falsy values (None, 0, '') yield None so templates can render a
    missing timestamp without raising.
    """
    # Fix: ``if not value or value is None`` — the second clause was
    # redundant, since None is already falsy.
    if not value:
        return None
    return datetime.datetime.fromtimestamp(value)
|
from django import template
from django.core import urlresolvers
import datetime
import time
register = template.Library()
# Adapted from: http://blog.scur.pl/2012/09/highlighting-current-active-page-django/
@register.simple_tag(takes_context=True)
def current(context, url_name, return_value=' current', **kwargs):
matches = current_url_equals(context, url_name, **kwargs)
return return_value if matches else ''
# Adapted from: http://blog.scur.pl/2012/09/highlighting-current-active-page-django/
def current_url_equals(context, url_name, **kwargs):
resolved = False
try:
resolved = urlresolvers.resolve(context.get('request').path)
except:
pass
matches = resolved and resolved.url_name == url_name
if matches and kwargs:
for key in kwargs:
kwarg = kwargs.get(key)
resolved_kwarg = resolved.kwargs.get(key)
if not resolved_kwarg or kwarg != resolved_kwarg:
return False
return matches
@register.simple_tag(takes_context=True)
def active_if_current(context, url_name, return_value=' current', **kwargs):
if current(context, url_name, return_value, **kwargs):
return 'active '
else:
return ''
@register.filter()
def timestamp_to_datetime(value):
return datetime.datetime.fromtimestamp(value)
Fix bug with timestamp_to_datetime when value is not a numberfrom django import template
from django.core import urlresolvers
import datetime
import time
register = template.Library()
# Adapted from: http://blog.scur.pl/2012/09/highlighting-current-active-page-django/
@register.simple_tag(takes_context=True)
def current(context, url_name, return_value=' current', **kwargs):
matches = current_url_equals(context, url_name, **kwargs)
return return_value if matches else ''
# Adapted from: http://blog.scur.pl/2012/09/highlighting-current-active-page-django/
def current_url_equals(context, url_name, **kwargs):
resolved = False
try:
resolved = urlresolvers.resolve(context.get('request').path)
except:
pass
matches = resolved and resolved.url_name == url_name
if matches and kwargs:
for key in kwargs:
kwarg = kwargs.get(key)
resolved_kwarg = resolved.kwargs.get(key)
if not resolved_kwarg or kwarg != resolved_kwarg:
return False
return matches
@register.simple_tag(takes_context=True)
def active_if_current(context, url_name, return_value=' current', **kwargs):
if current(context, url_name, return_value, **kwargs):
return 'active '
else:
return ''
@register.filter()
def timestamp_to_datetime(value):
if not value or value is None:
return None
return datetime.datetime.fromtimestamp(value)
|
<commit_before>from django import template
from django.core import urlresolvers
import datetime
import time
register = template.Library()
# Adapted from: http://blog.scur.pl/2012/09/highlighting-current-active-page-django/
@register.simple_tag(takes_context=True)
def current(context, url_name, return_value=' current', **kwargs):
matches = current_url_equals(context, url_name, **kwargs)
return return_value if matches else ''
# Adapted from: http://blog.scur.pl/2012/09/highlighting-current-active-page-django/
def current_url_equals(context, url_name, **kwargs):
resolved = False
try:
resolved = urlresolvers.resolve(context.get('request').path)
except:
pass
matches = resolved and resolved.url_name == url_name
if matches and kwargs:
for key in kwargs:
kwarg = kwargs.get(key)
resolved_kwarg = resolved.kwargs.get(key)
if not resolved_kwarg or kwarg != resolved_kwarg:
return False
return matches
@register.simple_tag(takes_context=True)
def active_if_current(context, url_name, return_value=' current', **kwargs):
if current(context, url_name, return_value, **kwargs):
return 'active '
else:
return ''
@register.filter()
def timestamp_to_datetime(value):
return datetime.datetime.fromtimestamp(value)
<commit_msg>Fix bug with timestamp_to_datetime when value is not a number<commit_after>from django import template
from django.core import urlresolvers
import datetime
import time
register = template.Library()
# Adapted from: http://blog.scur.pl/2012/09/highlighting-current-active-page-django/
@register.simple_tag(takes_context=True)
def current(context, url_name, return_value=' current', **kwargs):
matches = current_url_equals(context, url_name, **kwargs)
return return_value if matches else ''
# Adapted from: http://blog.scur.pl/2012/09/highlighting-current-active-page-django/
def current_url_equals(context, url_name, **kwargs):
resolved = False
try:
resolved = urlresolvers.resolve(context.get('request').path)
except:
pass
matches = resolved and resolved.url_name == url_name
if matches and kwargs:
for key in kwargs:
kwarg = kwargs.get(key)
resolved_kwarg = resolved.kwargs.get(key)
if not resolved_kwarg or kwarg != resolved_kwarg:
return False
return matches
@register.simple_tag(takes_context=True)
def active_if_current(context, url_name, return_value=' current', **kwargs):
if current(context, url_name, return_value, **kwargs):
return 'active '
else:
return ''
@register.filter()
def timestamp_to_datetime(value):
if not value or value is None:
return None
return datetime.datetime.fromtimestamp(value)
|
504045eb346fd8ff3ce968a3140520cff99165cc
|
ibmcnx/doc/DataSources.py
|
ibmcnx/doc/DataSources.py
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check
for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.show( t1 )
print '\n\n'
AdminConfig.showall( t1 )
AdminConfig.showAttribute(t1,'statementCacheSize' )
AdminConfig.showAttribute(t1,'[statementCacheSize]' )
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
dbs = AdminConfig.list('DataSource', AdminConfig.getid('/Cell:cnxwas1Cell01/'))
print dbs
# dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check
#
# for db in dbs:
# t1 = ibmcnx.functions.getDSId( db )
# AdminConfig.show( t1 )
# print '\n\n'
# AdminConfig.showall( t1 )
# AdminConfig.showAttribute(t1,'statementCacheSize' )
# AdminConfig.showAttribute(t1,'[statementCacheSize]' )
|
Create documentation of DataSource Settings
|
8: Create documentation of DataSource Settings
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/8
|
Python
|
apache-2.0
|
stoeps13/ibmcnx2,stoeps13/ibmcnx2
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check
for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.show( t1 )
print '\n\n'
AdminConfig.showall( t1 )
AdminConfig.showAttribute(t1,'statementCacheSize' )
AdminConfig.showAttribute(t1,'[statementCacheSize]' )8: Create documentation of DataSource Settings
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/8
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
dbs = AdminConfig.list('DataSource', AdminConfig.getid('/Cell:cnxwas1Cell01/'))
print dbs
# dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check
#
# for db in dbs:
# t1 = ibmcnx.functions.getDSId( db )
# AdminConfig.show( t1 )
# print '\n\n'
# AdminConfig.showall( t1 )
# AdminConfig.showAttribute(t1,'statementCacheSize' )
# AdminConfig.showAttribute(t1,'[statementCacheSize]' )
|
<commit_before>######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check
for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.show( t1 )
print '\n\n'
AdminConfig.showall( t1 )
AdminConfig.showAttribute(t1,'statementCacheSize' )
AdminConfig.showAttribute(t1,'[statementCacheSize]' )<commit_msg>8: Create documentation of DataSource Settings
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/8<commit_after>
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
dbs = AdminConfig.list('DataSource', AdminConfig.getid('/Cell:cnxwas1Cell01/'))
print dbs
# dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check
#
# for db in dbs:
# t1 = ibmcnx.functions.getDSId( db )
# AdminConfig.show( t1 )
# print '\n\n'
# AdminConfig.showall( t1 )
# AdminConfig.showAttribute(t1,'statementCacheSize' )
# AdminConfig.showAttribute(t1,'[statementCacheSize]' )
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check
for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.show( t1 )
print '\n\n'
AdminConfig.showall( t1 )
AdminConfig.showAttribute(t1,'statementCacheSize' )
AdminConfig.showAttribute(t1,'[statementCacheSize]' )8: Create documentation of DataSource Settings
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/8######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
dbs = AdminConfig.list('DataSource', AdminConfig.getid('/Cell:cnxwas1Cell01/'))
print dbs
# dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check
#
# for db in dbs:
# t1 = ibmcnx.functions.getDSId( db )
# AdminConfig.show( t1 )
# print '\n\n'
# AdminConfig.showall( t1 )
# AdminConfig.showAttribute(t1,'statementCacheSize' )
# AdminConfig.showAttribute(t1,'[statementCacheSize]' )
|
<commit_before>######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check
for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.show( t1 )
print '\n\n'
AdminConfig.showall( t1 )
AdminConfig.showAttribute(t1,'statementCacheSize' )
AdminConfig.showAttribute(t1,'[statementCacheSize]' )<commit_msg>8: Create documentation of DataSource Settings
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/8<commit_after>######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
dbs = AdminConfig.list('DataSource', AdminConfig.getid('/Cell:cnxwas1Cell01/'))
print dbs
# dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check
#
# for db in dbs:
# t1 = ibmcnx.functions.getDSId( db )
# AdminConfig.show( t1 )
# print '\n\n'
# AdminConfig.showall( t1 )
# AdminConfig.showAttribute(t1,'statementCacheSize' )
# AdminConfig.showAttribute(t1,'[statementCacheSize]' )
|
4c5855b53bd3c9f945ea55291c2abdd94d309201
|
ibmcnx/doc/Documentation.py
|
ibmcnx/doc/Documentation.py
|
######
# Create a file (html or markdown) with the output of
# - JVMHeap
# - LogFiles
# - Ports
# - Variables
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-08
#
# License: Apache 2.0
#
# TODO: Create a menu for file selection
import sys
import os.path
filename = raw_input( 'Path and Filename to Documentation file: ' )
sys.stdout = open( filename, "w")
print '# JVM Settings of all AppServers:'
execfile( 'ibmcnx/doc/JVMSettings.py' )
print '# Used Ports:'
execfile( 'ibmcnx/doc/Ports.py' )
print '# LogFile Settgins:'
execfile( 'ibmcnx/doc/LogFiles.py' )
print '# WebSphere Variables'
execfile( 'ibmcnx/doc/Variables.py' )
|
######
# Create a file (html or markdown) with the output of
# - JVMHeap
# - LogFiles
# - Ports
# - Variables
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-08
#
# License: Apache 2.0
#
# TODO: Create a menu for file selection
import sys
import os.path
filename = raw_input( 'Path and Filename to Documentation file: ' )
if (os.path.isfile( fileopen )):
answer = raw_input( "File exists, Overwrite, Append or Abort? (O|A|X)" ).lower()
if answer == "o":
sys.stdout = open( filename, "w")
elif answer == "a":
sys.stdout = open( filename, "a")
else:
print "Exit"
sys.exit()
print '# JVM Settings of all AppServers:'
execfile( 'ibmcnx/doc/JVMSettings.py' )
print '# Used Ports:'
execfile( 'ibmcnx/doc/Ports.py' )
print '# LogFile Settgins:'
execfile( 'ibmcnx/doc/LogFiles.py' )
print '# WebSphere Variables'
execfile( 'ibmcnx/doc/Variables.py' )
|
Create script to save documentation to a file
|
4: Create script to save documentation to a file
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/4
|
Python
|
apache-2.0
|
stoeps13/ibmcnx2,stoeps13/ibmcnx2
|
######
# Create a file (html or markdown) with the output of
# - JVMHeap
# - LogFiles
# - Ports
# - Variables
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-08
#
# License: Apache 2.0
#
# TODO: Create a menu for file selection
import sys
import os.path
filename = raw_input( 'Path and Filename to Documentation file: ' )
sys.stdout = open( filename, "w")
print '# JVM Settings of all AppServers:'
execfile( 'ibmcnx/doc/JVMSettings.py' )
print '# Used Ports:'
execfile( 'ibmcnx/doc/Ports.py' )
print '# LogFile Settgins:'
execfile( 'ibmcnx/doc/LogFiles.py' )
print '# WebSphere Variables'
execfile( 'ibmcnx/doc/Variables.py' )4: Create script to save documentation to a file
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/4
|
######
# Create a file (html or markdown) with the output of
# - JVMHeap
# - LogFiles
# - Ports
# - Variables
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-08
#
# License: Apache 2.0
#
# TODO: Create a menu for file selection
import sys
import os.path
filename = raw_input( 'Path and Filename to Documentation file: ' )
if (os.path.isfile( fileopen )):
answer = raw_input( "File exists, Overwrite, Append or Abort? (O|A|X)" ).lower()
if answer == "o":
sys.stdout = open( filename, "w")
elif answer == "a":
sys.stdout = open( filename, "a")
else:
print "Exit"
sys.exit()
print '# JVM Settings of all AppServers:'
execfile( 'ibmcnx/doc/JVMSettings.py' )
print '# Used Ports:'
execfile( 'ibmcnx/doc/Ports.py' )
print '# LogFile Settgins:'
execfile( 'ibmcnx/doc/LogFiles.py' )
print '# WebSphere Variables'
execfile( 'ibmcnx/doc/Variables.py' )
|
<commit_before>######
# Create a file (html or markdown) with the output of
# - JVMHeap
# - LogFiles
# - Ports
# - Variables
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-08
#
# License: Apache 2.0
#
# TODO: Create a menu for file selection
import sys
import os.path
filename = raw_input( 'Path and Filename to Documentation file: ' )
sys.stdout = open( filename, "w")
print '# JVM Settings of all AppServers:'
execfile( 'ibmcnx/doc/JVMSettings.py' )
print '# Used Ports:'
execfile( 'ibmcnx/doc/Ports.py' )
print '# LogFile Settgins:'
execfile( 'ibmcnx/doc/LogFiles.py' )
print '# WebSphere Variables'
execfile( 'ibmcnx/doc/Variables.py' )<commit_msg>4: Create script to save documentation to a file
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/4<commit_after>
|
######
# Create a file (html or markdown) with the output of
# - JVMHeap
# - LogFiles
# - Ports
# - Variables
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-08
#
# License: Apache 2.0
#
# TODO: Create a menu for file selection
import sys
import os.path
filename = raw_input( 'Path and Filename to Documentation file: ' )
if (os.path.isfile( fileopen )):
answer = raw_input( "File exists, Overwrite, Append or Abort? (O|A|X)" ).lower()
if answer == "o":
sys.stdout = open( filename, "w")
elif answer == "a":
sys.stdout = open( filename, "a")
else:
print "Exit"
sys.exit()
print '# JVM Settings of all AppServers:'
execfile( 'ibmcnx/doc/JVMSettings.py' )
print '# Used Ports:'
execfile( 'ibmcnx/doc/Ports.py' )
print '# LogFile Settgins:'
execfile( 'ibmcnx/doc/LogFiles.py' )
print '# WebSphere Variables'
execfile( 'ibmcnx/doc/Variables.py' )
|
######
# Create a file (html or markdown) with the output of
# - JVMHeap
# - LogFiles
# - Ports
# - Variables
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-08
#
# License: Apache 2.0
#
# TODO: Create a menu for file selection
import sys
import os.path
filename = raw_input( 'Path and Filename to Documentation file: ' )
sys.stdout = open( filename, "w")
print '# JVM Settings of all AppServers:'
execfile( 'ibmcnx/doc/JVMSettings.py' )
print '# Used Ports:'
execfile( 'ibmcnx/doc/Ports.py' )
print '# LogFile Settgins:'
execfile( 'ibmcnx/doc/LogFiles.py' )
print '# WebSphere Variables'
execfile( 'ibmcnx/doc/Variables.py' )4: Create script to save documentation to a file
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/4######
# Create a file (html or markdown) with the output of
# - JVMHeap
# - LogFiles
# - Ports
# - Variables
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-08
#
# License: Apache 2.0
#
# TODO: Create a menu for file selection
import sys
import os.path
filename = raw_input( 'Path and Filename to Documentation file: ' )
if (os.path.isfile( fileopen )):
answer = raw_input( "File exists, Overwrite, Append or Abort? (O|A|X)" ).lower()
if answer == "o":
sys.stdout = open( filename, "w")
elif answer == "a":
sys.stdout = open( filename, "a")
else:
print "Exit"
sys.exit()
print '# JVM Settings of all AppServers:'
execfile( 'ibmcnx/doc/JVMSettings.py' )
print '# Used Ports:'
execfile( 'ibmcnx/doc/Ports.py' )
print '# LogFile Settgins:'
execfile( 'ibmcnx/doc/LogFiles.py' )
print '# WebSphere Variables'
execfile( 'ibmcnx/doc/Variables.py' )
|
<commit_before>######
# Create a file (html or markdown) with the output of
# - JVMHeap
# - LogFiles
# - Ports
# - Variables
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-08
#
# License: Apache 2.0
#
# TODO: Create a menu for file selection
import sys
import os.path
filename = raw_input( 'Path and Filename to Documentation file: ' )
sys.stdout = open( filename, "w")
print '# JVM Settings of all AppServers:'
execfile( 'ibmcnx/doc/JVMSettings.py' )
print '# Used Ports:'
execfile( 'ibmcnx/doc/Ports.py' )
print '# LogFile Settgins:'
execfile( 'ibmcnx/doc/LogFiles.py' )
print '# WebSphere Variables'
execfile( 'ibmcnx/doc/Variables.py' )<commit_msg>4: Create script to save documentation to a file
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/4<commit_after>######
# Create a file (html or markdown) with the output of
# - JVMHeap
# - LogFiles
# - Ports
# - Variables
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-08
#
# License: Apache 2.0
#
# TODO: Create a menu for file selection
import sys
import os.path
filename = raw_input( 'Path and Filename to Documentation file: ' )
if (os.path.isfile( fileopen )):
answer = raw_input( "File exists, Overwrite, Append or Abort? (O|A|X)" ).lower()
if answer == "o":
sys.stdout = open( filename, "w")
elif answer == "a":
sys.stdout = open( filename, "a")
else:
print "Exit"
sys.exit()
print '# JVM Settings of all AppServers:'
execfile( 'ibmcnx/doc/JVMSettings.py' )
print '# Used Ports:'
execfile( 'ibmcnx/doc/Ports.py' )
print '# LogFile Settgins:'
execfile( 'ibmcnx/doc/LogFiles.py' )
print '# WebSphere Variables'
execfile( 'ibmcnx/doc/Variables.py' )
|
ef93478a0bb4f4eaea470e96b740d55bf8b6f3b5
|
python/ecep/portal/management/commands/populate_availability.py
|
python/ecep/portal/management/commands/populate_availability.py
|
import random
import csv
from django.core.management.base import BaseCommand, CommandError
from portal.models import Location
class Command(BaseCommand):
"""
"""
def handle(self, *args, **options):
"""
"""
with open('availability.csv', 'rb') as availability_file:
reader = csv.DictReader(availability_file)
for row in reader:
try:
key = row['Key']
l = Location.objects.get(ecm_key=key)
print l.site_name
|
import random
import csv
from django.core.management.base import BaseCommand, CommandError
from portal.models import Location
class Command(BaseCommand):
"""
"""
def handle(self, *args, **options):
"""
"""
with open('availability.csv', 'rb') as availability_file:
reader = csv.DictReader(availability_file)
for row in reader:
try:
key = row['Key']
l = Location.objects.get(ecm_key=key)
l.availability = row['Availability']
l.save()
print l.availability
print ''
except:
print 'Uh oh!'
|
Add logic for availability population
|
Add logic for availability population
|
Python
|
mit
|
smartchicago/chicago-early-learning,smartchicago/chicago-early-learning,smartchicago/chicago-early-learning,smartchicago/chicago-early-learning
|
import random
import csv
from django.core.management.base import BaseCommand, CommandError
from portal.models import Location
class Command(BaseCommand):
"""
"""
def handle(self, *args, **options):
"""
"""
with open('availability.csv', 'rb') as availability_file:
reader = csv.DictReader(availability_file)
for row in reader:
try:
key = row['Key']
l = Location.objects.get(ecm_key=key)
print l.site_name
Add logic for availability population
|
import random
import csv
from django.core.management.base import BaseCommand, CommandError
from portal.models import Location
class Command(BaseCommand):
"""
"""
def handle(self, *args, **options):
"""
"""
with open('availability.csv', 'rb') as availability_file:
reader = csv.DictReader(availability_file)
for row in reader:
try:
key = row['Key']
l = Location.objects.get(ecm_key=key)
l.availability = row['Availability']
l.save()
print l.availability
print ''
except:
print 'Uh oh!'
|
<commit_before>import random
import csv
from django.core.management.base import BaseCommand, CommandError
from portal.models import Location
class Command(BaseCommand):
"""
"""
def handle(self, *args, **options):
"""
"""
with open('availability.csv', 'rb') as availability_file:
reader = csv.DictReader(availability_file)
for row in reader:
try:
key = row['Key']
l = Location.objects.get(ecm_key=key)
print l.site_name
<commit_msg>Add logic for availability population<commit_after>
|
import random
import csv
from django.core.management.base import BaseCommand, CommandError
from portal.models import Location
class Command(BaseCommand):
"""
"""
def handle(self, *args, **options):
"""
"""
with open('availability.csv', 'rb') as availability_file:
reader = csv.DictReader(availability_file)
for row in reader:
try:
key = row['Key']
l = Location.objects.get(ecm_key=key)
l.availability = row['Availability']
l.save()
print l.availability
print ''
except:
print 'Uh oh!'
|
import random
import csv
from django.core.management.base import BaseCommand, CommandError
from portal.models import Location
class Command(BaseCommand):
"""
"""
def handle(self, *args, **options):
"""
"""
with open('availability.csv', 'rb') as availability_file:
reader = csv.DictReader(availability_file)
for row in reader:
try:
key = row['Key']
l = Location.objects.get(ecm_key=key)
print l.site_name
Add logic for availability populationimport random
import csv
from django.core.management.base import BaseCommand, CommandError
from portal.models import Location
class Command(BaseCommand):
"""
"""
def handle(self, *args, **options):
"""
"""
with open('availability.csv', 'rb') as availability_file:
reader = csv.DictReader(availability_file)
for row in reader:
try:
key = row['Key']
l = Location.objects.get(ecm_key=key)
l.availability = row['Availability']
l.save()
print l.availability
print ''
except:
print 'Uh oh!'
|
<commit_before>import random
import csv
from django.core.management.base import BaseCommand, CommandError
from portal.models import Location
class Command(BaseCommand):
"""
"""
def handle(self, *args, **options):
"""
"""
with open('availability.csv', 'rb') as availability_file:
reader = csv.DictReader(availability_file)
for row in reader:
try:
key = row['Key']
l = Location.objects.get(ecm_key=key)
print l.site_name
<commit_msg>Add logic for availability population<commit_after>import random
import csv
from django.core.management.base import BaseCommand, CommandError
from portal.models import Location
class Command(BaseCommand):
"""
"""
def handle(self, *args, **options):
"""
"""
with open('availability.csv', 'rb') as availability_file:
reader = csv.DictReader(availability_file)
for row in reader:
try:
key = row['Key']
l = Location.objects.get(ecm_key=key)
l.availability = row['Availability']
l.save()
print l.availability
print ''
except:
print 'Uh oh!'
|
afb4e0d036fd93ba5e2c02e5d935452ab1a22e4e
|
emission/core/wrapper/cleanedtrip.py
|
emission/core/wrapper/cleanedtrip.py
|
import emission.core.wrapper.trip as ecwt
import emission.core.wrapper.wrapperbase as ecwb
class Cleanedtrip(ecwt.Trip):
props = ecwt.Trip.props
props.update({"raw_trip": ecwb.WrapperBase.Access.WORM,
"distance": ecwb.WrapperBase.Access.WORM,
})
def _populateDependencies(self):
super(Cleanedtrip, self)._populateDependencies()
|
import emission.core.wrapper.trip as ecwt
import emission.core.wrapper.wrapperbase as ecwb
class Cleanedtrip(ecwt.Trip):
props = ecwt.Trip.props
props.update({"raw_trip": ecwb.WrapperBase.Access.WORM
})
def _populateDependencies(self):
super(Cleanedtrip, self)._populateDependencies()
|
Remove the distance from the cleaned trip
|
Remove the distance from the cleaned trip
Since it is already in the base class (trip) and has been there since the very
first wrapper class commit.
https://github.com/e-mission/e-mission-server/commit/c4251f5de5dc65f0ddd458dc909c111ddec67153
|
Python
|
bsd-3-clause
|
shankari/e-mission-server,e-mission/e-mission-server,e-mission/e-mission-server,e-mission/e-mission-server,sunil07t/e-mission-server,sunil07t/e-mission-server,shankari/e-mission-server,e-mission/e-mission-server,sunil07t/e-mission-server,sunil07t/e-mission-server,shankari/e-mission-server,shankari/e-mission-server
|
import emission.core.wrapper.trip as ecwt
import emission.core.wrapper.wrapperbase as ecwb
class Cleanedtrip(ecwt.Trip):
props = ecwt.Trip.props
props.update({"raw_trip": ecwb.WrapperBase.Access.WORM,
"distance": ecwb.WrapperBase.Access.WORM,
})
def _populateDependencies(self):
super(Cleanedtrip, self)._populateDependencies()Remove the distance from the cleaned trip
Since it is already in the base class (trip) and has been there since the very
first wrapper class commit.
https://github.com/e-mission/e-mission-server/commit/c4251f5de5dc65f0ddd458dc909c111ddec67153
|
import emission.core.wrapper.trip as ecwt
import emission.core.wrapper.wrapperbase as ecwb
class Cleanedtrip(ecwt.Trip):
props = ecwt.Trip.props
props.update({"raw_trip": ecwb.WrapperBase.Access.WORM
})
def _populateDependencies(self):
super(Cleanedtrip, self)._populateDependencies()
|
<commit_before>import emission.core.wrapper.trip as ecwt
import emission.core.wrapper.wrapperbase as ecwb
class Cleanedtrip(ecwt.Trip):
props = ecwt.Trip.props
props.update({"raw_trip": ecwb.WrapperBase.Access.WORM,
"distance": ecwb.WrapperBase.Access.WORM,
})
def _populateDependencies(self):
super(Cleanedtrip, self)._populateDependencies()<commit_msg>Remove the distance from the cleaned trip
Since it is already in the base class (trip) and has been there since the very
first wrapper class commit.
https://github.com/e-mission/e-mission-server/commit/c4251f5de5dc65f0ddd458dc909c111ddec67153<commit_after>
|
import emission.core.wrapper.trip as ecwt
import emission.core.wrapper.wrapperbase as ecwb
class Cleanedtrip(ecwt.Trip):
props = ecwt.Trip.props
props.update({"raw_trip": ecwb.WrapperBase.Access.WORM
})
def _populateDependencies(self):
super(Cleanedtrip, self)._populateDependencies()
|
import emission.core.wrapper.trip as ecwt
import emission.core.wrapper.wrapperbase as ecwb
class Cleanedtrip(ecwt.Trip):
props = ecwt.Trip.props
props.update({"raw_trip": ecwb.WrapperBase.Access.WORM,
"distance": ecwb.WrapperBase.Access.WORM,
})
def _populateDependencies(self):
super(Cleanedtrip, self)._populateDependencies()Remove the distance from the cleaned trip
Since it is already in the base class (trip) and has been there since the very
first wrapper class commit.
https://github.com/e-mission/e-mission-server/commit/c4251f5de5dc65f0ddd458dc909c111ddec67153import emission.core.wrapper.trip as ecwt
import emission.core.wrapper.wrapperbase as ecwb
class Cleanedtrip(ecwt.Trip):
props = ecwt.Trip.props
props.update({"raw_trip": ecwb.WrapperBase.Access.WORM
})
def _populateDependencies(self):
super(Cleanedtrip, self)._populateDependencies()
|
<commit_before>import emission.core.wrapper.trip as ecwt
import emission.core.wrapper.wrapperbase as ecwb
class Cleanedtrip(ecwt.Trip):
props = ecwt.Trip.props
props.update({"raw_trip": ecwb.WrapperBase.Access.WORM,
"distance": ecwb.WrapperBase.Access.WORM,
})
def _populateDependencies(self):
super(Cleanedtrip, self)._populateDependencies()<commit_msg>Remove the distance from the cleaned trip
Since it is already in the base class (trip) and has been there since the very
first wrapper class commit.
https://github.com/e-mission/e-mission-server/commit/c4251f5de5dc65f0ddd458dc909c111ddec67153<commit_after>import emission.core.wrapper.trip as ecwt
import emission.core.wrapper.wrapperbase as ecwb
class Cleanedtrip(ecwt.Trip):
props = ecwt.Trip.props
props.update({"raw_trip": ecwb.WrapperBase.Access.WORM
})
def _populateDependencies(self):
super(Cleanedtrip, self)._populateDependencies()
|
4f8a84171bdbe24701351a54230768069a5f27fc
|
deployments/prob140/image/ipython_config.py
|
deployments/prob140/image/ipython_config.py
|
# Disable history manager, we don't really use it
# and by default it puts an sqlite file on NFS, which is not something we wanna do
c.Historymanager.enabled = False
|
# Disable history manager, we don't really use it
# and by default it puts an sqlite file on NFS, which is not something we wanna do
c.Historymanager.enabled = False
# Use memory for notebook notary file to workaround corrupted files on nfs
# https://www.sqlite.org/inmemorydb.html
# https://github.com/jupyter/jupyter/issues/174
# https://github.com/ipython/ipython/issues/9163
c.NotebookNotary.db_file = ":memory:"
|
Use memory for notebook notary file.
|
Use memory for notebook notary file.
Workaround possible file integrity issues.
|
Python
|
bsd-3-clause
|
ryanlovett/datahub,berkeley-dsep-infra/datahub,berkeley-dsep-infra/datahub,berkeley-dsep-infra/datahub,ryanlovett/datahub,ryanlovett/datahub
|
# Disable history manager, we don't really use it
# and by default it puts an sqlite file on NFS, which is not something we wanna do
c.Historymanager.enabled = False
Use memory for notebook notary file.
Workaround possible file integrity issues.
|
# Disable history manager, we don't really use it
# and by default it puts an sqlite file on NFS, which is not something we wanna do
c.Historymanager.enabled = False
# Use memory for notebook notary file to workaround corrupted files on nfs
# https://www.sqlite.org/inmemorydb.html
# https://github.com/jupyter/jupyter/issues/174
# https://github.com/ipython/ipython/issues/9163
c.NotebookNotary.db_file = ":memory:"
|
<commit_before># Disable history manager, we don't really use it
# and by default it puts an sqlite file on NFS, which is not something we wanna do
c.Historymanager.enabled = False
<commit_msg>Use memory for notebook notary file.
Workaround possible file integrity issues.<commit_after>
|
# Disable history manager, we don't really use it
# and by default it puts an sqlite file on NFS, which is not something we wanna do
c.Historymanager.enabled = False
# Use memory for notebook notary file to workaround corrupted files on nfs
# https://www.sqlite.org/inmemorydb.html
# https://github.com/jupyter/jupyter/issues/174
# https://github.com/ipython/ipython/issues/9163
c.NotebookNotary.db_file = ":memory:"
|
# Disable history manager, we don't really use it
# and by default it puts an sqlite file on NFS, which is not something we wanna do
c.Historymanager.enabled = False
Use memory for notebook notary file.
Workaround possible file integrity issues.# Disable history manager, we don't really use it
# and by default it puts an sqlite file on NFS, which is not something we wanna do
c.Historymanager.enabled = False
# Use memory for notebook notary file to workaround corrupted files on nfs
# https://www.sqlite.org/inmemorydb.html
# https://github.com/jupyter/jupyter/issues/174
# https://github.com/ipython/ipython/issues/9163
c.NotebookNotary.db_file = ":memory:"
|
<commit_before># Disable history manager, we don't really use it
# and by default it puts an sqlite file on NFS, which is not something we wanna do
c.Historymanager.enabled = False
<commit_msg>Use memory for notebook notary file.
Workaround possible file integrity issues.<commit_after># Disable history manager, we don't really use it
# and by default it puts an sqlite file on NFS, which is not something we wanna do
c.Historymanager.enabled = False
# Use memory for notebook notary file to workaround corrupted files on nfs
# https://www.sqlite.org/inmemorydb.html
# https://github.com/jupyter/jupyter/issues/174
# https://github.com/ipython/ipython/issues/9163
c.NotebookNotary.db_file = ":memory:"
|
787c8a1f1f000b75095fab5cc6b3e8e5d4ef60d8
|
usingnamespace/models/Domain.py
|
usingnamespace/models/Domain.py
|
# File: Domain.py
# Author: Bert JW Regeer <bertjw@regeer.org>
# Created: 2013-09-02
from meta import Base
from sqlalchemy import (
Boolean,
Column,
ForeignKey,
Table,
Integer,
Unicode,
PrimaryKeyConstraint,
UniqueConstraint,
)
class Domain(Base):
__table__ = Table('domains', Base.metadata,
Column('id', Integer, primary_key=True, index=True),
Column('domain', Unicode(256), index=True, unique=True),
Column('owner', Integer, ForeignKey('users.id', onupdate="CASCADE", ondelete="RESTRICT"), nullable=False),
)
|
# File: Domain.py
# Author: Bert JW Regeer <bertjw@regeer.org>
# Created: 2013-09-02
from meta import Base
from sqlalchemy import (
Boolean,
Column,
ForeignKey,
Integer,
PrimaryKeyConstraint,
String,
Table,
Unicode,
UniqueConstraint,
)
class Domain(Base):
__table__ = Table('domains', Base.metadata,
Column('id', Integer, primary_key=True, index=True),
Column('domain', String(256), index=True, unique=True),
Column('owner', Integer, ForeignKey('users.id', onupdate="CASCADE", ondelete="RESTRICT"), nullable=False),
)
|
Change from unicode to string for domain
|
Change from unicode to string for domain
DNS entries won't contain unicode characters, and by default are ASCII.
|
Python
|
isc
|
usingnamespace/usingnamespace
|
# File: Domain.py
# Author: Bert JW Regeer <bertjw@regeer.org>
# Created: 2013-09-02
from meta import Base
from sqlalchemy import (
Boolean,
Column,
ForeignKey,
Table,
Integer,
Unicode,
PrimaryKeyConstraint,
UniqueConstraint,
)
class Domain(Base):
__table__ = Table('domains', Base.metadata,
Column('id', Integer, primary_key=True, index=True),
Column('domain', Unicode(256), index=True, unique=True),
Column('owner', Integer, ForeignKey('users.id', onupdate="CASCADE", ondelete="RESTRICT"), nullable=False),
)
Change from unicode to string for domain
DNS entries won't contain unicode characters, and by default are ASCII.
|
# File: Domain.py
# Author: Bert JW Regeer <bertjw@regeer.org>
# Created: 2013-09-02
from meta import Base
from sqlalchemy import (
Boolean,
Column,
ForeignKey,
Integer,
PrimaryKeyConstraint,
String,
Table,
Unicode,
UniqueConstraint,
)
class Domain(Base):
__table__ = Table('domains', Base.metadata,
Column('id', Integer, primary_key=True, index=True),
Column('domain', String(256), index=True, unique=True),
Column('owner', Integer, ForeignKey('users.id', onupdate="CASCADE", ondelete="RESTRICT"), nullable=False),
)
|
<commit_before># File: Domain.py
# Author: Bert JW Regeer <bertjw@regeer.org>
# Created: 2013-09-02
from meta import Base
from sqlalchemy import (
Boolean,
Column,
ForeignKey,
Table,
Integer,
Unicode,
PrimaryKeyConstraint,
UniqueConstraint,
)
class Domain(Base):
__table__ = Table('domains', Base.metadata,
Column('id', Integer, primary_key=True, index=True),
Column('domain', Unicode(256), index=True, unique=True),
Column('owner', Integer, ForeignKey('users.id', onupdate="CASCADE", ondelete="RESTRICT"), nullable=False),
)
<commit_msg>Change from unicode to string for domain
DNS entries won't contain unicode characters, and by default are ASCII.<commit_after>
|
# File: Domain.py
# Author: Bert JW Regeer <bertjw@regeer.org>
# Created: 2013-09-02
from meta import Base
from sqlalchemy import (
Boolean,
Column,
ForeignKey,
Integer,
PrimaryKeyConstraint,
String,
Table,
Unicode,
UniqueConstraint,
)
class Domain(Base):
__table__ = Table('domains', Base.metadata,
Column('id', Integer, primary_key=True, index=True),
Column('domain', String(256), index=True, unique=True),
Column('owner', Integer, ForeignKey('users.id', onupdate="CASCADE", ondelete="RESTRICT"), nullable=False),
)
|
# File: Domain.py
# Author: Bert JW Regeer <bertjw@regeer.org>
# Created: 2013-09-02
from meta import Base
from sqlalchemy import (
Boolean,
Column,
ForeignKey,
Table,
Integer,
Unicode,
PrimaryKeyConstraint,
UniqueConstraint,
)
class Domain(Base):
__table__ = Table('domains', Base.metadata,
Column('id', Integer, primary_key=True, index=True),
Column('domain', Unicode(256), index=True, unique=True),
Column('owner', Integer, ForeignKey('users.id', onupdate="CASCADE", ondelete="RESTRICT"), nullable=False),
)
Change from unicode to string for domain
DNS entries won't contain unicode characters, and by default are ASCII.# File: Domain.py
# Author: Bert JW Regeer <bertjw@regeer.org>
# Created: 2013-09-02
from meta import Base
from sqlalchemy import (
Boolean,
Column,
ForeignKey,
Integer,
PrimaryKeyConstraint,
String,
Table,
Unicode,
UniqueConstraint,
)
class Domain(Base):
__table__ = Table('domains', Base.metadata,
Column('id', Integer, primary_key=True, index=True),
Column('domain', String(256), index=True, unique=True),
Column('owner', Integer, ForeignKey('users.id', onupdate="CASCADE", ondelete="RESTRICT"), nullable=False),
)
|
<commit_before># File: Domain.py
# Author: Bert JW Regeer <bertjw@regeer.org>
# Created: 2013-09-02
from meta import Base
from sqlalchemy import (
Boolean,
Column,
ForeignKey,
Table,
Integer,
Unicode,
PrimaryKeyConstraint,
UniqueConstraint,
)
class Domain(Base):
__table__ = Table('domains', Base.metadata,
Column('id', Integer, primary_key=True, index=True),
Column('domain', Unicode(256), index=True, unique=True),
Column('owner', Integer, ForeignKey('users.id', onupdate="CASCADE", ondelete="RESTRICT"), nullable=False),
)
<commit_msg>Change from unicode to string for domain
DNS entries won't contain unicode characters, and by default are ASCII.<commit_after># File: Domain.py
# Author: Bert JW Regeer <bertjw@regeer.org>
# Created: 2013-09-02
from meta import Base
from sqlalchemy import (
Boolean,
Column,
ForeignKey,
Integer,
PrimaryKeyConstraint,
String,
Table,
Unicode,
UniqueConstraint,
)
class Domain(Base):
__table__ = Table('domains', Base.metadata,
Column('id', Integer, primary_key=True, index=True),
Column('domain', String(256), index=True, unique=True),
Column('owner', Integer, ForeignKey('users.id', onupdate="CASCADE", ondelete="RESTRICT"), nullable=False),
)
|
f1d076b4e4fc834a4336141025387862b4decc5b
|
utest/libdoc/test_libdoc_api.py
|
utest/libdoc/test_libdoc_api.py
|
from io import StringIO
import sys
import tempfile
import unittest
from robot import libdoc
from robot.utils.asserts import assert_equal
class TestLibdoc(unittest.TestCase):
def setUp(self):
sys.stdout = StringIO()
def test_html(self):
output = tempfile.mkstemp(suffix='.html')[1]
libdoc.libdoc('String', output)
assert_equal(sys.stdout.getvalue().strip(), output)
with open(output) as f:
assert '"name": "String"' in f.read()
def test_xml(self):
output = tempfile.mkstemp(suffix='.xml')[1]
libdoc.libdoc('String', output)
assert_equal(sys.stdout.getvalue().strip(), output)
with open(output) as f:
assert 'name="String"' in f.read()
def test_format(self):
output = tempfile.mkstemp()[1]
libdoc.libdoc('String', output, format='xml')
assert_equal(sys.stdout.getvalue().strip(), output)
with open(output) as f:
assert 'name="String"' in f.read()
def tearDown(self):
sys.stdout = sys.__stdout__
if __name__ == '__main__':
unittest.main()
|
import sys
import tempfile
import unittest
from robot import libdoc
from robot.utils.asserts import assert_equal
from robot.utils import StringIO
class TestLibdoc(unittest.TestCase):
def setUp(self):
sys.stdout = StringIO()
def test_html(self):
output = tempfile.mkstemp(suffix='.html')[1]
libdoc.libdoc('String', output)
assert_equal(sys.stdout.getvalue().strip(), output)
with open(output) as f:
assert '"name": "String"' in f.read()
def test_xml(self):
output = tempfile.mkstemp(suffix='.xml')[1]
libdoc.libdoc('String', output)
assert_equal(sys.stdout.getvalue().strip(), output)
with open(output) as f:
assert 'name="String"' in f.read()
def test_format(self):
output = tempfile.mkstemp()[1]
libdoc.libdoc('String', output, format='xml')
assert_equal(sys.stdout.getvalue().strip(), output)
with open(output) as f:
assert 'name="String"' in f.read()
def tearDown(self):
sys.stdout = sys.__stdout__
if __name__ == '__main__':
unittest.main()
|
Fix Libdoc API unit tests on Python 2
|
Fix Libdoc API unit tests on Python 2
|
Python
|
apache-2.0
|
robotframework/robotframework,robotframework/robotframework,HelioGuilherme66/robotframework,HelioGuilherme66/robotframework,robotframework/robotframework,HelioGuilherme66/robotframework
|
from io import StringIO
import sys
import tempfile
import unittest
from robot import libdoc
from robot.utils.asserts import assert_equal
class TestLibdoc(unittest.TestCase):
def setUp(self):
sys.stdout = StringIO()
def test_html(self):
output = tempfile.mkstemp(suffix='.html')[1]
libdoc.libdoc('String', output)
assert_equal(sys.stdout.getvalue().strip(), output)
with open(output) as f:
assert '"name": "String"' in f.read()
def test_xml(self):
output = tempfile.mkstemp(suffix='.xml')[1]
libdoc.libdoc('String', output)
assert_equal(sys.stdout.getvalue().strip(), output)
with open(output) as f:
assert 'name="String"' in f.read()
def test_format(self):
output = tempfile.mkstemp()[1]
libdoc.libdoc('String', output, format='xml')
assert_equal(sys.stdout.getvalue().strip(), output)
with open(output) as f:
assert 'name="String"' in f.read()
def tearDown(self):
sys.stdout = sys.__stdout__
if __name__ == '__main__':
unittest.main()
Fix Libdoc API unit tests on Python 2
|
import sys
import tempfile
import unittest
from robot import libdoc
from robot.utils.asserts import assert_equal
from robot.utils import StringIO
class TestLibdoc(unittest.TestCase):
def setUp(self):
sys.stdout = StringIO()
def test_html(self):
output = tempfile.mkstemp(suffix='.html')[1]
libdoc.libdoc('String', output)
assert_equal(sys.stdout.getvalue().strip(), output)
with open(output) as f:
assert '"name": "String"' in f.read()
def test_xml(self):
output = tempfile.mkstemp(suffix='.xml')[1]
libdoc.libdoc('String', output)
assert_equal(sys.stdout.getvalue().strip(), output)
with open(output) as f:
assert 'name="String"' in f.read()
def test_format(self):
output = tempfile.mkstemp()[1]
libdoc.libdoc('String', output, format='xml')
assert_equal(sys.stdout.getvalue().strip(), output)
with open(output) as f:
assert 'name="String"' in f.read()
def tearDown(self):
sys.stdout = sys.__stdout__
if __name__ == '__main__':
unittest.main()
|
<commit_before>from io import StringIO
import sys
import tempfile
import unittest
from robot import libdoc
from robot.utils.asserts import assert_equal
class TestLibdoc(unittest.TestCase):
def setUp(self):
sys.stdout = StringIO()
def test_html(self):
output = tempfile.mkstemp(suffix='.html')[1]
libdoc.libdoc('String', output)
assert_equal(sys.stdout.getvalue().strip(), output)
with open(output) as f:
assert '"name": "String"' in f.read()
def test_xml(self):
output = tempfile.mkstemp(suffix='.xml')[1]
libdoc.libdoc('String', output)
assert_equal(sys.stdout.getvalue().strip(), output)
with open(output) as f:
assert 'name="String"' in f.read()
def test_format(self):
output = tempfile.mkstemp()[1]
libdoc.libdoc('String', output, format='xml')
assert_equal(sys.stdout.getvalue().strip(), output)
with open(output) as f:
assert 'name="String"' in f.read()
def tearDown(self):
sys.stdout = sys.__stdout__
if __name__ == '__main__':
unittest.main()
<commit_msg>Fix Libdoc API unit tests on Python 2<commit_after>
|
import sys
import tempfile
import unittest
from robot import libdoc
from robot.utils.asserts import assert_equal
from robot.utils import StringIO
class TestLibdoc(unittest.TestCase):
def setUp(self):
sys.stdout = StringIO()
def test_html(self):
output = tempfile.mkstemp(suffix='.html')[1]
libdoc.libdoc('String', output)
assert_equal(sys.stdout.getvalue().strip(), output)
with open(output) as f:
assert '"name": "String"' in f.read()
def test_xml(self):
output = tempfile.mkstemp(suffix='.xml')[1]
libdoc.libdoc('String', output)
assert_equal(sys.stdout.getvalue().strip(), output)
with open(output) as f:
assert 'name="String"' in f.read()
def test_format(self):
output = tempfile.mkstemp()[1]
libdoc.libdoc('String', output, format='xml')
assert_equal(sys.stdout.getvalue().strip(), output)
with open(output) as f:
assert 'name="String"' in f.read()
def tearDown(self):
sys.stdout = sys.__stdout__
if __name__ == '__main__':
unittest.main()
|
from io import StringIO
import sys
import tempfile
import unittest
from robot import libdoc
from robot.utils.asserts import assert_equal
class TestLibdoc(unittest.TestCase):
def setUp(self):
sys.stdout = StringIO()
def test_html(self):
output = tempfile.mkstemp(suffix='.html')[1]
libdoc.libdoc('String', output)
assert_equal(sys.stdout.getvalue().strip(), output)
with open(output) as f:
assert '"name": "String"' in f.read()
def test_xml(self):
output = tempfile.mkstemp(suffix='.xml')[1]
libdoc.libdoc('String', output)
assert_equal(sys.stdout.getvalue().strip(), output)
with open(output) as f:
assert 'name="String"' in f.read()
def test_format(self):
output = tempfile.mkstemp()[1]
libdoc.libdoc('String', output, format='xml')
assert_equal(sys.stdout.getvalue().strip(), output)
with open(output) as f:
assert 'name="String"' in f.read()
def tearDown(self):
sys.stdout = sys.__stdout__
if __name__ == '__main__':
unittest.main()
Fix Libdoc API unit tests on Python 2import sys
import tempfile
import unittest
from robot import libdoc
from robot.utils.asserts import assert_equal
from robot.utils import StringIO
class TestLibdoc(unittest.TestCase):
def setUp(self):
sys.stdout = StringIO()
def test_html(self):
output = tempfile.mkstemp(suffix='.html')[1]
libdoc.libdoc('String', output)
assert_equal(sys.stdout.getvalue().strip(), output)
with open(output) as f:
assert '"name": "String"' in f.read()
def test_xml(self):
output = tempfile.mkstemp(suffix='.xml')[1]
libdoc.libdoc('String', output)
assert_equal(sys.stdout.getvalue().strip(), output)
with open(output) as f:
assert 'name="String"' in f.read()
def test_format(self):
output = tempfile.mkstemp()[1]
libdoc.libdoc('String', output, format='xml')
assert_equal(sys.stdout.getvalue().strip(), output)
with open(output) as f:
assert 'name="String"' in f.read()
def tearDown(self):
sys.stdout = sys.__stdout__
if __name__ == '__main__':
unittest.main()
|
<commit_before>from io import StringIO
import sys
import tempfile
import unittest
from robot import libdoc
from robot.utils.asserts import assert_equal
class TestLibdoc(unittest.TestCase):
def setUp(self):
sys.stdout = StringIO()
def test_html(self):
output = tempfile.mkstemp(suffix='.html')[1]
libdoc.libdoc('String', output)
assert_equal(sys.stdout.getvalue().strip(), output)
with open(output) as f:
assert '"name": "String"' in f.read()
def test_xml(self):
output = tempfile.mkstemp(suffix='.xml')[1]
libdoc.libdoc('String', output)
assert_equal(sys.stdout.getvalue().strip(), output)
with open(output) as f:
assert 'name="String"' in f.read()
def test_format(self):
output = tempfile.mkstemp()[1]
libdoc.libdoc('String', output, format='xml')
assert_equal(sys.stdout.getvalue().strip(), output)
with open(output) as f:
assert 'name="String"' in f.read()
def tearDown(self):
sys.stdout = sys.__stdout__
if __name__ == '__main__':
unittest.main()
<commit_msg>Fix Libdoc API unit tests on Python 2<commit_after>import sys
import tempfile
import unittest
from robot import libdoc
from robot.utils.asserts import assert_equal
from robot.utils import StringIO
class TestLibdoc(unittest.TestCase):
def setUp(self):
sys.stdout = StringIO()
def test_html(self):
output = tempfile.mkstemp(suffix='.html')[1]
libdoc.libdoc('String', output)
assert_equal(sys.stdout.getvalue().strip(), output)
with open(output) as f:
assert '"name": "String"' in f.read()
def test_xml(self):
output = tempfile.mkstemp(suffix='.xml')[1]
libdoc.libdoc('String', output)
assert_equal(sys.stdout.getvalue().strip(), output)
with open(output) as f:
assert 'name="String"' in f.read()
def test_format(self):
output = tempfile.mkstemp()[1]
libdoc.libdoc('String', output, format='xml')
assert_equal(sys.stdout.getvalue().strip(), output)
with open(output) as f:
assert 'name="String"' in f.read()
def tearDown(self):
sys.stdout = sys.__stdout__
if __name__ == '__main__':
unittest.main()
|
1a361002b974ee9fc4f728339ea6f4d63eeb9bf1
|
binary.py
|
binary.py
|
#!/usr/bin/env python
# coding=utf-8
"""
Balanced ternary binary encoding
============
Tools for encoding balanced ternary data into binary formats and back again.
The encoded scheme used here uses 8-bit segments to represent 5-trit segments.
Each 5-trit segment is mapped to an 8-bit binary value which corresponds to its
value as an unsigned integer.
So, for example, consider the trit sequence:
0-00+
This sequence interpreted as a signed number is decimal -70, and as an unsigned
number decimal 51. We encode it using the byte value 51, or hex 0x33, which
gives:
0011 0011
If a trit sequence is not evenly divisible into 5-trit segments, the final
segment is padded to 5 trits by adding '-' trits to the left.
"""
from . import trit, integer
def encode(source):
result = b''
for i in xrange(0, len(source), 5):
value = integer.UInt(source[i:i+5], 5)
result += chr(value)
return result
def decode(binary):
trits = trit.Trits('')
for i in xrange(len(binary)):
value = ord(binary[i])
if value > 242:
raise ValueError(
"Invalid byte at position {}: {:#02x}".format(i, value))
trits += integer.UInt(value, 5)
return trits
|
#!/usr/bin/env python
# coding=utf-8
"""
Balanced ternary binary encoding
============
Tools for encoding balanced ternary data into binary formats and back again.
The encoded scheme used here uses 8-bit segments to represent 5-trit segments.
Each 5-trit segment is mapped to an 8-bit binary value which corresponds to its
value as an unsigned integer.
So, for example, consider the trit sequence:
0-00+
This sequence interpreted as a signed number is decimal -70, and as an unsigned
number decimal 51. We encode it using the byte value 51, or hex 0x33, which
gives:
0011 0011
If a trit sequence is not evenly divisible into 5-trit segments, the final
segment is padded to 5 trits by adding '-' trits to the left.
"""
from . import trit, integer
def encode(source):
result = b''
for i in range(0, len(source), 5):
value = integer.UInt(source[i:i+5], 5)
result += chr(value)
return result
def decode(binary):
trits = trit.Trits('')
for i in range(len(binary)):
value = ord(binary[i])
if value > 242:
raise ValueError(
"Invalid byte at position {}: {:#02x}".format(i, value))
trits += integer.UInt(value, 5)
return trits
|
Use 'range' rather than 'xrange' for Python 3 compatibility.
|
Use 'range' rather than 'xrange' for Python 3 compatibility.
|
Python
|
bsd-2-clause
|
direvus/btern
|
#!/usr/bin/env python
# coding=utf-8
"""
Balanced ternary binary encoding
============
Tools for encoding balanced ternary data into binary formats and back again.
The encoded scheme used here uses 8-bit segments to represent 5-trit segments.
Each 5-trit segment is mapped to an 8-bit binary value which corresponds to its
value as an unsigned integer.
So, for example, consider the trit sequence:
0-00+
This sequence interpreted as a signed number is decimal -70, and as an unsigned
number decimal 51. We encode it using the byte value 51, or hex 0x33, which
gives:
0011 0011
If a trit sequence is not evenly divisible into 5-trit segments, the final
segment is padded to 5 trits by adding '-' trits to the left.
"""
from . import trit, integer
def encode(source):
result = b''
for i in xrange(0, len(source), 5):
value = integer.UInt(source[i:i+5], 5)
result += chr(value)
return result
def decode(binary):
trits = trit.Trits('')
for i in xrange(len(binary)):
value = ord(binary[i])
if value > 242:
raise ValueError(
"Invalid byte at position {}: {:#02x}".format(i, value))
trits += integer.UInt(value, 5)
return trits
Use 'range' rather than 'xrange' for Python 3 compatibility.
|
#!/usr/bin/env python
# coding=utf-8
"""
Balanced ternary binary encoding
============
Tools for encoding balanced ternary data into binary formats and back again.
The encoded scheme used here uses 8-bit segments to represent 5-trit segments.
Each 5-trit segment is mapped to an 8-bit binary value which corresponds to its
value as an unsigned integer.
So, for example, consider the trit sequence:
0-00+
This sequence interpreted as a signed number is decimal -70, and as an unsigned
number decimal 51. We encode it using the byte value 51, or hex 0x33, which
gives:
0011 0011
If a trit sequence is not evenly divisible into 5-trit segments, the final
segment is padded to 5 trits by adding '-' trits to the left.
"""
from . import trit, integer
def encode(source):
result = b''
for i in range(0, len(source), 5):
value = integer.UInt(source[i:i+5], 5)
result += chr(value)
return result
def decode(binary):
trits = trit.Trits('')
for i in range(len(binary)):
value = ord(binary[i])
if value > 242:
raise ValueError(
"Invalid byte at position {}: {:#02x}".format(i, value))
trits += integer.UInt(value, 5)
return trits
|
<commit_before>#!/usr/bin/env python
# coding=utf-8
"""
Balanced ternary binary encoding
============
Tools for encoding balanced ternary data into binary formats and back again.
The encoded scheme used here uses 8-bit segments to represent 5-trit segments.
Each 5-trit segment is mapped to an 8-bit binary value which corresponds to its
value as an unsigned integer.
So, for example, consider the trit sequence:
0-00+
This sequence interpreted as a signed number is decimal -70, and as an unsigned
number decimal 51. We encode it using the byte value 51, or hex 0x33, which
gives:
0011 0011
If a trit sequence is not evenly divisible into 5-trit segments, the final
segment is padded to 5 trits by adding '-' trits to the left.
"""
from . import trit, integer
def encode(source):
result = b''
for i in xrange(0, len(source), 5):
value = integer.UInt(source[i:i+5], 5)
result += chr(value)
return result
def decode(binary):
trits = trit.Trits('')
for i in xrange(len(binary)):
value = ord(binary[i])
if value > 242:
raise ValueError(
"Invalid byte at position {}: {:#02x}".format(i, value))
trits += integer.UInt(value, 5)
return trits
<commit_msg>Use 'range' rather than 'xrange' for Python 3 compatibility.<commit_after>
|
#!/usr/bin/env python
# coding=utf-8
"""
Balanced ternary binary encoding
============
Tools for encoding balanced ternary data into binary formats and back again.
The encoded scheme used here uses 8-bit segments to represent 5-trit segments.
Each 5-trit segment is mapped to an 8-bit binary value which corresponds to its
value as an unsigned integer.
So, for example, consider the trit sequence:
0-00+
This sequence interpreted as a signed number is decimal -70, and as an unsigned
number decimal 51. We encode it using the byte value 51, or hex 0x33, which
gives:
0011 0011
If a trit sequence is not evenly divisible into 5-trit segments, the final
segment is padded to 5 trits by adding '-' trits to the left.
"""
from . import trit, integer
def encode(source):
result = b''
for i in range(0, len(source), 5):
value = integer.UInt(source[i:i+5], 5)
result += chr(value)
return result
def decode(binary):
trits = trit.Trits('')
for i in range(len(binary)):
value = ord(binary[i])
if value > 242:
raise ValueError(
"Invalid byte at position {}: {:#02x}".format(i, value))
trits += integer.UInt(value, 5)
return trits
|
#!/usr/bin/env python
# coding=utf-8
"""
Balanced ternary binary encoding
============
Tools for encoding balanced ternary data into binary formats and back again.
The encoded scheme used here uses 8-bit segments to represent 5-trit segments.
Each 5-trit segment is mapped to an 8-bit binary value which corresponds to its
value as an unsigned integer.
So, for example, consider the trit sequence:
0-00+
This sequence interpreted as a signed number is decimal -70, and as an unsigned
number decimal 51. We encode it using the byte value 51, or hex 0x33, which
gives:
0011 0011
If a trit sequence is not evenly divisible into 5-trit segments, the final
segment is padded to 5 trits by adding '-' trits to the left.
"""
from . import trit, integer
def encode(source):
result = b''
for i in xrange(0, len(source), 5):
value = integer.UInt(source[i:i+5], 5)
result += chr(value)
return result
def decode(binary):
trits = trit.Trits('')
for i in xrange(len(binary)):
value = ord(binary[i])
if value > 242:
raise ValueError(
"Invalid byte at position {}: {:#02x}".format(i, value))
trits += integer.UInt(value, 5)
return trits
Use 'range' rather than 'xrange' for Python 3 compatibility.#!/usr/bin/env python
# coding=utf-8
"""
Balanced ternary binary encoding
============
Tools for encoding balanced ternary data into binary formats and back again.
The encoded scheme used here uses 8-bit segments to represent 5-trit segments.
Each 5-trit segment is mapped to an 8-bit binary value which corresponds to its
value as an unsigned integer.
So, for example, consider the trit sequence:
0-00+
This sequence interpreted as a signed number is decimal -70, and as an unsigned
number decimal 51. We encode it using the byte value 51, or hex 0x33, which
gives:
0011 0011
If a trit sequence is not evenly divisible into 5-trit segments, the final
segment is padded to 5 trits by adding '-' trits to the left.
"""
from . import trit, integer
def encode(source):
result = b''
for i in range(0, len(source), 5):
value = integer.UInt(source[i:i+5], 5)
result += chr(value)
return result
def decode(binary):
trits = trit.Trits('')
for i in range(len(binary)):
value = ord(binary[i])
if value > 242:
raise ValueError(
"Invalid byte at position {}: {:#02x}".format(i, value))
trits += integer.UInt(value, 5)
return trits
|
<commit_before>#!/usr/bin/env python
# coding=utf-8
"""
Balanced ternary binary encoding
============
Tools for encoding balanced ternary data into binary formats and back again.
The encoded scheme used here uses 8-bit segments to represent 5-trit segments.
Each 5-trit segment is mapped to an 8-bit binary value which corresponds to its
value as an unsigned integer.
So, for example, consider the trit sequence:
0-00+
This sequence interpreted as a signed number is decimal -70, and as an unsigned
number decimal 51. We encode it using the byte value 51, or hex 0x33, which
gives:
0011 0011
If a trit sequence is not evenly divisible into 5-trit segments, the final
segment is padded to 5 trits by adding '-' trits to the left.
"""
from . import trit, integer
def encode(source):
result = b''
for i in xrange(0, len(source), 5):
value = integer.UInt(source[i:i+5], 5)
result += chr(value)
return result
def decode(binary):
trits = trit.Trits('')
for i in xrange(len(binary)):
value = ord(binary[i])
if value > 242:
raise ValueError(
"Invalid byte at position {}: {:#02x}".format(i, value))
trits += integer.UInt(value, 5)
return trits
<commit_msg>Use 'range' rather than 'xrange' for Python 3 compatibility.<commit_after>#!/usr/bin/env python
# coding=utf-8
"""
Balanced ternary binary encoding
============
Tools for encoding balanced ternary data into binary formats and back again.
The encoded scheme used here uses 8-bit segments to represent 5-trit segments.
Each 5-trit segment is mapped to an 8-bit binary value which corresponds to its
value as an unsigned integer.
So, for example, consider the trit sequence:
0-00+
This sequence interpreted as a signed number is decimal -70, and as an unsigned
number decimal 51. We encode it using the byte value 51, or hex 0x33, which
gives:
0011 0011
If a trit sequence is not evenly divisible into 5-trit segments, the final
segment is padded to 5 trits by adding '-' trits to the left.
"""
from . import trit, integer
def encode(source):
result = b''
for i in range(0, len(source), 5):
value = integer.UInt(source[i:i+5], 5)
result += chr(value)
return result
def decode(binary):
trits = trit.Trits('')
for i in range(len(binary)):
value = ord(binary[i])
if value > 242:
raise ValueError(
"Invalid byte at position {}: {:#02x}".format(i, value))
trits += integer.UInt(value, 5)
return trits
|
b52e32cba060e5a51f2f012d3cad7cddd7dde3cc
|
lc0345_reverse_vowels_of_a_string.py
|
lc0345_reverse_vowels_of_a_string.py
|
"""Leetcode 345. Reverse Vowels of a String
Easy
URL: https://leetcode.com/problems/reverse-vowels-of-a-string/
Write a function that takes a string as input and reverse only the vowels of
a string.
Example 1:
Input: "hello"
Output: "holle"
Example 2:
Input: "leetcode"
Output: "leotcede"
Note:
The vowels does not include the letter "y".
"""
class Solution(object):
def reverseVowels(self, s):
"""
:type s: str
:rtype: str
"""
pass
def main():
pass
if __name__ == '__main__':
main()
|
"""Leetcode 345. Reverse Vowels of a String
Easy
URL: https://leetcode.com/problems/reverse-vowels-of-a-string/
Write a function that takes a string as input and reverse only the vowels of
a string.
Example 1:
Input: "hello"
Output: "holle"
Example 2:
Input: "leetcode"
Output: "leotcede"
Note:
The vowels does not include the letter "y".
"""
class SolutionReversedVowelPosDict(object):
def reverseVowels(self, s):
"""
:type s: str
:rtype: str
Time complexity: O(n).
Space complexity: O(n).
"""
if not s:
return ''
n = len(s)
# Use set to collect vowels for quick lookup.
vowels = set(['A', 'E', 'I', 'O', 'U',
'a', 'e', 'i', 'o', 'u'])
# Collect vowles & positions in a dict: pos->vowel, in reversed order.
vowel_chars = []
vowel_pos = []
for pos, c in enumerate(s):
if c in vowels:
vowel_chars.append(c)
vowel_pos.append(pos)
rev_vowel_pos = dict()
for i, c in enumerate(reversed(vowel_chars)):
rev_vowel_pos[vowel_pos[i]] = c
# Iterate through string list, replace vowel by dict: pos-vowel.
s_list = list(s)
for i in range(n):
if i in rev_vowel_pos:
s_list[i] = rev_vowel_pos[i]
return ''.join(s_list)
def main():
# Output: "holle"
s = "hello"
print SolutionReversedVowelPosDict().reverseVowels(s)
# Output: "leotcede"
s = "leetcode"
print SolutionReversedVowelPosDict().reverseVowels(s)
# Output: "epplA"
s = "Apple"
print SolutionReversedVowelPosDict().reverseVowels(s)
if __name__ == '__main__':
main()
|
Complete reversed vowel pos sol
|
Complete reversed vowel pos sol
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
"""Leetcode 345. Reverse Vowels of a String
Easy
URL: https://leetcode.com/problems/reverse-vowels-of-a-string/
Write a function that takes a string as input and reverse only the vowels of
a string.
Example 1:
Input: "hello"
Output: "holle"
Example 2:
Input: "leetcode"
Output: "leotcede"
Note:
The vowels does not include the letter "y".
"""
class Solution(object):
def reverseVowels(self, s):
"""
:type s: str
:rtype: str
"""
pass
def main():
pass
if __name__ == '__main__':
main()
Complete reversed vowel pos sol
|
"""Leetcode 345. Reverse Vowels of a String
Easy
URL: https://leetcode.com/problems/reverse-vowels-of-a-string/
Write a function that takes a string as input and reverse only the vowels of
a string.
Example 1:
Input: "hello"
Output: "holle"
Example 2:
Input: "leetcode"
Output: "leotcede"
Note:
The vowels does not include the letter "y".
"""
class SolutionReversedVowelPosDict(object):
def reverseVowels(self, s):
"""
:type s: str
:rtype: str
Time complexity: O(n).
Space complexity: O(n).
"""
if not s:
return ''
n = len(s)
# Use set to collect vowels for quick lookup.
vowels = set(['A', 'E', 'I', 'O', 'U',
'a', 'e', 'i', 'o', 'u'])
# Collect vowles & positions in a dict: pos->vowel, in reversed order.
vowel_chars = []
vowel_pos = []
for pos, c in enumerate(s):
if c in vowels:
vowel_chars.append(c)
vowel_pos.append(pos)
rev_vowel_pos = dict()
for i, c in enumerate(reversed(vowel_chars)):
rev_vowel_pos[vowel_pos[i]] = c
# Iterate through string list, replace vowel by dict: pos-vowel.
s_list = list(s)
for i in range(n):
if i in rev_vowel_pos:
s_list[i] = rev_vowel_pos[i]
return ''.join(s_list)
def main():
# Output: "holle"
s = "hello"
print SolutionReversedVowelPosDict().reverseVowels(s)
# Output: "leotcede"
s = "leetcode"
print SolutionReversedVowelPosDict().reverseVowels(s)
# Output: "epplA"
s = "Apple"
print SolutionReversedVowelPosDict().reverseVowels(s)
if __name__ == '__main__':
main()
|
<commit_before>"""Leetcode 345. Reverse Vowels of a String
Easy
URL: https://leetcode.com/problems/reverse-vowels-of-a-string/
Write a function that takes a string as input and reverse only the vowels of
a string.
Example 1:
Input: "hello"
Output: "holle"
Example 2:
Input: "leetcode"
Output: "leotcede"
Note:
The vowels does not include the letter "y".
"""
class Solution(object):
def reverseVowels(self, s):
"""
:type s: str
:rtype: str
"""
pass
def main():
pass
if __name__ == '__main__':
main()
<commit_msg>Complete reversed vowel pos sol<commit_after>
|
"""Leetcode 345. Reverse Vowels of a String
Easy
URL: https://leetcode.com/problems/reverse-vowels-of-a-string/
Write a function that takes a string as input and reverse only the vowels of
a string.
Example 1:
Input: "hello"
Output: "holle"
Example 2:
Input: "leetcode"
Output: "leotcede"
Note:
The vowels does not include the letter "y".
"""
class SolutionReversedVowelPosDict(object):
def reverseVowels(self, s):
"""
:type s: str
:rtype: str
Time complexity: O(n).
Space complexity: O(n).
"""
if not s:
return ''
n = len(s)
# Use set to collect vowels for quick lookup.
vowels = set(['A', 'E', 'I', 'O', 'U',
'a', 'e', 'i', 'o', 'u'])
# Collect vowles & positions in a dict: pos->vowel, in reversed order.
vowel_chars = []
vowel_pos = []
for pos, c in enumerate(s):
if c in vowels:
vowel_chars.append(c)
vowel_pos.append(pos)
rev_vowel_pos = dict()
for i, c in enumerate(reversed(vowel_chars)):
rev_vowel_pos[vowel_pos[i]] = c
# Iterate through string list, replace vowel by dict: pos-vowel.
s_list = list(s)
for i in range(n):
if i in rev_vowel_pos:
s_list[i] = rev_vowel_pos[i]
return ''.join(s_list)
def main():
# Output: "holle"
s = "hello"
print SolutionReversedVowelPosDict().reverseVowels(s)
# Output: "leotcede"
s = "leetcode"
print SolutionReversedVowelPosDict().reverseVowels(s)
# Output: "epplA"
s = "Apple"
print SolutionReversedVowelPosDict().reverseVowels(s)
if __name__ == '__main__':
main()
|
"""Leetcode 345. Reverse Vowels of a String
Easy
URL: https://leetcode.com/problems/reverse-vowels-of-a-string/
Write a function that takes a string as input and reverse only the vowels of
a string.
Example 1:
Input: "hello"
Output: "holle"
Example 2:
Input: "leetcode"
Output: "leotcede"
Note:
The vowels does not include the letter "y".
"""
class Solution(object):
def reverseVowels(self, s):
"""
:type s: str
:rtype: str
"""
pass
def main():
pass
if __name__ == '__main__':
main()
Complete reversed vowel pos sol"""Leetcode 345. Reverse Vowels of a String
Easy
URL: https://leetcode.com/problems/reverse-vowels-of-a-string/
Write a function that takes a string as input and reverse only the vowels of
a string.
Example 1:
Input: "hello"
Output: "holle"
Example 2:
Input: "leetcode"
Output: "leotcede"
Note:
The vowels does not include the letter "y".
"""
class SolutionReversedVowelPosDict(object):
def reverseVowels(self, s):
"""
:type s: str
:rtype: str
Time complexity: O(n).
Space complexity: O(n).
"""
if not s:
return ''
n = len(s)
# Use set to collect vowels for quick lookup.
vowels = set(['A', 'E', 'I', 'O', 'U',
'a', 'e', 'i', 'o', 'u'])
# Collect vowles & positions in a dict: pos->vowel, in reversed order.
vowel_chars = []
vowel_pos = []
for pos, c in enumerate(s):
if c in vowels:
vowel_chars.append(c)
vowel_pos.append(pos)
rev_vowel_pos = dict()
for i, c in enumerate(reversed(vowel_chars)):
rev_vowel_pos[vowel_pos[i]] = c
# Iterate through string list, replace vowel by dict: pos-vowel.
s_list = list(s)
for i in range(n):
if i in rev_vowel_pos:
s_list[i] = rev_vowel_pos[i]
return ''.join(s_list)
def main():
# Output: "holle"
s = "hello"
print SolutionReversedVowelPosDict().reverseVowels(s)
# Output: "leotcede"
s = "leetcode"
print SolutionReversedVowelPosDict().reverseVowels(s)
# Output: "epplA"
s = "Apple"
print SolutionReversedVowelPosDict().reverseVowels(s)
if __name__ == '__main__':
main()
|
<commit_before>"""Leetcode 345. Reverse Vowels of a String
Easy
URL: https://leetcode.com/problems/reverse-vowels-of-a-string/
Write a function that takes a string as input and reverse only the vowels of
a string.
Example 1:
Input: "hello"
Output: "holle"
Example 2:
Input: "leetcode"
Output: "leotcede"
Note:
The vowels does not include the letter "y".
"""
class Solution(object):
def reverseVowels(self, s):
"""
:type s: str
:rtype: str
"""
pass
def main():
pass
if __name__ == '__main__':
main()
<commit_msg>Complete reversed vowel pos sol<commit_after>"""Leetcode 345. Reverse Vowels of a String
Easy
URL: https://leetcode.com/problems/reverse-vowels-of-a-string/
Write a function that takes a string as input and reverse only the vowels of
a string.
Example 1:
Input: "hello"
Output: "holle"
Example 2:
Input: "leetcode"
Output: "leotcede"
Note:
The vowels does not include the letter "y".
"""
class SolutionReversedVowelPosDict(object):
def reverseVowels(self, s):
"""
:type s: str
:rtype: str
Time complexity: O(n).
Space complexity: O(n).
"""
if not s:
return ''
n = len(s)
# Use set to collect vowels for quick lookup.
vowels = set(['A', 'E', 'I', 'O', 'U',
'a', 'e', 'i', 'o', 'u'])
# Collect vowles & positions in a dict: pos->vowel, in reversed order.
vowel_chars = []
vowel_pos = []
for pos, c in enumerate(s):
if c in vowels:
vowel_chars.append(c)
vowel_pos.append(pos)
rev_vowel_pos = dict()
for i, c in enumerate(reversed(vowel_chars)):
rev_vowel_pos[vowel_pos[i]] = c
# Iterate through string list, replace vowel by dict: pos-vowel.
s_list = list(s)
for i in range(n):
if i in rev_vowel_pos:
s_list[i] = rev_vowel_pos[i]
return ''.join(s_list)
def main():
# Output: "holle"
s = "hello"
print SolutionReversedVowelPosDict().reverseVowels(s)
# Output: "leotcede"
s = "leetcode"
print SolutionReversedVowelPosDict().reverseVowels(s)
# Output: "epplA"
s = "Apple"
print SolutionReversedVowelPosDict().reverseVowels(s)
if __name__ == '__main__':
main()
|
bda9bc3574b14ead6f51e1fb0f6864e07ccefd88
|
Orange/classification/random_forest.py
|
Orange/classification/random_forest.py
|
# import numpy
from sklearn.ensemble import RandomForestClassifier as RandomForest
from sklearn.preprocessing import Imputer
from numpy import isnan
import Orange.data
import Orange.classification
def replace_nan(X, imp_model):
# Default scikit Imputer
# Use Orange imputer when implemented
if isnan(X).sum():
X = imp_model.transform(X)
return X
# TODO: implement sending a single decision tree
class RandomForestLearner(Orange.classification.SklFitter):
def __init__(self, n_estimators=10, max_features="auto",
random_state=None, max_depth=3, max_leaf_nodes=5):
self.params = vars()
def fit(self, X, Y, W):
self.imputer = Imputer()
self.imputer.fit(X)
X = replace_nan(X, self.imputer)
rf_model = RandomForest(**self.params)
rf_model.fit(X, Y.ravel())
return RandomForestClassifier(rf_model, self.imputer)
class RandomForestClassifier(Orange.classification.SklModel):
def __init__(self, clf, imp):
self.clf = clf
self.imputer = imp
def predict(self, X):
X = replace_nan(X, imp_model=self.imputer)
value = self.clf.predict(X)
prob = self.clf.predict_proba(X)
return value, prob
|
import numbers
from sklearn.ensemble import RandomForestClassifier as RandomForest
from sklearn.preprocessing import Imputer
from numpy import isnan
import Orange.data
import Orange.classification
def replace_nan(X, imp_model):
# Default scikit Imputer
# Use Orange imputer when implemented
if isnan(X).sum():
X = imp_model.transform(X)
return X
class RandomForestLearner(Orange.classification.SklFitter):
def __init__(self, n_estimators=10, max_features="auto",
random_state=None, max_depth=3, max_leaf_nodes=5):
self.params = vars()
def fit(self, X, Y, W):
self.imputer = Imputer()
self.imputer.fit(X)
X = replace_nan(X, self.imputer)
params = dict(self.params)
max_features = params["max_features"]
if isinstance(max_features, numbers.Integral) and \
X.shape[1] < max_features:
params["max_features"] = X.shape[1]
rf_model = RandomForest(**params)
rf_model.fit(X, Y.ravel())
return RandomForestClassifier(rf_model, self.imputer)
class RandomForestClassifier(Orange.classification.SklModel):
def __init__(self, clf, imp):
self.clf = clf
self.imputer = imp
def predict(self, X):
X = replace_nan(X, imp_model=self.imputer)
value = self.clf.predict(X)
prob = self.clf.predict_proba(X)
return value, prob
|
Fix an error when number of predictor columns is less than max_features.
|
Fix an error when number of predictor columns is less than max_features.
|
Python
|
bsd-2-clause
|
marinkaz/orange3,marinkaz/orange3,kwikadi/orange3,marinkaz/orange3,qPCR4vir/orange3,kwikadi/orange3,cheral/orange3,kwikadi/orange3,marinkaz/orange3,cheral/orange3,qPCR4vir/orange3,qPCR4vir/orange3,qPCR4vir/orange3,kwikadi/orange3,cheral/orange3,marinkaz/orange3,qusp/orange3,cheral/orange3,qusp/orange3,kwikadi/orange3,qusp/orange3,kwikadi/orange3,qusp/orange3,cheral/orange3,qPCR4vir/orange3,cheral/orange3,qPCR4vir/orange3,marinkaz/orange3
|
# import numpy
from sklearn.ensemble import RandomForestClassifier as RandomForest
from sklearn.preprocessing import Imputer
from numpy import isnan
import Orange.data
import Orange.classification
def replace_nan(X, imp_model):
# Default scikit Imputer
# Use Orange imputer when implemented
if isnan(X).sum():
X = imp_model.transform(X)
return X
# TODO: implement sending a single decision tree
class RandomForestLearner(Orange.classification.SklFitter):
def __init__(self, n_estimators=10, max_features="auto",
random_state=None, max_depth=3, max_leaf_nodes=5):
self.params = vars()
def fit(self, X, Y, W):
self.imputer = Imputer()
self.imputer.fit(X)
X = replace_nan(X, self.imputer)
rf_model = RandomForest(**self.params)
rf_model.fit(X, Y.ravel())
return RandomForestClassifier(rf_model, self.imputer)
class RandomForestClassifier(Orange.classification.SklModel):
def __init__(self, clf, imp):
self.clf = clf
self.imputer = imp
def predict(self, X):
X = replace_nan(X, imp_model=self.imputer)
value = self.clf.predict(X)
prob = self.clf.predict_proba(X)
return value, prob
Fix an error when number of predictor columns is less than max_features.
|
import numbers
from sklearn.ensemble import RandomForestClassifier as RandomForest
from sklearn.preprocessing import Imputer
from numpy import isnan
import Orange.data
import Orange.classification
def replace_nan(X, imp_model):
# Default scikit Imputer
# Use Orange imputer when implemented
if isnan(X).sum():
X = imp_model.transform(X)
return X
class RandomForestLearner(Orange.classification.SklFitter):
def __init__(self, n_estimators=10, max_features="auto",
random_state=None, max_depth=3, max_leaf_nodes=5):
self.params = vars()
def fit(self, X, Y, W):
self.imputer = Imputer()
self.imputer.fit(X)
X = replace_nan(X, self.imputer)
params = dict(self.params)
max_features = params["max_features"]
if isinstance(max_features, numbers.Integral) and \
X.shape[1] < max_features:
params["max_features"] = X.shape[1]
rf_model = RandomForest(**params)
rf_model.fit(X, Y.ravel())
return RandomForestClassifier(rf_model, self.imputer)
class RandomForestClassifier(Orange.classification.SklModel):
def __init__(self, clf, imp):
self.clf = clf
self.imputer = imp
def predict(self, X):
X = replace_nan(X, imp_model=self.imputer)
value = self.clf.predict(X)
prob = self.clf.predict_proba(X)
return value, prob
|
<commit_before># import numpy
from sklearn.ensemble import RandomForestClassifier as RandomForest
from sklearn.preprocessing import Imputer
from numpy import isnan
import Orange.data
import Orange.classification
def replace_nan(X, imp_model):
# Default scikit Imputer
# Use Orange imputer when implemented
if isnan(X).sum():
X = imp_model.transform(X)
return X
# TODO: implement sending a single decision tree
class RandomForestLearner(Orange.classification.SklFitter):
def __init__(self, n_estimators=10, max_features="auto",
random_state=None, max_depth=3, max_leaf_nodes=5):
self.params = vars()
def fit(self, X, Y, W):
self.imputer = Imputer()
self.imputer.fit(X)
X = replace_nan(X, self.imputer)
rf_model = RandomForest(**self.params)
rf_model.fit(X, Y.ravel())
return RandomForestClassifier(rf_model, self.imputer)
class RandomForestClassifier(Orange.classification.SklModel):
def __init__(self, clf, imp):
self.clf = clf
self.imputer = imp
def predict(self, X):
X = replace_nan(X, imp_model=self.imputer)
value = self.clf.predict(X)
prob = self.clf.predict_proba(X)
return value, prob
<commit_msg>Fix an error when number of predictor columns is less than max_features.<commit_after>
|
import numbers
from sklearn.ensemble import RandomForestClassifier as RandomForest
from sklearn.preprocessing import Imputer
from numpy import isnan
import Orange.data
import Orange.classification
def replace_nan(X, imp_model):
# Default scikit Imputer
# Use Orange imputer when implemented
if isnan(X).sum():
X = imp_model.transform(X)
return X
class RandomForestLearner(Orange.classification.SklFitter):
def __init__(self, n_estimators=10, max_features="auto",
random_state=None, max_depth=3, max_leaf_nodes=5):
self.params = vars()
def fit(self, X, Y, W):
self.imputer = Imputer()
self.imputer.fit(X)
X = replace_nan(X, self.imputer)
params = dict(self.params)
max_features = params["max_features"]
if isinstance(max_features, numbers.Integral) and \
X.shape[1] < max_features:
params["max_features"] = X.shape[1]
rf_model = RandomForest(**params)
rf_model.fit(X, Y.ravel())
return RandomForestClassifier(rf_model, self.imputer)
class RandomForestClassifier(Orange.classification.SklModel):
def __init__(self, clf, imp):
self.clf = clf
self.imputer = imp
def predict(self, X):
X = replace_nan(X, imp_model=self.imputer)
value = self.clf.predict(X)
prob = self.clf.predict_proba(X)
return value, prob
|
# import numpy
from sklearn.ensemble import RandomForestClassifier as RandomForest
from sklearn.preprocessing import Imputer
from numpy import isnan
import Orange.data
import Orange.classification
def replace_nan(X, imp_model):
# Default scikit Imputer
# Use Orange imputer when implemented
if isnan(X).sum():
X = imp_model.transform(X)
return X
# TODO: implement sending a single decision tree
class RandomForestLearner(Orange.classification.SklFitter):
def __init__(self, n_estimators=10, max_features="auto",
random_state=None, max_depth=3, max_leaf_nodes=5):
self.params = vars()
def fit(self, X, Y, W):
self.imputer = Imputer()
self.imputer.fit(X)
X = replace_nan(X, self.imputer)
rf_model = RandomForest(**self.params)
rf_model.fit(X, Y.ravel())
return RandomForestClassifier(rf_model, self.imputer)
class RandomForestClassifier(Orange.classification.SklModel):
def __init__(self, clf, imp):
self.clf = clf
self.imputer = imp
def predict(self, X):
X = replace_nan(X, imp_model=self.imputer)
value = self.clf.predict(X)
prob = self.clf.predict_proba(X)
return value, prob
Fix an error when number of predictor columns is less than max_features.import numbers
from sklearn.ensemble import RandomForestClassifier as RandomForest
from sklearn.preprocessing import Imputer
from numpy import isnan
import Orange.data
import Orange.classification
def replace_nan(X, imp_model):
# Default scikit Imputer
# Use Orange imputer when implemented
if isnan(X).sum():
X = imp_model.transform(X)
return X
class RandomForestLearner(Orange.classification.SklFitter):
def __init__(self, n_estimators=10, max_features="auto",
random_state=None, max_depth=3, max_leaf_nodes=5):
self.params = vars()
def fit(self, X, Y, W):
self.imputer = Imputer()
self.imputer.fit(X)
X = replace_nan(X, self.imputer)
params = dict(self.params)
max_features = params["max_features"]
if isinstance(max_features, numbers.Integral) and \
X.shape[1] < max_features:
params["max_features"] = X.shape[1]
rf_model = RandomForest(**params)
rf_model.fit(X, Y.ravel())
return RandomForestClassifier(rf_model, self.imputer)
class RandomForestClassifier(Orange.classification.SklModel):
def __init__(self, clf, imp):
self.clf = clf
self.imputer = imp
def predict(self, X):
X = replace_nan(X, imp_model=self.imputer)
value = self.clf.predict(X)
prob = self.clf.predict_proba(X)
return value, prob
|
<commit_before># import numpy
from sklearn.ensemble import RandomForestClassifier as RandomForest
from sklearn.preprocessing import Imputer
from numpy import isnan
import Orange.data
import Orange.classification
def replace_nan(X, imp_model):
# Default scikit Imputer
# Use Orange imputer when implemented
if isnan(X).sum():
X = imp_model.transform(X)
return X
# TODO: implement sending a single decision tree
class RandomForestLearner(Orange.classification.SklFitter):
def __init__(self, n_estimators=10, max_features="auto",
random_state=None, max_depth=3, max_leaf_nodes=5):
self.params = vars()
def fit(self, X, Y, W):
self.imputer = Imputer()
self.imputer.fit(X)
X = replace_nan(X, self.imputer)
rf_model = RandomForest(**self.params)
rf_model.fit(X, Y.ravel())
return RandomForestClassifier(rf_model, self.imputer)
class RandomForestClassifier(Orange.classification.SklModel):
def __init__(self, clf, imp):
self.clf = clf
self.imputer = imp
def predict(self, X):
X = replace_nan(X, imp_model=self.imputer)
value = self.clf.predict(X)
prob = self.clf.predict_proba(X)
return value, prob
<commit_msg>Fix an error when number of predictor columns is less than max_features.<commit_after>import numbers
from sklearn.ensemble import RandomForestClassifier as RandomForest
from sklearn.preprocessing import Imputer
from numpy import isnan
import Orange.data
import Orange.classification
def replace_nan(X, imp_model):
# Default scikit Imputer
# Use Orange imputer when implemented
if isnan(X).sum():
X = imp_model.transform(X)
return X
class RandomForestLearner(Orange.classification.SklFitter):
def __init__(self, n_estimators=10, max_features="auto",
random_state=None, max_depth=3, max_leaf_nodes=5):
self.params = vars()
def fit(self, X, Y, W):
self.imputer = Imputer()
self.imputer.fit(X)
X = replace_nan(X, self.imputer)
params = dict(self.params)
max_features = params["max_features"]
if isinstance(max_features, numbers.Integral) and \
X.shape[1] < max_features:
params["max_features"] = X.shape[1]
rf_model = RandomForest(**params)
rf_model.fit(X, Y.ravel())
return RandomForestClassifier(rf_model, self.imputer)
class RandomForestClassifier(Orange.classification.SklModel):
def __init__(self, clf, imp):
self.clf = clf
self.imputer = imp
def predict(self, X):
X = replace_nan(X, imp_model=self.imputer)
value = self.clf.predict(X)
prob = self.clf.predict_proba(X)
return value, prob
|
83e0394dc837e55a3ed544e54f6e84954f9311b0
|
onepercentclub/settings/travis.py
|
onepercentclub/settings/travis.py
|
# TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
from .test_runner import *
# Use firefox for running tests on Travis
SELENIUM_WEBDRIVER = 'firefox'
ROOT_URLCONF = 'onepercentclub.urls'
|
# TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
from .test_runner import *
# Use firefox for running tests on Travis
SELENIUM_WEBDRIVER = 'remote'
SELENIUM_TESTS = False
ROOT_URLCONF = 'onepercentclub.urls'
|
Disable front end tests on Travis for now.
|
Disable front end tests on Travis for now.
|
Python
|
bsd-3-clause
|
onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site
|
# TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
from .test_runner import *
# Use firefox for running tests on Travis
SELENIUM_WEBDRIVER = 'firefox'
ROOT_URLCONF = 'onepercentclub.urls'
Disable front end tests on Travis for now.
|
# TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
from .test_runner import *
# Use firefox for running tests on Travis
SELENIUM_WEBDRIVER = 'remote'
SELENIUM_TESTS = False
ROOT_URLCONF = 'onepercentclub.urls'
|
<commit_before># TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
from .test_runner import *
# Use firefox for running tests on Travis
SELENIUM_WEBDRIVER = 'firefox'
ROOT_URLCONF = 'onepercentclub.urls'
<commit_msg>Disable front end tests on Travis for now.<commit_after>
|
# TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
from .test_runner import *
# Use firefox for running tests on Travis
SELENIUM_WEBDRIVER = 'remote'
SELENIUM_TESTS = False
ROOT_URLCONF = 'onepercentclub.urls'
|
# TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
from .test_runner import *
# Use firefox for running tests on Travis
SELENIUM_WEBDRIVER = 'firefox'
ROOT_URLCONF = 'onepercentclub.urls'
Disable front end tests on Travis for now.# TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
from .test_runner import *
# Use firefox for running tests on Travis
SELENIUM_WEBDRIVER = 'remote'
SELENIUM_TESTS = False
ROOT_URLCONF = 'onepercentclub.urls'
|
<commit_before># TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
from .test_runner import *
# Use firefox for running tests on Travis
SELENIUM_WEBDRIVER = 'firefox'
ROOT_URLCONF = 'onepercentclub.urls'
<commit_msg>Disable front end tests on Travis for now.<commit_after># TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
from .test_runner import *
# Use firefox for running tests on Travis
SELENIUM_WEBDRIVER = 'remote'
SELENIUM_TESTS = False
ROOT_URLCONF = 'onepercentclub.urls'
|
124aeb597db4f4a0aa1c6d6117fe8d2facb4aacd
|
linkatos/activities.py
|
linkatos/activities.py
|
import time
import linkatos.parser as parser
import linkatos.printer as printer
import linkatos.firebase as fb
import linkatos.reaction as react
def is_empty(events):
return ((events is None) or (len(events) == 0))
def is_url(url_message):
return url_message['type'] == 'url'
def event_consumer(expecting_url, expecting_reaction, parsed_url_message,
slack_client, fb_credentials, firebase):
# Read slack events
events = slack_client.rtm_read()
time.sleep(1) # 1 second delay after reading
if is_empty(events):
return (expecting_url, expecting_reaction, parsed_url_message)
for event in events:
print(event)
print('expecting_url: ', expecting_url)
print('expecting_reaction: ', expecting_reaction)
if 'type' in event:
if expecting_url and event['type'] == 'message':
parsed_url_message = parser.parse_url_message(event)
if is_url(parsed_url_message):
printer.ask_confirmation(parsed_url_message, slack_client)
expecting_url = False
if not expecting_url and event['type'] == 'reaction_added':
reaction = parser.parse_reaction_added(event)
if react.is_confirmation(reaction['reaction'],
parsed_url_message['id'],
reaction['to_id']):
react.handle(reaction['reaction'], parsed_url_message['url'],
fb_credentials, firebase)
expecting_url = True
return (expecting_url, expecting_reaction, parsed_url_message)
|
import time
import linkatos.parser as parser
import linkatos.printer as printer
import linkatos.firebase as fb
import linkatos.reaction as react
def is_empty(events):
return ((events is None) or (len(events) == 0))
def is_url(url_message):
return url_message['type'] == 'url'
def event_consumer(expecting_url, expecting_reaction, parsed_url_message,
slack_client, fb_credentials, firebase):
# Read slack events
events = slack_client.rtm_read()
time.sleep(1) # 1 second delay after reading
if is_empty(events):
return (expecting_url, expecting_reaction, parsed_url_message)
for event in events:
print(event)
print('expecting_url: ', expecting_url)
if expecting_url and event['type'] == 'message':
parsed_url_message = parser.parse_url_message(event)
if is_url(parsed_url_message):
printer.ask_confirmation(parsed_url_message, slack_client)
expecting_url = False
if not expecting_url and event['type'] == 'reaction_added':
reaction = parser.parse_reaction_added(event)
if react.is_confirmation(reaction['reaction'],
parsed_url_message['id'],
reaction['to_id']):
react.handle(reaction['reaction'], parsed_url_message['url'],
fb_credentials, firebase)
expecting_url = True
return (expecting_url, expecting_reaction, parsed_url_message)
|
Delete condition of type in event as it should always be true
|
feat: Delete condition of type in event as it should always be true
|
Python
|
mit
|
iwi/linkatos,iwi/linkatos
|
import time
import linkatos.parser as parser
import linkatos.printer as printer
import linkatos.firebase as fb
import linkatos.reaction as react
def is_empty(events):
return ((events is None) or (len(events) == 0))
def is_url(url_message):
return url_message['type'] == 'url'
def event_consumer(expecting_url, expecting_reaction, parsed_url_message,
slack_client, fb_credentials, firebase):
# Read slack events
events = slack_client.rtm_read()
time.sleep(1) # 1 second delay after reading
if is_empty(events):
return (expecting_url, expecting_reaction, parsed_url_message)
for event in events:
print(event)
print('expecting_url: ', expecting_url)
print('expecting_reaction: ', expecting_reaction)
if 'type' in event:
if expecting_url and event['type'] == 'message':
parsed_url_message = parser.parse_url_message(event)
if is_url(parsed_url_message):
printer.ask_confirmation(parsed_url_message, slack_client)
expecting_url = False
if not expecting_url and event['type'] == 'reaction_added':
reaction = parser.parse_reaction_added(event)
if react.is_confirmation(reaction['reaction'],
parsed_url_message['id'],
reaction['to_id']):
react.handle(reaction['reaction'], parsed_url_message['url'],
fb_credentials, firebase)
expecting_url = True
return (expecting_url, expecting_reaction, parsed_url_message)
feat: Delete condition of type in event as it should always be true
|
import time
import linkatos.parser as parser
import linkatos.printer as printer
import linkatos.firebase as fb
import linkatos.reaction as react
def is_empty(events):
return ((events is None) or (len(events) == 0))
def is_url(url_message):
return url_message['type'] == 'url'
def event_consumer(expecting_url, expecting_reaction, parsed_url_message,
slack_client, fb_credentials, firebase):
# Read slack events
events = slack_client.rtm_read()
time.sleep(1) # 1 second delay after reading
if is_empty(events):
return (expecting_url, expecting_reaction, parsed_url_message)
for event in events:
print(event)
print('expecting_url: ', expecting_url)
if expecting_url and event['type'] == 'message':
parsed_url_message = parser.parse_url_message(event)
if is_url(parsed_url_message):
printer.ask_confirmation(parsed_url_message, slack_client)
expecting_url = False
if not expecting_url and event['type'] == 'reaction_added':
reaction = parser.parse_reaction_added(event)
if react.is_confirmation(reaction['reaction'],
parsed_url_message['id'],
reaction['to_id']):
react.handle(reaction['reaction'], parsed_url_message['url'],
fb_credentials, firebase)
expecting_url = True
return (expecting_url, expecting_reaction, parsed_url_message)
|
<commit_before>import time
import linkatos.parser as parser
import linkatos.printer as printer
import linkatos.firebase as fb
import linkatos.reaction as react
def is_empty(events):
return ((events is None) or (len(events) == 0))
def is_url(url_message):
return url_message['type'] == 'url'
def event_consumer(expecting_url, expecting_reaction, parsed_url_message,
slack_client, fb_credentials, firebase):
# Read slack events
events = slack_client.rtm_read()
time.sleep(1) # 1 second delay after reading
if is_empty(events):
return (expecting_url, expecting_reaction, parsed_url_message)
for event in events:
print(event)
print('expecting_url: ', expecting_url)
print('expecting_reaction: ', expecting_reaction)
if 'type' in event:
if expecting_url and event['type'] == 'message':
parsed_url_message = parser.parse_url_message(event)
if is_url(parsed_url_message):
printer.ask_confirmation(parsed_url_message, slack_client)
expecting_url = False
if not expecting_url and event['type'] == 'reaction_added':
reaction = parser.parse_reaction_added(event)
if react.is_confirmation(reaction['reaction'],
parsed_url_message['id'],
reaction['to_id']):
react.handle(reaction['reaction'], parsed_url_message['url'],
fb_credentials, firebase)
expecting_url = True
return (expecting_url, expecting_reaction, parsed_url_message)
<commit_msg>feat: Delete condition of type in event as it should always be true<commit_after>
|
import time
import linkatos.parser as parser
import linkatos.printer as printer
import linkatos.firebase as fb
import linkatos.reaction as react
def is_empty(events):
return ((events is None) or (len(events) == 0))
def is_url(url_message):
return url_message['type'] == 'url'
def event_consumer(expecting_url, expecting_reaction, parsed_url_message,
slack_client, fb_credentials, firebase):
# Read slack events
events = slack_client.rtm_read()
time.sleep(1) # 1 second delay after reading
if is_empty(events):
return (expecting_url, expecting_reaction, parsed_url_message)
for event in events:
print(event)
print('expecting_url: ', expecting_url)
if expecting_url and event['type'] == 'message':
parsed_url_message = parser.parse_url_message(event)
if is_url(parsed_url_message):
printer.ask_confirmation(parsed_url_message, slack_client)
expecting_url = False
if not expecting_url and event['type'] == 'reaction_added':
reaction = parser.parse_reaction_added(event)
if react.is_confirmation(reaction['reaction'],
parsed_url_message['id'],
reaction['to_id']):
react.handle(reaction['reaction'], parsed_url_message['url'],
fb_credentials, firebase)
expecting_url = True
return (expecting_url, expecting_reaction, parsed_url_message)
|
import time
import linkatos.parser as parser
import linkatos.printer as printer
import linkatos.firebase as fb
import linkatos.reaction as react
def is_empty(events):
return ((events is None) or (len(events) == 0))
def is_url(url_message):
return url_message['type'] == 'url'
def event_consumer(expecting_url, expecting_reaction, parsed_url_message,
slack_client, fb_credentials, firebase):
# Read slack events
events = slack_client.rtm_read()
time.sleep(1) # 1 second delay after reading
if is_empty(events):
return (expecting_url, expecting_reaction, parsed_url_message)
for event in events:
print(event)
print('expecting_url: ', expecting_url)
print('expecting_reaction: ', expecting_reaction)
if 'type' in event:
if expecting_url and event['type'] == 'message':
parsed_url_message = parser.parse_url_message(event)
if is_url(parsed_url_message):
printer.ask_confirmation(parsed_url_message, slack_client)
expecting_url = False
if not expecting_url and event['type'] == 'reaction_added':
reaction = parser.parse_reaction_added(event)
if react.is_confirmation(reaction['reaction'],
parsed_url_message['id'],
reaction['to_id']):
react.handle(reaction['reaction'], parsed_url_message['url'],
fb_credentials, firebase)
expecting_url = True
return (expecting_url, expecting_reaction, parsed_url_message)
feat: Delete condition of type in event as it should always be trueimport time
import linkatos.parser as parser
import linkatos.printer as printer
import linkatos.firebase as fb
import linkatos.reaction as react
def is_empty(events):
return ((events is None) or (len(events) == 0))
def is_url(url_message):
return url_message['type'] == 'url'
def event_consumer(expecting_url, expecting_reaction, parsed_url_message,
slack_client, fb_credentials, firebase):
# Read slack events
events = slack_client.rtm_read()
time.sleep(1) # 1 second delay after reading
if is_empty(events):
return (expecting_url, expecting_reaction, parsed_url_message)
for event in events:
print(event)
print('expecting_url: ', expecting_url)
if expecting_url and event['type'] == 'message':
parsed_url_message = parser.parse_url_message(event)
if is_url(parsed_url_message):
printer.ask_confirmation(parsed_url_message, slack_client)
expecting_url = False
if not expecting_url and event['type'] == 'reaction_added':
reaction = parser.parse_reaction_added(event)
if react.is_confirmation(reaction['reaction'],
parsed_url_message['id'],
reaction['to_id']):
react.handle(reaction['reaction'], parsed_url_message['url'],
fb_credentials, firebase)
expecting_url = True
return (expecting_url, expecting_reaction, parsed_url_message)
|
<commit_before>import time
import linkatos.parser as parser
import linkatos.printer as printer
import linkatos.firebase as fb
import linkatos.reaction as react
def is_empty(events):
return ((events is None) or (len(events) == 0))
def is_url(url_message):
return url_message['type'] == 'url'
def event_consumer(expecting_url, expecting_reaction, parsed_url_message,
slack_client, fb_credentials, firebase):
# Read slack events
events = slack_client.rtm_read()
time.sleep(1) # 1 second delay after reading
if is_empty(events):
return (expecting_url, expecting_reaction, parsed_url_message)
for event in events:
print(event)
print('expecting_url: ', expecting_url)
print('expecting_reaction: ', expecting_reaction)
if 'type' in event:
if expecting_url and event['type'] == 'message':
parsed_url_message = parser.parse_url_message(event)
if is_url(parsed_url_message):
printer.ask_confirmation(parsed_url_message, slack_client)
expecting_url = False
if not expecting_url and event['type'] == 'reaction_added':
reaction = parser.parse_reaction_added(event)
if react.is_confirmation(reaction['reaction'],
parsed_url_message['id'],
reaction['to_id']):
react.handle(reaction['reaction'], parsed_url_message['url'],
fb_credentials, firebase)
expecting_url = True
return (expecting_url, expecting_reaction, parsed_url_message)
<commit_msg>feat: Delete condition of type in event as it should always be true<commit_after>import time
import linkatos.parser as parser
import linkatos.printer as printer
import linkatos.firebase as fb
import linkatos.reaction as react
def is_empty(events):
return ((events is None) or (len(events) == 0))
def is_url(url_message):
return url_message['type'] == 'url'
def event_consumer(expecting_url, expecting_reaction, parsed_url_message,
slack_client, fb_credentials, firebase):
# Read slack events
events = slack_client.rtm_read()
time.sleep(1) # 1 second delay after reading
if is_empty(events):
return (expecting_url, expecting_reaction, parsed_url_message)
for event in events:
print(event)
print('expecting_url: ', expecting_url)
if expecting_url and event['type'] == 'message':
parsed_url_message = parser.parse_url_message(event)
if is_url(parsed_url_message):
printer.ask_confirmation(parsed_url_message, slack_client)
expecting_url = False
if not expecting_url and event['type'] == 'reaction_added':
reaction = parser.parse_reaction_added(event)
if react.is_confirmation(reaction['reaction'],
parsed_url_message['id'],
reaction['to_id']):
react.handle(reaction['reaction'], parsed_url_message['url'],
fb_credentials, firebase)
expecting_url = True
return (expecting_url, expecting_reaction, parsed_url_message)
|
d830e9ebe103b94fd214477cb83ad824fd27e70f
|
mcbench/settings.py
|
mcbench/settings.py
|
import os
root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
DB_PATH = os.path.join(root, 'mcbench.sqlite')
DATA_ROOT = os.path.expanduser('~/mcbench-benchmarks')
SECRET_KEY = ''
try:
from local_settings import *
except ImportError:
pass
|
import os

# Repository root: two directory levels up from this settings module.
root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# SQLite database file lives at the repository root.
DB_PATH = os.path.join(root, 'mcbench.sqlite')
# Directory containing the benchmark checkouts.
DATA_ROOT = os.path.expanduser('~/mcbench-benchmarks')
# Placeholder secret for development/CI; expected to be overridden by
# local_settings below in real deployments.
SECRET_KEY = 'dummy'
try:
    from local_settings import *
except ImportError:
    # No local overrides present; the defaults above are used as-is.
    pass
|
Add dummy SECRET_KEY for development (and Travis).
|
Add dummy SECRET_KEY for development (and Travis).
|
Python
|
mit
|
isbadawi/mcbench,isbadawi/mcbench
|
import os
root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
DB_PATH = os.path.join(root, 'mcbench.sqlite')
DATA_ROOT = os.path.expanduser('~/mcbench-benchmarks')
SECRET_KEY = ''
try:
from local_settings import *
except ImportError:
pass
Add dummy SECRET_KEY for development (and Travis).
|
import os
root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
DB_PATH = os.path.join(root, 'mcbench.sqlite')
DATA_ROOT = os.path.expanduser('~/mcbench-benchmarks')
SECRET_KEY = 'dummy'
try:
from local_settings import *
except ImportError:
pass
|
<commit_before>import os
root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
DB_PATH = os.path.join(root, 'mcbench.sqlite')
DATA_ROOT = os.path.expanduser('~/mcbench-benchmarks')
SECRET_KEY = ''
try:
from local_settings import *
except ImportError:
pass
<commit_msg>Add dummy SECRET_KEY for development (and Travis).<commit_after>
|
import os
root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
DB_PATH = os.path.join(root, 'mcbench.sqlite')
DATA_ROOT = os.path.expanduser('~/mcbench-benchmarks')
SECRET_KEY = 'dummy'
try:
from local_settings import *
except ImportError:
pass
|
import os
root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
DB_PATH = os.path.join(root, 'mcbench.sqlite')
DATA_ROOT = os.path.expanduser('~/mcbench-benchmarks')
SECRET_KEY = ''
try:
from local_settings import *
except ImportError:
pass
Add dummy SECRET_KEY for development (and Travis).import os
root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
DB_PATH = os.path.join(root, 'mcbench.sqlite')
DATA_ROOT = os.path.expanduser('~/mcbench-benchmarks')
SECRET_KEY = 'dummy'
try:
from local_settings import *
except ImportError:
pass
|
<commit_before>import os
root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
DB_PATH = os.path.join(root, 'mcbench.sqlite')
DATA_ROOT = os.path.expanduser('~/mcbench-benchmarks')
SECRET_KEY = ''
try:
from local_settings import *
except ImportError:
pass
<commit_msg>Add dummy SECRET_KEY for development (and Travis).<commit_after>import os
root = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
DB_PATH = os.path.join(root, 'mcbench.sqlite')
DATA_ROOT = os.path.expanduser('~/mcbench-benchmarks')
SECRET_KEY = 'dummy'
try:
from local_settings import *
except ImportError:
pass
|
7cb7a37206d4b729dc8708e3152f5423ddfa1b8a
|
wagtail/admin/forms/choosers.py
|
wagtail/admin/forms/choosers.py
|
from django import forms
from django.core import validators
from django.forms.widgets import TextInput
from django.utils.translation import ugettext_lazy
class URLOrAbsolutePathValidator(validators.URLValidator):
@staticmethod
def is_absolute_path(value):
return value.startswith('/')
def __call__(self, value):
if URLOrAbsolutePathValidator.is_absolute_path(value):
return None
else:
return super().__call__(value)
class URLOrAbsolutePathField(forms.URLField):
widget = TextInput
default_validators = [URLOrAbsolutePathValidator()]
def to_python(self, value):
if not URLOrAbsolutePathValidator.is_absolute_path(value):
value = super().to_python(value)
return value
class ExternalLinkChooserForm(forms.Form):
url = URLOrAbsolutePathField(required=True, label=ugettext_lazy(""))
link_text = forms.CharField(required=False)
class AnchorLinkChooserForm(forms.Form):
url = forms.CharField(required=True, label=ugettext_lazy("#"))
link_text = forms.CharField(required=False)
class EmailLinkChooserForm(forms.Form):
email_address = forms.EmailField(required=True)
link_text = forms.CharField(required=False)
class PhoneLinkChooserForm(forms.Form):
phone_number = forms.CharField(required=True)
link_text = forms.CharField(required=False)
|
from django import forms
from django.core import validators
from django.forms.widgets import TextInput
class URLOrAbsolutePathValidator(validators.URLValidator):
    """URL validator that additionally accepts site-absolute paths ('/...')."""

    @staticmethod
    def is_absolute_path(value):
        """Return True for values that begin with a forward slash."""
        return value.startswith('/')

    def __call__(self, value):
        # Absolute paths are always accepted; anything else must pass the
        # normal URLValidator checks (which raise ValidationError).
        if not URLOrAbsolutePathValidator.is_absolute_path(value):
            return super().__call__(value)
        return None
class URLOrAbsolutePathField(forms.URLField):
    """URLField variant whose value may also be a site-absolute path."""

    widget = TextInput
    default_validators = [URLOrAbsolutePathValidator()]

    def to_python(self, value):
        # Absolute paths skip URLField's normalisation entirely; any other
        # value goes through the standard URL conversion.
        if URLOrAbsolutePathValidator.is_absolute_path(value):
            return value
        return super().to_python(value)
class ExternalLinkChooserForm(forms.Form):
    """Chooser form for linking to an external URL or site-absolute path."""
    url = URLOrAbsolutePathField(required=True, label="")
    link_text = forms.CharField(required=False)
class AnchorLinkChooserForm(forms.Form):
    """Chooser form for linking to an in-page anchor (label shows '#')."""
    url = forms.CharField(required=True, label="#")
    link_text = forms.CharField(required=False)
class EmailLinkChooserForm(forms.Form):
    """Chooser form for creating a mailto: link."""
    email_address = forms.EmailField(required=True)
    link_text = forms.CharField(required=False)
class PhoneLinkChooserForm(forms.Form):
    """Chooser form for creating a tel: link (number is free-form text)."""
    phone_number = forms.CharField(required=True)
    link_text = forms.CharField(required=False)
|
Remove redundant ugettext_lazy from non-text labels
|
Remove redundant ugettext_lazy from non-text labels
|
Python
|
bsd-3-clause
|
thenewguy/wagtail,FlipperPA/wagtail,jnns/wagtail,mixxorz/wagtail,kaedroho/wagtail,jnns/wagtail,thenewguy/wagtail,torchbox/wagtail,timorieber/wagtail,takeflight/wagtail,thenewguy/wagtail,gasman/wagtail,kaedroho/wagtail,zerolab/wagtail,kaedroho/wagtail,kaedroho/wagtail,torchbox/wagtail,jnns/wagtail,wagtail/wagtail,mixxorz/wagtail,rsalmaso/wagtail,thenewguy/wagtail,nimasmi/wagtail,wagtail/wagtail,wagtail/wagtail,takeflight/wagtail,gasman/wagtail,rsalmaso/wagtail,zerolab/wagtail,wagtail/wagtail,torchbox/wagtail,gasman/wagtail,zerolab/wagtail,rsalmaso/wagtail,rsalmaso/wagtail,mixxorz/wagtail,nimasmi/wagtail,FlipperPA/wagtail,zerolab/wagtail,gasman/wagtail,FlipperPA/wagtail,timorieber/wagtail,mixxorz/wagtail,FlipperPA/wagtail,takeflight/wagtail,kaedroho/wagtail,gasman/wagtail,nimasmi/wagtail,mixxorz/wagtail,rsalmaso/wagtail,timorieber/wagtail,zerolab/wagtail,takeflight/wagtail,torchbox/wagtail,nimasmi/wagtail,jnns/wagtail,wagtail/wagtail,timorieber/wagtail,thenewguy/wagtail
|
from django import forms
from django.core import validators
from django.forms.widgets import TextInput
from django.utils.translation import ugettext_lazy
class URLOrAbsolutePathValidator(validators.URLValidator):
@staticmethod
def is_absolute_path(value):
return value.startswith('/')
def __call__(self, value):
if URLOrAbsolutePathValidator.is_absolute_path(value):
return None
else:
return super().__call__(value)
class URLOrAbsolutePathField(forms.URLField):
widget = TextInput
default_validators = [URLOrAbsolutePathValidator()]
def to_python(self, value):
if not URLOrAbsolutePathValidator.is_absolute_path(value):
value = super().to_python(value)
return value
class ExternalLinkChooserForm(forms.Form):
url = URLOrAbsolutePathField(required=True, label=ugettext_lazy(""))
link_text = forms.CharField(required=False)
class AnchorLinkChooserForm(forms.Form):
url = forms.CharField(required=True, label=ugettext_lazy("#"))
link_text = forms.CharField(required=False)
class EmailLinkChooserForm(forms.Form):
email_address = forms.EmailField(required=True)
link_text = forms.CharField(required=False)
class PhoneLinkChooserForm(forms.Form):
phone_number = forms.CharField(required=True)
link_text = forms.CharField(required=False)
Remove redundant ugettext_lazy from non-text labels
|
from django import forms
from django.core import validators
from django.forms.widgets import TextInput
class URLOrAbsolutePathValidator(validators.URLValidator):
@staticmethod
def is_absolute_path(value):
return value.startswith('/')
def __call__(self, value):
if URLOrAbsolutePathValidator.is_absolute_path(value):
return None
else:
return super().__call__(value)
class URLOrAbsolutePathField(forms.URLField):
widget = TextInput
default_validators = [URLOrAbsolutePathValidator()]
def to_python(self, value):
if not URLOrAbsolutePathValidator.is_absolute_path(value):
value = super().to_python(value)
return value
class ExternalLinkChooserForm(forms.Form):
url = URLOrAbsolutePathField(required=True, label="")
link_text = forms.CharField(required=False)
class AnchorLinkChooserForm(forms.Form):
url = forms.CharField(required=True, label="#")
link_text = forms.CharField(required=False)
class EmailLinkChooserForm(forms.Form):
email_address = forms.EmailField(required=True)
link_text = forms.CharField(required=False)
class PhoneLinkChooserForm(forms.Form):
phone_number = forms.CharField(required=True)
link_text = forms.CharField(required=False)
|
<commit_before>from django import forms
from django.core import validators
from django.forms.widgets import TextInput
from django.utils.translation import ugettext_lazy
class URLOrAbsolutePathValidator(validators.URLValidator):
@staticmethod
def is_absolute_path(value):
return value.startswith('/')
def __call__(self, value):
if URLOrAbsolutePathValidator.is_absolute_path(value):
return None
else:
return super().__call__(value)
class URLOrAbsolutePathField(forms.URLField):
widget = TextInput
default_validators = [URLOrAbsolutePathValidator()]
def to_python(self, value):
if not URLOrAbsolutePathValidator.is_absolute_path(value):
value = super().to_python(value)
return value
class ExternalLinkChooserForm(forms.Form):
url = URLOrAbsolutePathField(required=True, label=ugettext_lazy(""))
link_text = forms.CharField(required=False)
class AnchorLinkChooserForm(forms.Form):
url = forms.CharField(required=True, label=ugettext_lazy("#"))
link_text = forms.CharField(required=False)
class EmailLinkChooserForm(forms.Form):
email_address = forms.EmailField(required=True)
link_text = forms.CharField(required=False)
class PhoneLinkChooserForm(forms.Form):
phone_number = forms.CharField(required=True)
link_text = forms.CharField(required=False)
<commit_msg>Remove redundant ugettext_lazy from non-text labels<commit_after>
|
from django import forms
from django.core import validators
from django.forms.widgets import TextInput
class URLOrAbsolutePathValidator(validators.URLValidator):
@staticmethod
def is_absolute_path(value):
return value.startswith('/')
def __call__(self, value):
if URLOrAbsolutePathValidator.is_absolute_path(value):
return None
else:
return super().__call__(value)
class URLOrAbsolutePathField(forms.URLField):
widget = TextInput
default_validators = [URLOrAbsolutePathValidator()]
def to_python(self, value):
if not URLOrAbsolutePathValidator.is_absolute_path(value):
value = super().to_python(value)
return value
class ExternalLinkChooserForm(forms.Form):
url = URLOrAbsolutePathField(required=True, label="")
link_text = forms.CharField(required=False)
class AnchorLinkChooserForm(forms.Form):
url = forms.CharField(required=True, label="#")
link_text = forms.CharField(required=False)
class EmailLinkChooserForm(forms.Form):
email_address = forms.EmailField(required=True)
link_text = forms.CharField(required=False)
class PhoneLinkChooserForm(forms.Form):
phone_number = forms.CharField(required=True)
link_text = forms.CharField(required=False)
|
from django import forms
from django.core import validators
from django.forms.widgets import TextInput
from django.utils.translation import ugettext_lazy
class URLOrAbsolutePathValidator(validators.URLValidator):
@staticmethod
def is_absolute_path(value):
return value.startswith('/')
def __call__(self, value):
if URLOrAbsolutePathValidator.is_absolute_path(value):
return None
else:
return super().__call__(value)
class URLOrAbsolutePathField(forms.URLField):
widget = TextInput
default_validators = [URLOrAbsolutePathValidator()]
def to_python(self, value):
if not URLOrAbsolutePathValidator.is_absolute_path(value):
value = super().to_python(value)
return value
class ExternalLinkChooserForm(forms.Form):
url = URLOrAbsolutePathField(required=True, label=ugettext_lazy(""))
link_text = forms.CharField(required=False)
class AnchorLinkChooserForm(forms.Form):
url = forms.CharField(required=True, label=ugettext_lazy("#"))
link_text = forms.CharField(required=False)
class EmailLinkChooserForm(forms.Form):
email_address = forms.EmailField(required=True)
link_text = forms.CharField(required=False)
class PhoneLinkChooserForm(forms.Form):
phone_number = forms.CharField(required=True)
link_text = forms.CharField(required=False)
Remove redundant ugettext_lazy from non-text labelsfrom django import forms
from django.core import validators
from django.forms.widgets import TextInput
class URLOrAbsolutePathValidator(validators.URLValidator):
@staticmethod
def is_absolute_path(value):
return value.startswith('/')
def __call__(self, value):
if URLOrAbsolutePathValidator.is_absolute_path(value):
return None
else:
return super().__call__(value)
class URLOrAbsolutePathField(forms.URLField):
widget = TextInput
default_validators = [URLOrAbsolutePathValidator()]
def to_python(self, value):
if not URLOrAbsolutePathValidator.is_absolute_path(value):
value = super().to_python(value)
return value
class ExternalLinkChooserForm(forms.Form):
url = URLOrAbsolutePathField(required=True, label="")
link_text = forms.CharField(required=False)
class AnchorLinkChooserForm(forms.Form):
url = forms.CharField(required=True, label="#")
link_text = forms.CharField(required=False)
class EmailLinkChooserForm(forms.Form):
email_address = forms.EmailField(required=True)
link_text = forms.CharField(required=False)
class PhoneLinkChooserForm(forms.Form):
phone_number = forms.CharField(required=True)
link_text = forms.CharField(required=False)
|
<commit_before>from django import forms
from django.core import validators
from django.forms.widgets import TextInput
from django.utils.translation import ugettext_lazy
class URLOrAbsolutePathValidator(validators.URLValidator):
@staticmethod
def is_absolute_path(value):
return value.startswith('/')
def __call__(self, value):
if URLOrAbsolutePathValidator.is_absolute_path(value):
return None
else:
return super().__call__(value)
class URLOrAbsolutePathField(forms.URLField):
widget = TextInput
default_validators = [URLOrAbsolutePathValidator()]
def to_python(self, value):
if not URLOrAbsolutePathValidator.is_absolute_path(value):
value = super().to_python(value)
return value
class ExternalLinkChooserForm(forms.Form):
url = URLOrAbsolutePathField(required=True, label=ugettext_lazy(""))
link_text = forms.CharField(required=False)
class AnchorLinkChooserForm(forms.Form):
url = forms.CharField(required=True, label=ugettext_lazy("#"))
link_text = forms.CharField(required=False)
class EmailLinkChooserForm(forms.Form):
email_address = forms.EmailField(required=True)
link_text = forms.CharField(required=False)
class PhoneLinkChooserForm(forms.Form):
phone_number = forms.CharField(required=True)
link_text = forms.CharField(required=False)
<commit_msg>Remove redundant ugettext_lazy from non-text labels<commit_after>from django import forms
from django.core import validators
from django.forms.widgets import TextInput
class URLOrAbsolutePathValidator(validators.URLValidator):
@staticmethod
def is_absolute_path(value):
return value.startswith('/')
def __call__(self, value):
if URLOrAbsolutePathValidator.is_absolute_path(value):
return None
else:
return super().__call__(value)
class URLOrAbsolutePathField(forms.URLField):
widget = TextInput
default_validators = [URLOrAbsolutePathValidator()]
def to_python(self, value):
if not URLOrAbsolutePathValidator.is_absolute_path(value):
value = super().to_python(value)
return value
class ExternalLinkChooserForm(forms.Form):
url = URLOrAbsolutePathField(required=True, label="")
link_text = forms.CharField(required=False)
class AnchorLinkChooserForm(forms.Form):
url = forms.CharField(required=True, label="#")
link_text = forms.CharField(required=False)
class EmailLinkChooserForm(forms.Form):
email_address = forms.EmailField(required=True)
link_text = forms.CharField(required=False)
class PhoneLinkChooserForm(forms.Form):
phone_number = forms.CharField(required=True)
link_text = forms.CharField(required=False)
|
874ead2ed9de86eea20c4a67ce7b53cb2766c09e
|
erpnext/patches/v5_0/link_warehouse_with_account.py
|
erpnext/patches/v5_0/link_warehouse_with_account.py
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.db.sql("""update tabAccount set warehouse=master_name
where ifnull(account_type, '') = 'Warehouse' and ifnull(master_name, '') != ''""")
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
    """Patch: copy ``master_name`` into the Account.warehouse link field.

    Older schemas stored the linked warehouse in ``master_name``; the
    column-existence guard makes the patch a no-op on databases where
    that column is absent (already migrated/dropped).
    """
    if "master_name" in frappe.db.get_table_columns("Account"):
        frappe.db.sql("""update tabAccount set warehouse=master_name
            where ifnull(account_type, '') = 'Warehouse' and ifnull(master_name, '') != ''""")
|
Update warehouse as per master_name if master_name exists
|
Update warehouse as per master_name if master_name exists
|
Python
|
agpl-3.0
|
indictranstech/fbd_erpnext,gangadharkadam/saloon_erp_install,mbauskar/helpdesk-erpnext,gmarke/erpnext,Tejal011089/paypal_erpnext,Tejal011089/trufil-erpnext,treejames/erpnext,indictranstech/reciphergroup-erpnext,pombredanne/erpnext,gangadharkadam/saloon_erp,gangadharkadam/vlinkerp,hatwar/buyback-erpnext,shft117/SteckerApp,Drooids/erpnext,treejames/erpnext,mbauskar/omnitech-erpnext,susuchina/ERPNEXT,gmarke/erpnext,shft117/SteckerApp,mbauskar/alec_frappe5_erpnext,indictranstech/reciphergroup-erpnext,mbauskar/omnitech-demo-erpnext,Tejal011089/fbd_erpnext,Tejal011089/fbd_erpnext,sheafferusa/erpnext,mbauskar/alec_frappe5_erpnext,fuhongliang/erpnext,geekroot/erpnext,mahabuber/erpnext,hatwar/buyback-erpnext,saurabh6790/test-erp,gangadharkadam/saloon_erp,Tejal011089/osmosis_erpnext,mbauskar/Das_Erpnext,mbauskar/alec_frappe5_erpnext,gangadharkadam/contributionerp,mbauskar/helpdesk-erpnext,meisterkleister/erpnext,indictranstech/fbd_erpnext,SPKian/Testing2,hanselke/erpnext-1,sheafferusa/erpnext,hatwar/Das_erpnext,indictranstech/trufil-erpnext,mbauskar/omnitech-erpnext,anandpdoshi/erpnext,hatwar/buyback-erpnext,Tejal011089/osmosis_erpnext,susuchina/ERPNEXT,gangadharkadam/vlinkerp,mbauskar/helpdesk-erpnext,indictranstech/tele-erpnext,indictranstech/Das_Erpnext,mbauskar/Das_Erpnext,tmimori/erpnext,Aptitudetech/ERPNext,netfirms/erpnext,gangadharkadam/contributionerp,netfirms/erpnext,rohitwaghchaure/GenieManager-erpnext,ShashaQin/erpnext,pombredanne/erpnext,SPKian/Testing,hanselke/erpnext-1,hernad/erpnext,mbauskar/sapphire-erpnext,hernad/erpnext,mahabuber/erpnext,anandpdoshi/erpnext,Tejal011089/osmosis_erpnext,hanselke/erpnext-1,susuchina/ERPNEXT,rohitwaghchaure/erpnext-receipher,indictranstech/reciphergroup-erpnext,MartinEnder/erpnext-de,Tejal011089/huntercamp_erpnext,ThiagoGarciaAlves/erpnext,shft117/SteckerApp,rohitwaghchaure/GenieManager-erpnext,SPKian/Testing2,shitolepriya/test-erp,saurabh6790/test-erp,Drooids/erpnext,njmube/erpnext,pombredanne/erpnext,gsnbng/erpnext,mbauskar/o
mnitech-erpnext,gangadharkadam/v6_erp,indictranstech/biggift-erpnext,rohitwaghchaure/GenieManager-erpnext,indictranstech/tele-erpnext,ShashaQin/erpnext,gangadharkadam/saloon_erp_install,tmimori/erpnext,Tejal011089/huntercamp_erpnext,Tejal011089/paypal_erpnext,indictranstech/erpnext,njmube/erpnext,mbauskar/Das_Erpnext,ThiagoGarciaAlves/erpnext,hatwar/Das_erpnext,gangadhar-kadam/helpdesk-erpnext,hernad/erpnext,rohitwaghchaure/GenieManager-erpnext,Drooids/erpnext,indictranstech/reciphergroup-erpnext,shitolepriya/test-erp,fuhongliang/erpnext,dieface/erpnext,indictranstech/osmosis-erpnext,gangadharkadam/contributionerp,Tejal011089/huntercamp_erpnext,mahabuber/erpnext,mbauskar/Das_Erpnext,gmarke/erpnext,indictranstech/tele-erpnext,saurabh6790/test-erp,pombredanne/erpnext,Suninus/erpnext,ShashaQin/erpnext,sheafferusa/erpnext,treejames/erpnext,SPKian/Testing,fuhongliang/erpnext,indictranstech/fbd_erpnext,mahabuber/erpnext,gangadharkadam/saloon_erp_install,MartinEnder/erpnext-de,Suninus/erpnext,ThiagoGarciaAlves/erpnext,Tejal011089/trufil-erpnext,MartinEnder/erpnext-de,Suninus/erpnext,indictranstech/erpnext,rohitwaghchaure/erpnext-receipher,mbauskar/helpdesk-erpnext,indictranstech/biggift-erpnext,indictranstech/Das_Erpnext,gangadharkadam/v6_erp,gmarke/erpnext,gsnbng/erpnext,sagar30051991/ozsmart-erp,indictranstech/erpnext,geekroot/erpnext,susuchina/ERPNEXT,netfirms/erpnext,dieface/erpnext,SPKian/Testing,indictranstech/fbd_erpnext,treejames/erpnext,tmimori/erpnext,gangadharkadam/saloon_erp,indictranstech/tele-erpnext,hatwar/Das_erpnext,aruizramon/alec_erpnext,mbauskar/sapphire-erpnext,ThiagoGarciaAlves/erpnext,mbauskar/omnitech-erpnext,Tejal011089/osmosis_erpnext,Tejal011089/huntercamp_erpnext,indictranstech/osmosis-erpnext,fuhongliang/erpnext,gangadharkadam/v6_erp,gangadharkadam/vlinkerp,aruizramon/alec_erpnext,indictranstech/osmosis-erpnext,SPKian/Testing,sheafferusa/erpnext,gangadhar-kadam/helpdesk-erpnext,Tejal011089/trufil-erpnext,indictranstech/trufil-erpnext,anandpdosh
i/erpnext,indictranstech/Das_Erpnext,gangadhar-kadam/helpdesk-erpnext,netfirms/erpnext,mbauskar/sapphire-erpnext,gangadharkadam/saloon_erp_install,dieface/erpnext,aruizramon/alec_erpnext,shft117/SteckerApp,sagar30051991/ozsmart-erp,gangadharkadam/vlinkerp,saurabh6790/test-erp,Drooids/erpnext,indictranstech/biggift-erpnext,SPKian/Testing2,Suninus/erpnext,gsnbng/erpnext,indictranstech/trufil-erpnext,mbauskar/sapphire-erpnext,Tejal011089/fbd_erpnext,ShashaQin/erpnext,shitolepriya/test-erp,njmube/erpnext,MartinEnder/erpnext-de,Tejal011089/paypal_erpnext,aruizramon/alec_erpnext,tmimori/erpnext,Tejal011089/fbd_erpnext,indictranstech/trufil-erpnext,mbauskar/omnitech-demo-erpnext,gangadhar-kadam/helpdesk-erpnext,sagar30051991/ozsmart-erp,indictranstech/Das_Erpnext,hatwar/buyback-erpnext,anandpdoshi/erpnext,indictranstech/osmosis-erpnext,rohitwaghchaure/erpnext-receipher,meisterkleister/erpnext,hanselke/erpnext-1,gangadharkadam/saloon_erp,sagar30051991/ozsmart-erp,mbauskar/alec_frappe5_erpnext,SPKian/Testing2,mbauskar/omnitech-demo-erpnext,geekroot/erpnext,njmube/erpnext,meisterkleister/erpnext,hatwar/Das_erpnext,hernad/erpnext,indictranstech/erpnext,gsnbng/erpnext,gangadharkadam/contributionerp,indictranstech/biggift-erpnext,meisterkleister/erpnext,gangadharkadam/v6_erp,shitolepriya/test-erp,geekroot/erpnext,Tejal011089/paypal_erpnext,dieface/erpnext,mbauskar/omnitech-demo-erpnext,Tejal011089/trufil-erpnext,rohitwaghchaure/erpnext-receipher
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.db.sql("""update tabAccount set warehouse=master_name
where ifnull(account_type, '') = 'Warehouse' and ifnull(master_name, '') != ''""")Update warehouse as per master_name if master_name exists
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
if "master_name" in frappe.db.get_table_columns("Account"):
frappe.db.sql("""update tabAccount set warehouse=master_name
where ifnull(account_type, '') = 'Warehouse' and ifnull(master_name, '') != ''""")
|
<commit_before># Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.db.sql("""update tabAccount set warehouse=master_name
where ifnull(account_type, '') = 'Warehouse' and ifnull(master_name, '') != ''""")<commit_msg>Update warehouse as per master_name if master_name exists<commit_after>
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
if "master_name" in frappe.db.get_table_columns("Account"):
frappe.db.sql("""update tabAccount set warehouse=master_name
where ifnull(account_type, '') = 'Warehouse' and ifnull(master_name, '') != ''""")
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.db.sql("""update tabAccount set warehouse=master_name
where ifnull(account_type, '') = 'Warehouse' and ifnull(master_name, '') != ''""")Update warehouse as per master_name if master_name exists# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
if "master_name" in frappe.db.get_table_columns("Account"):
frappe.db.sql("""update tabAccount set warehouse=master_name
where ifnull(account_type, '') = 'Warehouse' and ifnull(master_name, '') != ''""")
|
<commit_before># Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.db.sql("""update tabAccount set warehouse=master_name
where ifnull(account_type, '') = 'Warehouse' and ifnull(master_name, '') != ''""")<commit_msg>Update warehouse as per master_name if master_name exists<commit_after># Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
if "master_name" in frappe.db.get_table_columns("Account"):
frappe.db.sql("""update tabAccount set warehouse=master_name
where ifnull(account_type, '') = 'Warehouse' and ifnull(master_name, '') != ''""")
|
c8392e6c0210c9b308927c807c44449ebd31694e
|
enthought/traits/ui/editors/date_editor.py
|
enthought/traits/ui/editors/date_editor.py
|
#------------------------------------------------------------------------------
#
# Copyright (c) 2008, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
#
# Author: Judah De Paula
# Date: 10/7/2008
#
#------------------------------------------------------------------------------
"""
A Traits UI editor that wraps a WX calendar panel.
"""
import datetime
from enthought.traits.traits import Property
from enthought.traits.ui.editor_factory import EditorFactory
from enthought.traits.ui.toolkit import toolkit_object
#-- DateEditor definition -----------------------------------------------------
class DateEditor ( EditorFactory ):
"""
Editor factory for date/time editors.
"""
#---------------------------------------------------------------------------
# Trait definitions:
#---------------------------------------------------------------------------
# TODO: Placeholder for date-editor-specific traits.
pass
#-- end DateEditor definition -------------------------------------------------
#-- eof -----------------------------------------------------------------------
|
#------------------------------------------------------------------------------
#
# Copyright (c) 2008, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
#
# Author: Judah De Paula
# Date: 10/7/2008
#
#------------------------------------------------------------------------------
"""
A Traits UI editor that wraps a WX calendar panel.
"""
import datetime
from enthought.traits.traits import Property
from enthought.traits.trait_types import Bool
from enthought.traits.ui.editor_factory import EditorFactory
from enthought.traits.ui.toolkit import toolkit_object
#-- DateEditor definition -----------------------------------------------------
class DateEditor ( EditorFactory ):
"""
Editor factory for date/time editors.
"""
#---------------------------------------------------------------------------
# Trait definitions:
#---------------------------------------------------------------------------
# Custom date editors can operate on a list of Dates, or just one.
multi_select = Bool(True)
#-- end DateEditor definition -------------------------------------------------
#-- eof -----------------------------------------------------------------------
|
Add multi_select in the DateEditor params for Custom editors.
|
Add multi_select in the DateEditor params for Custom editors.
|
Python
|
bsd-3-clause
|
burnpanck/traits,burnpanck/traits
|
#------------------------------------------------------------------------------
#
# Copyright (c) 2008, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
#
# Author: Judah De Paula
# Date: 10/7/2008
#
#------------------------------------------------------------------------------
"""
A Traits UI editor that wraps a WX calendar panel.
"""
import datetime
from enthought.traits.traits import Property
from enthought.traits.ui.editor_factory import EditorFactory
from enthought.traits.ui.toolkit import toolkit_object
#-- DateEditor definition -----------------------------------------------------
class DateEditor ( EditorFactory ):
"""
Editor factory for date/time editors.
"""
#---------------------------------------------------------------------------
# Trait definitions:
#---------------------------------------------------------------------------
# TODO: Placeholder for date-editor-specific traits.
pass
#-- end DateEditor definition -------------------------------------------------
#-- eof -----------------------------------------------------------------------
Add multi_select in the DateEditor params for Custom editors.
|
#------------------------------------------------------------------------------
#
# Copyright (c) 2008, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
#
# Author: Judah De Paula
# Date: 10/7/2008
#
#------------------------------------------------------------------------------
"""
A Traits UI editor that wraps a WX calendar panel.
"""
import datetime
from enthought.traits.traits import Property
from enthought.traits.trait_types import Bool
from enthought.traits.ui.editor_factory import EditorFactory
from enthought.traits.ui.toolkit import toolkit_object
#-- DateEditor definition -----------------------------------------------------
class DateEditor ( EditorFactory ):
"""
Editor factory for date/time editors.
"""
#---------------------------------------------------------------------------
# Trait definitions:
#---------------------------------------------------------------------------
# Custom date editors can operate on a list of Dates, or just one.
multi_select = Bool(True)
#-- end DateEditor definition -------------------------------------------------
#-- eof -----------------------------------------------------------------------
|
<commit_before>#------------------------------------------------------------------------------
#
# Copyright (c) 2008, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
#
# Author: Judah De Paula
# Date: 10/7/2008
#
#------------------------------------------------------------------------------
"""
A Traits UI editor that wraps a WX calendar panel.
"""
import datetime
from enthought.traits.traits import Property
from enthought.traits.ui.editor_factory import EditorFactory
from enthought.traits.ui.toolkit import toolkit_object
#-- DateEditor definition -----------------------------------------------------
class DateEditor ( EditorFactory ):
"""
Editor factory for date/time editors.
"""
#---------------------------------------------------------------------------
# Trait definitions:
#---------------------------------------------------------------------------
# TODO: Placeholder for date-editor-specific traits.
pass
#-- end DateEditor definition -------------------------------------------------
#-- eof -----------------------------------------------------------------------
<commit_msg>Add multi_select in the DateEditor params for Custom editors.<commit_after>
|
#------------------------------------------------------------------------------
#
# Copyright (c) 2008, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
#
# Author: Judah De Paula
# Date: 10/7/2008
#
#------------------------------------------------------------------------------
"""
A Traits UI editor that wraps a WX calendar panel.
"""
import datetime
from enthought.traits.traits import Property
from enthought.traits.trait_types import Bool
from enthought.traits.ui.editor_factory import EditorFactory
from enthought.traits.ui.toolkit import toolkit_object
#-- DateEditor definition -----------------------------------------------------
class DateEditor ( EditorFactory ):
"""
Editor factory for date/time editors.
"""
#---------------------------------------------------------------------------
# Trait definitions:
#---------------------------------------------------------------------------
# Custom date editors can operate on a list of Dates, or just one.
multi_select = Bool(True)
#-- end DateEditor definition -------------------------------------------------
#-- eof -----------------------------------------------------------------------
|
#------------------------------------------------------------------------------
#
# Copyright (c) 2008, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
#
# Author: Judah De Paula
# Date: 10/7/2008
#
#------------------------------------------------------------------------------
"""
A Traits UI editor that wraps a WX calendar panel.
"""
import datetime
from enthought.traits.traits import Property
from enthought.traits.ui.editor_factory import EditorFactory
from enthought.traits.ui.toolkit import toolkit_object
#-- DateEditor definition -----------------------------------------------------
class DateEditor ( EditorFactory ):
"""
Editor factory for date/time editors.
"""
#---------------------------------------------------------------------------
# Trait definitions:
#---------------------------------------------------------------------------
# TODO: Placeholder for date-editor-specific traits.
pass
#-- end DateEditor definition -------------------------------------------------
#-- eof -----------------------------------------------------------------------
Add multi_select in the DateEditor params for Custom editors.#------------------------------------------------------------------------------
#
# Copyright (c) 2008, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
#
# Author: Judah De Paula
# Date: 10/7/2008
#
#------------------------------------------------------------------------------
"""
A Traits UI editor that wraps a WX calendar panel.
"""
import datetime
from enthought.traits.traits import Property
from enthought.traits.trait_types import Bool
from enthought.traits.ui.editor_factory import EditorFactory
from enthought.traits.ui.toolkit import toolkit_object
#-- DateEditor definition -----------------------------------------------------
class DateEditor ( EditorFactory ):
"""
Editor factory for date/time editors.
"""
#---------------------------------------------------------------------------
# Trait definitions:
#---------------------------------------------------------------------------
# Custom date editors can operate on a list of Dates, or just one.
multi_select = Bool(True)
#-- end DateEditor definition -------------------------------------------------
#-- eof -----------------------------------------------------------------------
|
<commit_before>#------------------------------------------------------------------------------
#
# Copyright (c) 2008, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
#
# Author: Judah De Paula
# Date: 10/7/2008
#
#------------------------------------------------------------------------------
"""
A Traits UI editor that wraps a WX calendar panel.
"""
import datetime
from enthought.traits.traits import Property
from enthought.traits.ui.editor_factory import EditorFactory
from enthought.traits.ui.toolkit import toolkit_object
#-- DateEditor definition -----------------------------------------------------
class DateEditor ( EditorFactory ):
"""
Editor factory for date/time editors.
"""
#---------------------------------------------------------------------------
# Trait definitions:
#---------------------------------------------------------------------------
# TODO: Placeholder for date-editor-specific traits.
pass
#-- end DateEditor definition -------------------------------------------------
#-- eof -----------------------------------------------------------------------
<commit_msg>Add multi_select in the DateEditor params for Custom editors.<commit_after>#------------------------------------------------------------------------------
#
# Copyright (c) 2008, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in enthought/LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
#
# Author: Judah De Paula
# Date: 10/7/2008
#
#------------------------------------------------------------------------------
"""
A Traits UI editor that wraps a WX calendar panel.
"""
import datetime
from enthought.traits.traits import Property
from enthought.traits.trait_types import Bool
from enthought.traits.ui.editor_factory import EditorFactory
from enthought.traits.ui.toolkit import toolkit_object
#-- DateEditor definition -----------------------------------------------------
class DateEditor ( EditorFactory ):
"""
Editor factory for date/time editors.
"""
#---------------------------------------------------------------------------
# Trait definitions:
#---------------------------------------------------------------------------
# Custom date editors can operate on a list of Dates, or just one.
multi_select = Bool(True)
#-- end DateEditor definition -------------------------------------------------
#-- eof -----------------------------------------------------------------------
|
d9d22ea370f794681e5a3e6e7683a83f4bbe356a
|
feature_extraction/measurements/texture_haralick.py
|
feature_extraction/measurements/texture_haralick.py
|
from . import Measurement
import feature_extraction.util.cleanup as cleanup
from skimage.morphology import binary_erosion, disk
class HaralickTexture(Measurement):
default_options = {
'clip_cell_borders': True,
'erode_cell': False,
'erode_cell_amount': False,
}
def __init__(self, options=None):
super(HaralickTexture, self).__init__(options)
def compute(self, image):
# -- preprocessing
if self.options.clip_cell_borders:
# get the cell boundary mask
mask = cleanup.cell_boundary_mask(image)
# if we're told to, erode the mask with a disk by some amount
if self.options.erode_cell:
mask = binary_erosion(cleanup.cell_boundary_mask(), disk(self.options.erode_cell_amount))
# mask the image
image = image[mask]
# -- haralick setup and run
return []
|
from . import Measurement
import feature_extraction.util.cleanup as cleanup
from skimage.morphology import binary_erosion, disk
class HaralickTexture(Measurement):
default_options = {
'clip_cell_borders': True,
'erode_cell': False,
'erode_cell_amount': False,
}
def __init__(self, options=None):
super(HaralickTexture, self).__init__(options)
def compute(self, image):
# -- preprocessing
if self.options.clip_cell_borders:
# get the cell boundary mask
mask = cleanup.cell_boundary_mask(image)
# if we're told to, erode the mask with a disk by some amount
if self.options.erode_cell:
mask = binary_erosion(cleanup.cell_boundary_mask(), disk(self.options.erode_cell_amount))
# mask the image
# TODO(liam): we can probably use scipy.MaskedArray to get a speedup here
image = image.copy()
image[~mask] = 0 # set everything *outside* the cell to 0
# -- haralick setup and run
return []
|
Fix the image masking in HaralickTexture
|
Fix the image masking in HaralickTexture
|
Python
|
apache-2.0
|
widoptimization-willett/feature-extraction
|
from . import Measurement
import feature_extraction.util.cleanup as cleanup
from skimage.morphology import binary_erosion, disk
class HaralickTexture(Measurement):
default_options = {
'clip_cell_borders': True,
'erode_cell': False,
'erode_cell_amount': False,
}
def __init__(self, options=None):
super(HaralickTexture, self).__init__(options)
def compute(self, image):
# -- preprocessing
if self.options.clip_cell_borders:
# get the cell boundary mask
mask = cleanup.cell_boundary_mask(image)
# if we're told to, erode the mask with a disk by some amount
if self.options.erode_cell:
mask = binary_erosion(cleanup.cell_boundary_mask(), disk(self.options.erode_cell_amount))
# mask the image
image = image[mask]
# -- haralick setup and run
return []
Fix the image masking in HaralickTexture
|
from . import Measurement
import feature_extraction.util.cleanup as cleanup
from skimage.morphology import binary_erosion, disk
class HaralickTexture(Measurement):
default_options = {
'clip_cell_borders': True,
'erode_cell': False,
'erode_cell_amount': False,
}
def __init__(self, options=None):
super(HaralickTexture, self).__init__(options)
def compute(self, image):
# -- preprocessing
if self.options.clip_cell_borders:
# get the cell boundary mask
mask = cleanup.cell_boundary_mask(image)
# if we're told to, erode the mask with a disk by some amount
if self.options.erode_cell:
mask = binary_erosion(cleanup.cell_boundary_mask(), disk(self.options.erode_cell_amount))
# mask the image
# TODO(liam): we can probably use scipy.MaskedArray to get a speedup here
image = image.copy()
image[~mask] = 0 # set everything *outside* the cell to 0
# -- haralick setup and run
return []
|
<commit_before>from . import Measurement
import feature_extraction.util.cleanup as cleanup
from skimage.morphology import binary_erosion, disk
class HaralickTexture(Measurement):
default_options = {
'clip_cell_borders': True,
'erode_cell': False,
'erode_cell_amount': False,
}
def __init__(self, options=None):
super(HaralickTexture, self).__init__(options)
def compute(self, image):
# -- preprocessing
if self.options.clip_cell_borders:
# get the cell boundary mask
mask = cleanup.cell_boundary_mask(image)
# if we're told to, erode the mask with a disk by some amount
if self.options.erode_cell:
mask = binary_erosion(cleanup.cell_boundary_mask(), disk(self.options.erode_cell_amount))
# mask the image
image = image[mask]
# -- haralick setup and run
return []
<commit_msg>Fix the image masking in HaralickTexture<commit_after>
|
from . import Measurement
import feature_extraction.util.cleanup as cleanup
from skimage.morphology import binary_erosion, disk
class HaralickTexture(Measurement):
default_options = {
'clip_cell_borders': True,
'erode_cell': False,
'erode_cell_amount': False,
}
def __init__(self, options=None):
super(HaralickTexture, self).__init__(options)
def compute(self, image):
# -- preprocessing
if self.options.clip_cell_borders:
# get the cell boundary mask
mask = cleanup.cell_boundary_mask(image)
# if we're told to, erode the mask with a disk by some amount
if self.options.erode_cell:
mask = binary_erosion(cleanup.cell_boundary_mask(), disk(self.options.erode_cell_amount))
# mask the image
# TODO(liam): we can probably use scipy.MaskedArray to get a speedup here
image = image.copy()
image[~mask] = 0 # set everything *outside* the cell to 0
# -- haralick setup and run
return []
|
from . import Measurement
import feature_extraction.util.cleanup as cleanup
from skimage.morphology import binary_erosion, disk
class HaralickTexture(Measurement):
default_options = {
'clip_cell_borders': True,
'erode_cell': False,
'erode_cell_amount': False,
}
def __init__(self, options=None):
super(HaralickTexture, self).__init__(options)
def compute(self, image):
# -- preprocessing
if self.options.clip_cell_borders:
# get the cell boundary mask
mask = cleanup.cell_boundary_mask(image)
# if we're told to, erode the mask with a disk by some amount
if self.options.erode_cell:
mask = binary_erosion(cleanup.cell_boundary_mask(), disk(self.options.erode_cell_amount))
# mask the image
image = image[mask]
# -- haralick setup and run
return []
Fix the image masking in HaralickTexturefrom . import Measurement
import feature_extraction.util.cleanup as cleanup
from skimage.morphology import binary_erosion, disk
class HaralickTexture(Measurement):
default_options = {
'clip_cell_borders': True,
'erode_cell': False,
'erode_cell_amount': False,
}
def __init__(self, options=None):
super(HaralickTexture, self).__init__(options)
def compute(self, image):
# -- preprocessing
if self.options.clip_cell_borders:
# get the cell boundary mask
mask = cleanup.cell_boundary_mask(image)
# if we're told to, erode the mask with a disk by some amount
if self.options.erode_cell:
mask = binary_erosion(cleanup.cell_boundary_mask(), disk(self.options.erode_cell_amount))
# mask the image
# TODO(liam): we can probably use scipy.MaskedArray to get a speedup here
image = image.copy()
image[~mask] = 0 # set everything *outside* the cell to 0
# -- haralick setup and run
return []
|
<commit_before>from . import Measurement
import feature_extraction.util.cleanup as cleanup
from skimage.morphology import binary_erosion, disk
class HaralickTexture(Measurement):
default_options = {
'clip_cell_borders': True,
'erode_cell': False,
'erode_cell_amount': False,
}
def __init__(self, options=None):
super(HaralickTexture, self).__init__(options)
def compute(self, image):
# -- preprocessing
if self.options.clip_cell_borders:
# get the cell boundary mask
mask = cleanup.cell_boundary_mask(image)
# if we're told to, erode the mask with a disk by some amount
if self.options.erode_cell:
mask = binary_erosion(cleanup.cell_boundary_mask(), disk(self.options.erode_cell_amount))
# mask the image
image = image[mask]
# -- haralick setup and run
return []
<commit_msg>Fix the image masking in HaralickTexture<commit_after>from . import Measurement
import feature_extraction.util.cleanup as cleanup
from skimage.morphology import binary_erosion, disk
class HaralickTexture(Measurement):
default_options = {
'clip_cell_borders': True,
'erode_cell': False,
'erode_cell_amount': False,
}
def __init__(self, options=None):
super(HaralickTexture, self).__init__(options)
def compute(self, image):
# -- preprocessing
if self.options.clip_cell_borders:
# get the cell boundary mask
mask = cleanup.cell_boundary_mask(image)
# if we're told to, erode the mask with a disk by some amount
if self.options.erode_cell:
mask = binary_erosion(cleanup.cell_boundary_mask(), disk(self.options.erode_cell_amount))
# mask the image
# TODO(liam): we can probably use scipy.MaskedArray to get a speedup here
image = image.copy()
image[~mask] = 0 # set everything *outside* the cell to 0
# -- haralick setup and run
return []
|
edfbeabb802e64527094d46680f994a44ed7f0bd
|
froide_campaign/providers/amenity_local.py
|
froide_campaign/providers/amenity_local.py
|
from django.contrib.gis.measure import D
from django.contrib.gis.db.models.functions import Distance
from froide.publicbody.models import PublicBody
from .amenity import AmenityProvider
class AmenityLocalProvider(AmenityProvider):
'''
Like Amenity provider but tries to find the public body
for the amenity at its location
'''
NEARBY_RADIUS = 200
def _get_publicbody(self, amenity):
nearby_pbs = PublicBody.objects.filter(
geo__isnull=False
).filter(
geo__dwithin=(amenity.geo, self.NEARBY_RADIUS)
).filter(
geo__distance_lte=(amenity.geo, D(m=self.NEARBY_RADIUS))
).annotate(
distance=Distance("geo", amenity.geo)
).order_by("distance")[:1]
if nearby_pbs:
return nearby_pbs[0]
return super()._get_publicbody(amenity)
|
from django.contrib.gis.measure import D
from django.contrib.gis.db.models.functions import Distance
from froide.publicbody.models import PublicBody
from .amenity import AmenityProvider
class AmenityLocalProvider(AmenityProvider):
'''
Like Amenity provider but tries to find the public body
for the amenity at its location
'''
NEARBY_RADIUS = 200
def _get_publicbody(self, amenity):
nearby_popular_pbs = PublicBody.objects.filter(
geo__isnull=False
).filter(
geo__dwithin=(amenity.geo, self.NEARBY_RADIUS)
).filter(
geo__distance_lte=(amenity.geo, D(m=self.NEARBY_RADIUS))
).annotate(
distance=Distance("geo", amenity.geo)
).order_by("-number_of_requests", "distance")[:1]
if nearby_popular_pbs:
return nearby_popular_pbs[0]
return super()._get_publicbody(amenity)
|
Select popular pbs first instead of only closest
|
Select popular pbs first instead of only closest
|
Python
|
mit
|
okfde/froide-campaign,okfde/froide-campaign,okfde/froide-campaign
|
from django.contrib.gis.measure import D
from django.contrib.gis.db.models.functions import Distance
from froide.publicbody.models import PublicBody
from .amenity import AmenityProvider
class AmenityLocalProvider(AmenityProvider):
'''
Like Amenity provider but tries to find the public body
for the amenity at its location
'''
NEARBY_RADIUS = 200
def _get_publicbody(self, amenity):
nearby_pbs = PublicBody.objects.filter(
geo__isnull=False
).filter(
geo__dwithin=(amenity.geo, self.NEARBY_RADIUS)
).filter(
geo__distance_lte=(amenity.geo, D(m=self.NEARBY_RADIUS))
).annotate(
distance=Distance("geo", amenity.geo)
).order_by("distance")[:1]
if nearby_pbs:
return nearby_pbs[0]
return super()._get_publicbody(amenity)
Select popular pbs first instead of only closest
|
from django.contrib.gis.measure import D
from django.contrib.gis.db.models.functions import Distance
from froide.publicbody.models import PublicBody
from .amenity import AmenityProvider
class AmenityLocalProvider(AmenityProvider):
'''
Like Amenity provider but tries to find the public body
for the amenity at its location
'''
NEARBY_RADIUS = 200
def _get_publicbody(self, amenity):
nearby_popular_pbs = PublicBody.objects.filter(
geo__isnull=False
).filter(
geo__dwithin=(amenity.geo, self.NEARBY_RADIUS)
).filter(
geo__distance_lte=(amenity.geo, D(m=self.NEARBY_RADIUS))
).annotate(
distance=Distance("geo", amenity.geo)
).order_by("-number_of_requests", "distance")[:1]
if nearby_popular_pbs:
return nearby_popular_pbs[0]
return super()._get_publicbody(amenity)
|
<commit_before>from django.contrib.gis.measure import D
from django.contrib.gis.db.models.functions import Distance
from froide.publicbody.models import PublicBody
from .amenity import AmenityProvider
class AmenityLocalProvider(AmenityProvider):
'''
Like Amenity provider but tries to find the public body
for the amenity at its location
'''
NEARBY_RADIUS = 200
def _get_publicbody(self, amenity):
nearby_pbs = PublicBody.objects.filter(
geo__isnull=False
).filter(
geo__dwithin=(amenity.geo, self.NEARBY_RADIUS)
).filter(
geo__distance_lte=(amenity.geo, D(m=self.NEARBY_RADIUS))
).annotate(
distance=Distance("geo", amenity.geo)
).order_by("distance")[:1]
if nearby_pbs:
return nearby_pbs[0]
return super()._get_publicbody(amenity)
<commit_msg>Select popular pbs first instead of only closest<commit_after>
|
from django.contrib.gis.measure import D
from django.contrib.gis.db.models.functions import Distance
from froide.publicbody.models import PublicBody
from .amenity import AmenityProvider
class AmenityLocalProvider(AmenityProvider):
'''
Like Amenity provider but tries to find the public body
for the amenity at its location
'''
NEARBY_RADIUS = 200
def _get_publicbody(self, amenity):
nearby_popular_pbs = PublicBody.objects.filter(
geo__isnull=False
).filter(
geo__dwithin=(amenity.geo, self.NEARBY_RADIUS)
).filter(
geo__distance_lte=(amenity.geo, D(m=self.NEARBY_RADIUS))
).annotate(
distance=Distance("geo", amenity.geo)
).order_by("-number_of_requests", "distance")[:1]
if nearby_popular_pbs:
return nearby_popular_pbs[0]
return super()._get_publicbody(amenity)
|
from django.contrib.gis.measure import D
from django.contrib.gis.db.models.functions import Distance
from froide.publicbody.models import PublicBody
from .amenity import AmenityProvider
class AmenityLocalProvider(AmenityProvider):
'''
Like Amenity provider but tries to find the public body
for the amenity at its location
'''
NEARBY_RADIUS = 200
def _get_publicbody(self, amenity):
nearby_pbs = PublicBody.objects.filter(
geo__isnull=False
).filter(
geo__dwithin=(amenity.geo, self.NEARBY_RADIUS)
).filter(
geo__distance_lte=(amenity.geo, D(m=self.NEARBY_RADIUS))
).annotate(
distance=Distance("geo", amenity.geo)
).order_by("distance")[:1]
if nearby_pbs:
return nearby_pbs[0]
return super()._get_publicbody(amenity)
Select popular pbs first instead of only closestfrom django.contrib.gis.measure import D
from django.contrib.gis.db.models.functions import Distance
from froide.publicbody.models import PublicBody
from .amenity import AmenityProvider
class AmenityLocalProvider(AmenityProvider):
'''
Like Amenity provider but tries to find the public body
for the amenity at its location
'''
NEARBY_RADIUS = 200
def _get_publicbody(self, amenity):
nearby_popular_pbs = PublicBody.objects.filter(
geo__isnull=False
).filter(
geo__dwithin=(amenity.geo, self.NEARBY_RADIUS)
).filter(
geo__distance_lte=(amenity.geo, D(m=self.NEARBY_RADIUS))
).annotate(
distance=Distance("geo", amenity.geo)
).order_by("-number_of_requests", "distance")[:1]
if nearby_popular_pbs:
return nearby_popular_pbs[0]
return super()._get_publicbody(amenity)
|
<commit_before>from django.contrib.gis.measure import D
from django.contrib.gis.db.models.functions import Distance
from froide.publicbody.models import PublicBody
from .amenity import AmenityProvider
class AmenityLocalProvider(AmenityProvider):
    '''
    Like Amenity provider but tries to find the public body
    for the amenity at its location
    '''
    # Search radius around the amenity, in meters (used via D(m=...)).
    NEARBY_RADIUS = 200

    def _get_publicbody(self, amenity):
        """Return the public body closest to *amenity* within the radius.

        Falls back to the parent provider's lookup when no public body
        with a location lies within ``NEARBY_RADIUS`` meters.
        """
        nearby_pbs = PublicBody.objects.filter(
            geo__isnull=False
        ).filter(
            # Coarse prefilter. NOTE(review): the tolerance is a bare
            # number whose unit depends on the geo field's SRID --
            # confirm it is consistent with the metric bound below.
            geo__dwithin=(amenity.geo, self.NEARBY_RADIUS)
        ).filter(
            # Exact bound in meters.
            geo__distance_lte=(amenity.geo, D(m=self.NEARBY_RADIUS))
        ).annotate(
            distance=Distance("geo", amenity.geo)
        ).order_by("distance")[:1]
        if nearby_pbs:
            return nearby_pbs[0]
        # Nothing within the radius: defer to the parent provider.
        return super()._get_publicbody(amenity)
<commit_msg>Select popular pbs first instead of only closest<commit_after>from django.contrib.gis.measure import D
from django.contrib.gis.db.models.functions import Distance
from froide.publicbody.models import PublicBody
from .amenity import AmenityProvider
class AmenityLocalProvider(AmenityProvider):
    '''
    Like Amenity provider but tries to find the public body
    for the amenity at its location
    '''
    # Search radius around the amenity, in meters (used via D(m=...)).
    NEARBY_RADIUS = 200

    def _get_publicbody(self, amenity):
        """Return the most-requested public body near *amenity*.

        Candidates must have a location within ``NEARBY_RADIUS`` meters
        of the amenity; among them the one with the highest
        ``number_of_requests`` wins, with proximity as the tie-breaker.
        Falls back to the parent provider's lookup when none is nearby.
        """
        nearby_popular_pbs = PublicBody.objects.filter(
            geo__isnull=False
        ).filter(
            # Coarse prefilter. NOTE(review): the tolerance is a bare
            # number whose unit depends on the geo field's SRID --
            # confirm it is consistent with the metric bound below.
            geo__dwithin=(amenity.geo, self.NEARBY_RADIUS)
        ).filter(
            # Exact bound in meters.
            geo__distance_lte=(amenity.geo, D(m=self.NEARBY_RADIUS))
        ).annotate(
            distance=Distance("geo", amenity.geo)
        ).order_by("-number_of_requests", "distance")[:1]
        if nearby_popular_pbs:
            return nearby_popular_pbs[0]
        # Nothing within the radius: defer to the parent provider.
        return super()._get_publicbody(amenity)
|
39ea591073339ad86f77c22f7b29436efcf01a0e
|
egpackager/datamanager.py
|
egpackager/datamanager.py
|
import logging
from collections import OrderedDict
from egpackager.datasources import GspreadDataSource, RasterDataSource
class DataManager(object):
    """Registry of data sources, keyed by their URI.

    Wraps construction of the concrete data-source classes and keeps
    them in insertion order so callers can iterate deterministically.
    """

    def __init__(self, debug=False):
        # Configure root logging once; DEBUG when requested, INFO otherwise.
        if debug:
            logging.basicConfig(level=logging.DEBUG)
        else:
            logging.basicConfig(level=logging.INFO)
        self.logger = logging.getLogger(__name__)
        self.logger.debug("Initializing new registry manager")
        # Ordered mapping of data-source URI -> data-source instance.
        self._data = OrderedDict()

    def add_datasource(self, *args, **kwargs):
        """Create a data source of the given ``type`` and register it.

        Keyword Args:
            type: either ``'gspread'`` or ``'raster'`` (required).
            uri: identifier used as the registry key (required).

        Raises:
            TypeError: if ``type`` is missing or unrecognized.
        """
        if 'type' not in kwargs:
            # FIX: message previously read "Missing require keyword
            # argument: 'type" (typo + unbalanced quote).
            raise TypeError("Missing required keyword argument: 'type'")
        if kwargs['type'] == 'gspread':
            # Remove keyword argument 'type' as it is not needed anymore
            del kwargs['type']
            self.logger.debug('Adding Google Sheets data source')
            self._data[kwargs['uri']] = GspreadDataSource(*args, **kwargs)
        elif kwargs['type'] == 'raster':
            del kwargs['type']
            self.logger.debug('Adding raster data source')
            self._data[kwargs['uri']] = RasterDataSource(*args, **kwargs)
        else:
            raise TypeError("Unknown data source type: {0}".format(kwargs['type']))

    @property
    def data(self):
        # Read-only view of the registered data sources.
        return self._data
|
import logging
from collections import OrderedDict
from egpackager.datasources import GspreadDataSource, RasterDataSource
class DataManager(object):
    """Registry holding one metadata source and one resource source.

    The gspread source is stored under the fixed key ``'metadata'``,
    the raster source under ``'resource'``, and the last-added URI
    under ``'uri'``.
    """

    def __init__(self, debug=False):
        # Configure root logging once; DEBUG when requested, INFO otherwise.
        if debug:
            logging.basicConfig(level=logging.DEBUG)
        else:
            logging.basicConfig(level=logging.INFO)
        self.logger = logging.getLogger(__name__)
        self.logger.debug("Initializing new registry manager")
        # Fixed-key mapping: 'metadata', 'resource' and 'uri'.
        self._data = OrderedDict()

    def add_datasource(self, *args, **kwargs):
        """Create a data source of the given ``type`` and register it.

        Keyword Args:
            type: ``'gspread'`` (metadata) or ``'raster'`` (resource);
                required.
            uri: source identifier, recorded under the ``'uri'`` key.

        Raises:
            TypeError: if ``type`` is missing or unrecognized.
        """
        if 'type' not in kwargs:
            # FIX: message previously read "Missing require keyword
            # argument: 'type" (typo + unbalanced quote).
            raise TypeError("Missing required keyword argument: 'type'")
        if kwargs['type'] == 'gspread':
            # Remove keyword argument 'type' as it is not needed anymore
            del kwargs['type']
            self.logger.debug('Adding Google Sheets data source')
            self._data['metadata'] = GspreadDataSource(*args, **kwargs)
        elif kwargs['type'] == 'raster':
            del kwargs['type']
            self.logger.debug('Adding raster data source')
            self._data['resource'] = RasterDataSource(*args, **kwargs)
        else:
            raise TypeError("Unknown data source type: {0}".format(kwargs['type']))
        self._data['uri'] = kwargs['uri']

    @property
    def data(self):
        # Raw registry mapping.
        return self._data

    def get_metadata_value(self, key, value):
        """Delegate a lookup to the metadata (gspread) source."""
        return self.data['metadata'].get_value(key, value)

    def get_resource_value(self, key, value):
        """Delegate a lookup to the resource (raster) source."""
        return self.data['resource'].get_value(key, value)

    @property
    def metadata(self):
        # Underlying data of the metadata source.
        return self.data['metadata'].data

    @property
    def resource_metadata(self):
        # Underlying data of the resource source.
        return self.data['resource'].data
|
Add metadata and resource distinction in DataManager
|
Add metadata and resource distinction in DataManager
|
Python
|
mit
|
VUEG/egpackager
|
import logging
from collections import OrderedDict
from egpackager.datasources import GspreadDataSource, RasterDataSource
class DataManager(object):
    """Registry of data sources, keyed by their URI.

    Wraps construction of the concrete data-source classes and keeps
    them in insertion order so callers can iterate deterministically.
    """

    def __init__(self, debug=False):
        # Configure root logging once; DEBUG when requested, INFO otherwise.
        if debug:
            logging.basicConfig(level=logging.DEBUG)
        else:
            logging.basicConfig(level=logging.INFO)
        self.logger = logging.getLogger(__name__)
        self.logger.debug("Initializing new registry manager")
        # Ordered mapping of data-source URI -> data-source instance.
        self._data = OrderedDict()

    def add_datasource(self, *args, **kwargs):
        """Create a data source of the given ``type`` and register it.

        Keyword Args:
            type: either ``'gspread'`` or ``'raster'`` (required).
            uri: identifier used as the registry key (required).

        Raises:
            TypeError: if ``type`` is missing or unrecognized.
        """
        if 'type' not in kwargs:
            # FIX: message previously read "Missing require keyword
            # argument: 'type" (typo + unbalanced quote).
            raise TypeError("Missing required keyword argument: 'type'")
        if kwargs['type'] == 'gspread':
            # Remove keyword argument 'type' as it is not needed anymore
            del kwargs['type']
            self.logger.debug('Adding Google Sheets data source')
            self._data[kwargs['uri']] = GspreadDataSource(*args, **kwargs)
        elif kwargs['type'] == 'raster':
            del kwargs['type']
            self.logger.debug('Adding raster data source')
            self._data[kwargs['uri']] = RasterDataSource(*args, **kwargs)
        else:
            raise TypeError("Unknown data source type: {0}".format(kwargs['type']))

    @property
    def data(self):
        # Read-only view of the registered data sources.
        return self._data
Add metadata and resource distinction in DataManager
|
import logging
from collections import OrderedDict
from egpackager.datasources import GspreadDataSource, RasterDataSource
class DataManager(object):
    """Registry holding one metadata source and one resource source.

    The gspread source is stored under the fixed key ``'metadata'``,
    the raster source under ``'resource'``, and the last-added URI
    under ``'uri'``.
    """

    def __init__(self, debug=False):
        # Configure root logging once; DEBUG when requested, INFO otherwise.
        if debug:
            logging.basicConfig(level=logging.DEBUG)
        else:
            logging.basicConfig(level=logging.INFO)
        self.logger = logging.getLogger(__name__)
        self.logger.debug("Initializing new registry manager")
        # Fixed-key mapping: 'metadata', 'resource' and 'uri'.
        self._data = OrderedDict()

    def add_datasource(self, *args, **kwargs):
        """Create a data source of the given ``type`` and register it.

        Keyword Args:
            type: ``'gspread'`` (metadata) or ``'raster'`` (resource);
                required.
            uri: source identifier, recorded under the ``'uri'`` key.

        Raises:
            TypeError: if ``type`` is missing or unrecognized.
        """
        if 'type' not in kwargs:
            # FIX: message previously read "Missing require keyword
            # argument: 'type" (typo + unbalanced quote).
            raise TypeError("Missing required keyword argument: 'type'")
        if kwargs['type'] == 'gspread':
            # Remove keyword argument 'type' as it is not needed anymore
            del kwargs['type']
            self.logger.debug('Adding Google Sheets data source')
            self._data['metadata'] = GspreadDataSource(*args, **kwargs)
        elif kwargs['type'] == 'raster':
            del kwargs['type']
            self.logger.debug('Adding raster data source')
            self._data['resource'] = RasterDataSource(*args, **kwargs)
        else:
            raise TypeError("Unknown data source type: {0}".format(kwargs['type']))
        self._data['uri'] = kwargs['uri']

    @property
    def data(self):
        # Raw registry mapping.
        return self._data

    def get_metadata_value(self, key, value):
        """Delegate a lookup to the metadata (gspread) source."""
        return self.data['metadata'].get_value(key, value)

    def get_resource_value(self, key, value):
        """Delegate a lookup to the resource (raster) source."""
        return self.data['resource'].get_value(key, value)

    @property
    def metadata(self):
        # Underlying data of the metadata source.
        return self.data['metadata'].data

    @property
    def resource_metadata(self):
        # Underlying data of the resource source.
        return self.data['resource'].data
|
<commit_before>import logging
from collections import OrderedDict
from egpackager.datasources import GspreadDataSource, RasterDataSource
class DataManager(object):
    """Registry of data sources, keyed by their URI.

    Wraps construction of the concrete data-source classes and keeps
    them in insertion order so callers can iterate deterministically.
    """

    def __init__(self, debug=False):
        # Configure root logging once; DEBUG when requested, INFO otherwise.
        if debug:
            logging.basicConfig(level=logging.DEBUG)
        else:
            logging.basicConfig(level=logging.INFO)
        self.logger = logging.getLogger(__name__)
        self.logger.debug("Initializing new registry manager")
        # Ordered mapping of data-source URI -> data-source instance.
        self._data = OrderedDict()

    def add_datasource(self, *args, **kwargs):
        """Create a data source of the given ``type`` and register it.

        Keyword Args:
            type: either ``'gspread'`` or ``'raster'`` (required).
            uri: identifier used as the registry key (required).

        Raises:
            TypeError: if ``type`` is missing or unrecognized.
        """
        if 'type' not in kwargs:
            # FIX: message previously read "Missing require keyword
            # argument: 'type" (typo + unbalanced quote).
            raise TypeError("Missing required keyword argument: 'type'")
        if kwargs['type'] == 'gspread':
            # Remove keyword argument 'type' as it is not needed anymore
            del kwargs['type']
            self.logger.debug('Adding Google Sheets data source')
            self._data[kwargs['uri']] = GspreadDataSource(*args, **kwargs)
        elif kwargs['type'] == 'raster':
            del kwargs['type']
            self.logger.debug('Adding raster data source')
            self._data[kwargs['uri']] = RasterDataSource(*args, **kwargs)
        else:
            raise TypeError("Unknown data source type: {0}".format(kwargs['type']))

    @property
    def data(self):
        # Read-only view of the registered data sources.
        return self._data
<commit_msg>Add metadata and resource distinction in DataManager<commit_after>
|
import logging
from collections import OrderedDict
from egpackager.datasources import GspreadDataSource, RasterDataSource
class DataManager(object):
    """Registry holding one metadata source and one resource source.

    The gspread source is stored under the fixed key ``'metadata'``,
    the raster source under ``'resource'``, and the last-added URI
    under ``'uri'``.
    """

    def __init__(self, debug=False):
        # Configure root logging once; DEBUG when requested, INFO otherwise.
        if debug:
            logging.basicConfig(level=logging.DEBUG)
        else:
            logging.basicConfig(level=logging.INFO)
        self.logger = logging.getLogger(__name__)
        self.logger.debug("Initializing new registry manager")
        # Fixed-key mapping: 'metadata', 'resource' and 'uri'.
        self._data = OrderedDict()

    def add_datasource(self, *args, **kwargs):
        """Create a data source of the given ``type`` and register it.

        Keyword Args:
            type: ``'gspread'`` (metadata) or ``'raster'`` (resource);
                required.
            uri: source identifier, recorded under the ``'uri'`` key.

        Raises:
            TypeError: if ``type`` is missing or unrecognized.
        """
        if 'type' not in kwargs:
            # FIX: message previously read "Missing require keyword
            # argument: 'type" (typo + unbalanced quote).
            raise TypeError("Missing required keyword argument: 'type'")
        if kwargs['type'] == 'gspread':
            # Remove keyword argument 'type' as it is not needed anymore
            del kwargs['type']
            self.logger.debug('Adding Google Sheets data source')
            self._data['metadata'] = GspreadDataSource(*args, **kwargs)
        elif kwargs['type'] == 'raster':
            del kwargs['type']
            self.logger.debug('Adding raster data source')
            self._data['resource'] = RasterDataSource(*args, **kwargs)
        else:
            raise TypeError("Unknown data source type: {0}".format(kwargs['type']))
        self._data['uri'] = kwargs['uri']

    @property
    def data(self):
        # Raw registry mapping.
        return self._data

    def get_metadata_value(self, key, value):
        """Delegate a lookup to the metadata (gspread) source."""
        return self.data['metadata'].get_value(key, value)

    def get_resource_value(self, key, value):
        """Delegate a lookup to the resource (raster) source."""
        return self.data['resource'].get_value(key, value)

    @property
    def metadata(self):
        # Underlying data of the metadata source.
        return self.data['metadata'].data

    @property
    def resource_metadata(self):
        # Underlying data of the resource source.
        return self.data['resource'].data
|
import logging
from collections import OrderedDict
from egpackager.datasources import GspreadDataSource, RasterDataSource
class DataManager(object):
    """Registry of data sources, keyed by their URI.

    Wraps construction of the concrete data-source classes and keeps
    them in insertion order so callers can iterate deterministically.
    """

    def __init__(self, debug=False):
        # Configure root logging once; DEBUG when requested, INFO otherwise.
        if debug:
            logging.basicConfig(level=logging.DEBUG)
        else:
            logging.basicConfig(level=logging.INFO)
        self.logger = logging.getLogger(__name__)
        self.logger.debug("Initializing new registry manager")
        # Ordered mapping of data-source URI -> data-source instance.
        self._data = OrderedDict()

    def add_datasource(self, *args, **kwargs):
        """Create a data source of the given ``type`` and register it.

        Keyword Args:
            type: either ``'gspread'`` or ``'raster'`` (required).
            uri: identifier used as the registry key (required).

        Raises:
            TypeError: if ``type`` is missing or unrecognized.
        """
        if 'type' not in kwargs:
            # FIX: message previously read "Missing require keyword
            # argument: 'type" (typo + unbalanced quote).
            raise TypeError("Missing required keyword argument: 'type'")
        if kwargs['type'] == 'gspread':
            # Remove keyword argument 'type' as it is not needed anymore
            del kwargs['type']
            self.logger.debug('Adding Google Sheets data source')
            self._data[kwargs['uri']] = GspreadDataSource(*args, **kwargs)
        elif kwargs['type'] == 'raster':
            del kwargs['type']
            self.logger.debug('Adding raster data source')
            self._data[kwargs['uri']] = RasterDataSource(*args, **kwargs)
        else:
            raise TypeError("Unknown data source type: {0}".format(kwargs['type']))

    @property
    def data(self):
        # Read-only view of the registered data sources.
        return self._data
Add metadata and resource distinction in DataManagerimport logging
from collections import OrderedDict
from egpackager.datasources import GspreadDataSource, RasterDataSource
class DataManager(object):
    """Registry holding one metadata source and one resource source.

    The gspread source is stored under the fixed key ``'metadata'``,
    the raster source under ``'resource'``, and the last-added URI
    under ``'uri'``.
    """

    def __init__(self, debug=False):
        # Configure root logging once; DEBUG when requested, INFO otherwise.
        if debug:
            logging.basicConfig(level=logging.DEBUG)
        else:
            logging.basicConfig(level=logging.INFO)
        self.logger = logging.getLogger(__name__)
        self.logger.debug("Initializing new registry manager")
        # Fixed-key mapping: 'metadata', 'resource' and 'uri'.
        self._data = OrderedDict()

    def add_datasource(self, *args, **kwargs):
        """Create a data source of the given ``type`` and register it.

        Keyword Args:
            type: ``'gspread'`` (metadata) or ``'raster'`` (resource);
                required.
            uri: source identifier, recorded under the ``'uri'`` key.

        Raises:
            TypeError: if ``type`` is missing or unrecognized.
        """
        if 'type' not in kwargs:
            # FIX: message previously read "Missing require keyword
            # argument: 'type" (typo + unbalanced quote).
            raise TypeError("Missing required keyword argument: 'type'")
        if kwargs['type'] == 'gspread':
            # Remove keyword argument 'type' as it is not needed anymore
            del kwargs['type']
            self.logger.debug('Adding Google Sheets data source')
            self._data['metadata'] = GspreadDataSource(*args, **kwargs)
        elif kwargs['type'] == 'raster':
            del kwargs['type']
            self.logger.debug('Adding raster data source')
            self._data['resource'] = RasterDataSource(*args, **kwargs)
        else:
            raise TypeError("Unknown data source type: {0}".format(kwargs['type']))
        self._data['uri'] = kwargs['uri']

    @property
    def data(self):
        # Raw registry mapping.
        return self._data

    def get_metadata_value(self, key, value):
        """Delegate a lookup to the metadata (gspread) source."""
        return self.data['metadata'].get_value(key, value)

    def get_resource_value(self, key, value):
        """Delegate a lookup to the resource (raster) source."""
        return self.data['resource'].get_value(key, value)

    @property
    def metadata(self):
        # Underlying data of the metadata source.
        return self.data['metadata'].data

    @property
    def resource_metadata(self):
        # Underlying data of the resource source.
        return self.data['resource'].data
|
<commit_before>import logging
from collections import OrderedDict
from egpackager.datasources import GspreadDataSource, RasterDataSource
class DataManager(object):
    """Registry of data sources, keyed by their URI.

    Wraps construction of the concrete data-source classes and keeps
    them in insertion order so callers can iterate deterministically.
    """

    def __init__(self, debug=False):
        # Configure root logging once; DEBUG when requested, INFO otherwise.
        if debug:
            logging.basicConfig(level=logging.DEBUG)
        else:
            logging.basicConfig(level=logging.INFO)
        self.logger = logging.getLogger(__name__)
        self.logger.debug("Initializing new registry manager")
        # Ordered mapping of data-source URI -> data-source instance.
        self._data = OrderedDict()

    def add_datasource(self, *args, **kwargs):
        """Create a data source of the given ``type`` and register it.

        Keyword Args:
            type: either ``'gspread'`` or ``'raster'`` (required).
            uri: identifier used as the registry key (required).

        Raises:
            TypeError: if ``type`` is missing or unrecognized.
        """
        if 'type' not in kwargs:
            # FIX: message previously read "Missing require keyword
            # argument: 'type" (typo + unbalanced quote).
            raise TypeError("Missing required keyword argument: 'type'")
        if kwargs['type'] == 'gspread':
            # Remove keyword argument 'type' as it is not needed anymore
            del kwargs['type']
            self.logger.debug('Adding Google Sheets data source')
            self._data[kwargs['uri']] = GspreadDataSource(*args, **kwargs)
        elif kwargs['type'] == 'raster':
            del kwargs['type']
            self.logger.debug('Adding raster data source')
            self._data[kwargs['uri']] = RasterDataSource(*args, **kwargs)
        else:
            raise TypeError("Unknown data source type: {0}".format(kwargs['type']))

    @property
    def data(self):
        # Read-only view of the registered data sources.
        return self._data
<commit_msg>Add metadata and resource distinction in DataManager<commit_after>import logging
from collections import OrderedDict
from egpackager.datasources import GspreadDataSource, RasterDataSource
class DataManager(object):
    """Registry holding one metadata source and one resource source.

    The gspread source is stored under the fixed key ``'metadata'``,
    the raster source under ``'resource'``, and the last-added URI
    under ``'uri'``.
    """

    def __init__(self, debug=False):
        # Configure root logging once; DEBUG when requested, INFO otherwise.
        if debug:
            logging.basicConfig(level=logging.DEBUG)
        else:
            logging.basicConfig(level=logging.INFO)
        self.logger = logging.getLogger(__name__)
        self.logger.debug("Initializing new registry manager")
        # Fixed-key mapping: 'metadata', 'resource' and 'uri'.
        self._data = OrderedDict()

    def add_datasource(self, *args, **kwargs):
        """Create a data source of the given ``type`` and register it.

        Keyword Args:
            type: ``'gspread'`` (metadata) or ``'raster'`` (resource);
                required.
            uri: source identifier, recorded under the ``'uri'`` key.

        Raises:
            TypeError: if ``type`` is missing or unrecognized.
        """
        if 'type' not in kwargs:
            # FIX: message previously read "Missing require keyword
            # argument: 'type" (typo + unbalanced quote).
            raise TypeError("Missing required keyword argument: 'type'")
        if kwargs['type'] == 'gspread':
            # Remove keyword argument 'type' as it is not needed anymore
            del kwargs['type']
            self.logger.debug('Adding Google Sheets data source')
            self._data['metadata'] = GspreadDataSource(*args, **kwargs)
        elif kwargs['type'] == 'raster':
            del kwargs['type']
            self.logger.debug('Adding raster data source')
            self._data['resource'] = RasterDataSource(*args, **kwargs)
        else:
            raise TypeError("Unknown data source type: {0}".format(kwargs['type']))
        self._data['uri'] = kwargs['uri']

    @property
    def data(self):
        # Raw registry mapping.
        return self._data

    def get_metadata_value(self, key, value):
        """Delegate a lookup to the metadata (gspread) source."""
        return self.data['metadata'].get_value(key, value)

    def get_resource_value(self, key, value):
        """Delegate a lookup to the resource (raster) source."""
        return self.data['resource'].get_value(key, value)

    @property
    def metadata(self):
        # Underlying data of the metadata source.
        return self.data['metadata'].data

    @property
    def resource_metadata(self):
        # Underlying data of the resource source.
        return self.data['resource'].data
|
e0d2ce09475e3ae07e2740cbf0e342f68c1564a8
|
gn/standalone/toolchain/linux_find_llvm.py
|
gn/standalone/toolchain/linux_find_llvm.py
|
# Copyright (C) 2017 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
import sys
def main():
    """Locate a usable clang and print its LLVM lib dir and binary names.

    On success prints three lines -- the absolute clang resource lib
    directory, the clang binary name, and the matching clang++ name --
    and returns 0. Returns 1 when no suitable installation is found.
    """
    devnull = open(os.devnull, 'w')
    # Probe well-known clang binary names; `which` exit status 0 means found.
    for clang in ('clang', 'clang-3.8', 'clang-3.5'):
        if subprocess.call(['which', clang], stdout=devnull, stderr=devnull) != 0:
            continue
        # FIX: decode the subprocess output so the str operations below
        # also work under Python 3 (check_output returns bytes there),
        # and use print() instead of the Python-2-only print statement.
        res = subprocess.check_output([clang, '-print-search-dirs']).decode('utf-8')
        for line in res.splitlines():
            if not line.startswith('libraries:'):
                continue
            # Line looks like "libraries: =/path/a:/path/b"; keep the paths.
            libs = line.split('=', 1)[1].split(':')
            for lib in libs:
                # Keep only clang resource dirs that contain a lib/ subdir.
                if '/clang/' not in lib or not os.path.isdir(lib + '/lib'):
                    continue
                print(os.path.abspath(lib))
                print(clang)
                print(clang.replace('clang', 'clang++'))
                return 0
    print('Could not find the LLVM lib dir')
    return 1


if __name__ == '__main__':
    sys.exit(main())
|
# Copyright (C) 2017 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
import sys
def main():
    """Locate a usable clang and print its LLVM lib dir and binary names.

    On success prints three lines -- the absolute clang resource lib
    directory, the clang binary name, and the matching clang++ name --
    and returns 0. Returns 1 when no suitable installation is found.
    """
    devnull = open(os.devnull, 'w')
    # Probe well-known clang binary names; `which` exit status 0 means found.
    for clang in ('clang', 'clang-3.8', 'clang-3.5'):
        if subprocess.call(['which', clang], stdout=devnull, stderr=devnull) != 0:
            continue
        # decode: check_output returns bytes on Python 3.
        res = subprocess.check_output([clang, '-print-search-dirs']).decode("utf-8")
        for line in res.splitlines():
            if not line.startswith('libraries:'):
                continue
            # Line looks like "libraries: =/path/a:/path/b"; keep the paths.
            libs = line.split('=', 1)[1].split(':')
            for lib in libs:
                # Keep only clang resource dirs that contain a lib/ subdir.
                if '/clang/' not in lib or not os.path.isdir(lib + '/lib'):
                    continue
                print(os.path.abspath(lib))
                print(clang)
                print(clang.replace('clang', 'clang++'))
                return 0
    print('Could not find the LLVM lib dir')
    return 1


if __name__ == '__main__':
    sys.exit(main())
|
Fix issue with finding llvm when using python3
|
gn: Fix issue with finding llvm when using python3
With python3, subprocess output is a byte sequence. This needs to be
decoded to string so that the string functions work. Fix it so we can
find LLVM when building perfetto.
Also fix 'print' operator which is a function in python3.
Bug: 147789115
Signed-off-by: Joel Fernandes <89f39a38232ac523c7644e47b6ca6563177e40b4@google.com>
Change-Id: I4ab9b3c248d471e7ab5a27559152a1954ca43108
|
Python
|
apache-2.0
|
google/perfetto,google/perfetto,google/perfetto,google/perfetto,google/perfetto,google/perfetto,google/perfetto,google/perfetto
|
# Copyright (C) 2017 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
import sys
def main():
    """Locate a usable clang and print its LLVM lib dir and binary names.

    On success prints three lines -- the absolute clang resource lib
    directory, the clang binary name, and the matching clang++ name --
    and returns 0. Returns 1 when no suitable installation is found.
    """
    devnull = open(os.devnull, 'w')
    # Probe well-known clang binary names; `which` exit status 0 means found.
    for clang in ('clang', 'clang-3.8', 'clang-3.5'):
        if subprocess.call(['which', clang], stdout=devnull, stderr=devnull) != 0:
            continue
        # FIX: decode the subprocess output so the str operations below
        # also work under Python 3 (check_output returns bytes there),
        # and use print() instead of the Python-2-only print statement.
        res = subprocess.check_output([clang, '-print-search-dirs']).decode('utf-8')
        for line in res.splitlines():
            if not line.startswith('libraries:'):
                continue
            # Line looks like "libraries: =/path/a:/path/b"; keep the paths.
            libs = line.split('=', 1)[1].split(':')
            for lib in libs:
                # Keep only clang resource dirs that contain a lib/ subdir.
                if '/clang/' not in lib or not os.path.isdir(lib + '/lib'):
                    continue
                print(os.path.abspath(lib))
                print(clang)
                print(clang.replace('clang', 'clang++'))
                return 0
    print('Could not find the LLVM lib dir')
    return 1


if __name__ == '__main__':
    sys.exit(main())
gn: Fix issue with finding llvm when using python3
With python3, subprocess output is a byte sequence. This needs to be
decoded to string so that the string functions work. Fix it so we can
find LLVM when building perfetto.
Also fix 'print' operator which is a function in python3.
Bug: 147789115
Signed-off-by: Joel Fernandes <89f39a38232ac523c7644e47b6ca6563177e40b4@google.com>
Change-Id: I4ab9b3c248d471e7ab5a27559152a1954ca43108
|
# Copyright (C) 2017 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
import sys
def main():
    """Locate a usable clang and print its LLVM lib dir and binary names.

    On success prints three lines -- the absolute clang resource lib
    directory, the clang binary name, and the matching clang++ name --
    and returns 0. Returns 1 when no suitable installation is found.
    """
    devnull = open(os.devnull, 'w')
    # Probe well-known clang binary names; `which` exit status 0 means found.
    for clang in ('clang', 'clang-3.8', 'clang-3.5'):
        if subprocess.call(['which', clang], stdout=devnull, stderr=devnull) != 0:
            continue
        # decode: check_output returns bytes on Python 3.
        res = subprocess.check_output([clang, '-print-search-dirs']).decode("utf-8")
        for line in res.splitlines():
            if not line.startswith('libraries:'):
                continue
            # Line looks like "libraries: =/path/a:/path/b"; keep the paths.
            libs = line.split('=', 1)[1].split(':')
            for lib in libs:
                # Keep only clang resource dirs that contain a lib/ subdir.
                if '/clang/' not in lib or not os.path.isdir(lib + '/lib'):
                    continue
                print(os.path.abspath(lib))
                print(clang)
                print(clang.replace('clang', 'clang++'))
                return 0
    print('Could not find the LLVM lib dir')
    return 1


if __name__ == '__main__':
    sys.exit(main())
|
<commit_before># Copyright (C) 2017 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
import sys
def main():
    """Locate a usable clang and print its LLVM lib dir and binary names.

    On success prints three lines -- the absolute clang resource lib
    directory, the clang binary name, and the matching clang++ name --
    and returns 0. Returns 1 when no suitable installation is found.
    """
    devnull = open(os.devnull, 'w')
    # Probe well-known clang binary names; `which` exit status 0 means found.
    for clang in ('clang', 'clang-3.8', 'clang-3.5'):
        if subprocess.call(['which', clang], stdout=devnull, stderr=devnull) != 0:
            continue
        # FIX: decode the subprocess output so the str operations below
        # also work under Python 3 (check_output returns bytes there),
        # and use print() instead of the Python-2-only print statement.
        res = subprocess.check_output([clang, '-print-search-dirs']).decode('utf-8')
        for line in res.splitlines():
            if not line.startswith('libraries:'):
                continue
            # Line looks like "libraries: =/path/a:/path/b"; keep the paths.
            libs = line.split('=', 1)[1].split(':')
            for lib in libs:
                # Keep only clang resource dirs that contain a lib/ subdir.
                if '/clang/' not in lib or not os.path.isdir(lib + '/lib'):
                    continue
                print(os.path.abspath(lib))
                print(clang)
                print(clang.replace('clang', 'clang++'))
                return 0
    print('Could not find the LLVM lib dir')
    return 1


if __name__ == '__main__':
    sys.exit(main())
<commit_msg>gn: Fix issue with finding llvm when using python3
With python3, subprocess output is a byte sequence. This needs to be
decoded to string so that the string functions work. Fix it so we can
find LLVM when building perfetto.
Also fix 'print' operator which is a function in python3.
Bug: 147789115
Signed-off-by: Joel Fernandes <89f39a38232ac523c7644e47b6ca6563177e40b4@google.com>
Change-Id: I4ab9b3c248d471e7ab5a27559152a1954ca43108<commit_after>
|
# Copyright (C) 2017 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
import sys
def main():
    """Locate a usable clang and print its LLVM lib dir and binary names.

    On success prints three lines -- the absolute clang resource lib
    directory, the clang binary name, and the matching clang++ name --
    and returns 0. Returns 1 when no suitable installation is found.
    """
    devnull = open(os.devnull, 'w')
    # Probe well-known clang binary names; `which` exit status 0 means found.
    for clang in ('clang', 'clang-3.8', 'clang-3.5'):
        if subprocess.call(['which', clang], stdout=devnull, stderr=devnull) != 0:
            continue
        # decode: check_output returns bytes on Python 3.
        res = subprocess.check_output([clang, '-print-search-dirs']).decode("utf-8")
        for line in res.splitlines():
            if not line.startswith('libraries:'):
                continue
            # Line looks like "libraries: =/path/a:/path/b"; keep the paths.
            libs = line.split('=', 1)[1].split(':')
            for lib in libs:
                # Keep only clang resource dirs that contain a lib/ subdir.
                if '/clang/' not in lib or not os.path.isdir(lib + '/lib'):
                    continue
                print(os.path.abspath(lib))
                print(clang)
                print(clang.replace('clang', 'clang++'))
                return 0
    print('Could not find the LLVM lib dir')
    return 1


if __name__ == '__main__':
    sys.exit(main())
|
# Copyright (C) 2017 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
import sys
def main():
    """Locate a usable clang and print its LLVM lib dir and binary names.

    On success prints three lines -- the absolute clang resource lib
    directory, the clang binary name, and the matching clang++ name --
    and returns 0. Returns 1 when no suitable installation is found.
    """
    devnull = open(os.devnull, 'w')
    # Probe well-known clang binary names; `which` exit status 0 means found.
    for clang in ('clang', 'clang-3.8', 'clang-3.5'):
        if subprocess.call(['which', clang], stdout=devnull, stderr=devnull) != 0:
            continue
        # FIX: decode the subprocess output so the str operations below
        # also work under Python 3 (check_output returns bytes there),
        # and use print() instead of the Python-2-only print statement.
        res = subprocess.check_output([clang, '-print-search-dirs']).decode('utf-8')
        for line in res.splitlines():
            if not line.startswith('libraries:'):
                continue
            # Line looks like "libraries: =/path/a:/path/b"; keep the paths.
            libs = line.split('=', 1)[1].split(':')
            for lib in libs:
                # Keep only clang resource dirs that contain a lib/ subdir.
                if '/clang/' not in lib or not os.path.isdir(lib + '/lib'):
                    continue
                print(os.path.abspath(lib))
                print(clang)
                print(clang.replace('clang', 'clang++'))
                return 0
    print('Could not find the LLVM lib dir')
    return 1


if __name__ == '__main__':
    sys.exit(main())
gn: Fix issue with finding llvm when using python3
With python3, subprocess output is a byte sequence. This needs to be
decoded to string so that the string functions work. Fix it so we can
find LLVM when building perfetto.
Also fix 'print' operator which is a function in python3.
Bug: 147789115
Signed-off-by: Joel Fernandes <89f39a38232ac523c7644e47b6ca6563177e40b4@google.com>
Change-Id: I4ab9b3c248d471e7ab5a27559152a1954ca43108# Copyright (C) 2017 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
import sys
def main():
    """Locate a usable clang and print its LLVM lib dir and binary names.

    On success prints three lines -- the absolute clang resource lib
    directory, the clang binary name, and the matching clang++ name --
    and returns 0. Returns 1 when no suitable installation is found.
    """
    devnull = open(os.devnull, 'w')
    # Probe well-known clang binary names; `which` exit status 0 means found.
    for clang in ('clang', 'clang-3.8', 'clang-3.5'):
        if subprocess.call(['which', clang], stdout=devnull, stderr=devnull) != 0:
            continue
        # decode: check_output returns bytes on Python 3.
        res = subprocess.check_output([clang, '-print-search-dirs']).decode("utf-8")
        for line in res.splitlines():
            if not line.startswith('libraries:'):
                continue
            # Line looks like "libraries: =/path/a:/path/b"; keep the paths.
            libs = line.split('=', 1)[1].split(':')
            for lib in libs:
                # Keep only clang resource dirs that contain a lib/ subdir.
                if '/clang/' not in lib or not os.path.isdir(lib + '/lib'):
                    continue
                print(os.path.abspath(lib))
                print(clang)
                print(clang.replace('clang', 'clang++'))
                return 0
    print('Could not find the LLVM lib dir')
    return 1


if __name__ == '__main__':
    sys.exit(main())
|
<commit_before># Copyright (C) 2017 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
import sys
def main():
    """Locate a usable clang and print its LLVM lib dir and binary names.

    On success prints three lines -- the absolute clang resource lib
    directory, the clang binary name, and the matching clang++ name --
    and returns 0. Returns 1 when no suitable installation is found.
    """
    devnull = open(os.devnull, 'w')
    # Probe well-known clang binary names; `which` exit status 0 means found.
    for clang in ('clang', 'clang-3.8', 'clang-3.5'):
        if subprocess.call(['which', clang], stdout=devnull, stderr=devnull) != 0:
            continue
        # FIX: decode the subprocess output so the str operations below
        # also work under Python 3 (check_output returns bytes there),
        # and use print() instead of the Python-2-only print statement.
        res = subprocess.check_output([clang, '-print-search-dirs']).decode('utf-8')
        for line in res.splitlines():
            if not line.startswith('libraries:'):
                continue
            # Line looks like "libraries: =/path/a:/path/b"; keep the paths.
            libs = line.split('=', 1)[1].split(':')
            for lib in libs:
                # Keep only clang resource dirs that contain a lib/ subdir.
                if '/clang/' not in lib or not os.path.isdir(lib + '/lib'):
                    continue
                print(os.path.abspath(lib))
                print(clang)
                print(clang.replace('clang', 'clang++'))
                return 0
    print('Could not find the LLVM lib dir')
    return 1


if __name__ == '__main__':
    sys.exit(main())
<commit_msg>gn: Fix issue with finding llvm when using python3
With python3, subprocess output is a byte sequence. This needs to be
decoded to string so that the string functions work. Fix it so we can
find LLVM when building perfetto.
Also fix 'print' operator which is a function in python3.
Bug: 147789115
Signed-off-by: Joel Fernandes <89f39a38232ac523c7644e47b6ca6563177e40b4@google.com>
Change-Id: I4ab9b3c248d471e7ab5a27559152a1954ca43108<commit_after># Copyright (C) 2017 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import subprocess
import sys
def main():
    """Locate a usable clang and print its LLVM lib dir and binary names.

    On success prints three lines -- the absolute clang resource lib
    directory, the clang binary name, and the matching clang++ name --
    and returns 0. Returns 1 when no suitable installation is found.
    """
    devnull = open(os.devnull, 'w')
    # Probe well-known clang binary names; `which` exit status 0 means found.
    for clang in ('clang', 'clang-3.8', 'clang-3.5'):
        if subprocess.call(['which', clang], stdout=devnull, stderr=devnull) != 0:
            continue
        # decode: check_output returns bytes on Python 3.
        res = subprocess.check_output([clang, '-print-search-dirs']).decode("utf-8")
        for line in res.splitlines():
            if not line.startswith('libraries:'):
                continue
            # Line looks like "libraries: =/path/a:/path/b"; keep the paths.
            libs = line.split('=', 1)[1].split(':')
            for lib in libs:
                # Keep only clang resource dirs that contain a lib/ subdir.
                if '/clang/' not in lib or not os.path.isdir(lib + '/lib'):
                    continue
                print(os.path.abspath(lib))
                print(clang)
                print(clang.replace('clang', 'clang++'))
                return 0
    print('Could not find the LLVM lib dir')
    return 1


if __name__ == '__main__':
    sys.exit(main())
|
d3febefc0927968ca8e5040c74478a81aef31692
|
ml_adv_logistic_reg/python_code/advanced_log_reg.py
|
ml_adv_logistic_reg/python_code/advanced_log_reg.py
|
from get_data_from_source import GetDataFromSource
from regularized_log_reg import RegularizedLogReg
import matplotlib.pyplot as plt
class AdvancedLogReg(RegularizedLogReg):
    # Extends the regularized logistic regression model.

    def test_something(self):
        # Debug helper: dump self.m and self.n (presumably sample and
        # feature counts inherited from RegularizedLogReg -- TODO confirm).
        # NOTE(review): Python-2 print statement; needs print() on Python 3.
        print self.m, self.n
|
from get_data_from_source import GetDataFromSource
from regularized_log_reg import RegularizedLogReg
import matplotlib.pyplot as plt
class AdvancedLogReg(RegularizedLogReg):
    # Extends the regularized logistic regression model.

    def test_something(self):
        # Debug helper: dump self.m, self.n and the matrix self.X
        # (presumably sample count, feature count and the design matrix
        # inherited from RegularizedLogReg -- TODO confirm).
        # NOTE(review): Python-2 print statements; need print() on Python 3.
        print self.m, self.n
        print self.X

    # def oneVsAll(self, theta_len, lambda_val):
|
Add preprocessing steps for mat input
|
Add preprocessing steps for mat input
|
Python
|
mit
|
pmb311/Machine_Learning,pmb311/Machine_Learning,pmb311/Machine_Learning,pmb311/Machine_Learning
|
from get_data_from_source import GetDataFromSource
from regularized_log_reg import RegularizedLogReg
import matplotlib.pyplot as plt
class AdvancedLogReg(RegularizedLogReg):
def test_something(self):
print self.m, self.nAdd preprocessing steps for mat input
|
from get_data_from_source import GetDataFromSource
from regularized_log_reg import RegularizedLogReg
import matplotlib.pyplot as plt
class AdvancedLogReg(RegularizedLogReg):
    # Extends the regularized logistic regression model.

    def test_something(self):
        # Debug helper: dump self.m, self.n and the matrix self.X
        # (presumably sample count, feature count and the design matrix
        # inherited from RegularizedLogReg -- TODO confirm).
        # NOTE(review): Python-2 print statements; need print() on Python 3.
        print self.m, self.n
        print self.X

    # def oneVsAll(self, theta_len, lambda_val):
|
<commit_before>from get_data_from_source import GetDataFromSource
from regularized_log_reg import RegularizedLogReg
import matplotlib.pyplot as plt
class AdvancedLogReg(RegularizedLogReg):
def test_something(self):
print self.m, self.n<commit_msg>Add preprocessing steps for mat input<commit_after>
|
from get_data_from_source import GetDataFromSource
from regularized_log_reg import RegularizedLogReg
import matplotlib.pyplot as plt
class AdvancedLogReg(RegularizedLogReg):
    # Extends the regularized logistic regression model.

    def test_something(self):
        # Debug helper: dump self.m, self.n and the matrix self.X
        # (presumably sample count, feature count and the design matrix
        # inherited from RegularizedLogReg -- TODO confirm).
        # NOTE(review): Python-2 print statements; need print() on Python 3.
        print self.m, self.n
        print self.X

    # def oneVsAll(self, theta_len, lambda_val):
|
from get_data_from_source import GetDataFromSource
from regularized_log_reg import RegularizedLogReg
import matplotlib.pyplot as plt
class AdvancedLogReg(RegularizedLogReg):
def test_something(self):
print self.m, self.nAdd preprocessing steps for mat inputfrom get_data_from_source import GetDataFromSource
from regularized_log_reg import RegularizedLogReg
import matplotlib.pyplot as plt
class AdvancedLogReg(RegularizedLogReg):
def test_something(self):
print self.m, self.n
print self.X
# def oneVsAll(self, theta_len, lambda_val):
|
<commit_before>from get_data_from_source import GetDataFromSource
from regularized_log_reg import RegularizedLogReg
import matplotlib.pyplot as plt
class AdvancedLogReg(RegularizedLogReg):
def test_something(self):
print self.m, self.n<commit_msg>Add preprocessing steps for mat input<commit_after>from get_data_from_source import GetDataFromSource
from regularized_log_reg import RegularizedLogReg
import matplotlib.pyplot as plt
class AdvancedLogReg(RegularizedLogReg):
def test_something(self):
print self.m, self.n
print self.X
# def oneVsAll(self, theta_len, lambda_val):
|
db15dc066e238022dcf23559d882a29fbc42a90b
|
mxfield/validators.py
|
mxfield/validators.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import re
from django.core import validators
from django.utils.deconstruct import deconstructible
from django.utils.translation import ugettext_lazy as _
@deconstructible
class CURPValidator(validators.RegexValidator):
regex = re.compile(r'^([A-Z][A,E,I,O,U,X][A-Z]{2}[0-9]{2}[0-1][0-9][0-3][0-9][M,H][A-Z]{2}[B,C,D,F,G,H,J,K,L,M,N,Ñ,P,Q,R,S,T,V,W,X,Y,Z]{3}[0-9,A-Z][0-9])$', re.IGNORECASE)
message = _('Enter a valid CURP.')
|
Add class validator for CURP field
|
Add class validator for CURP field
|
Python
|
mit
|
krescruz/django-mxfield
|
Add class validator for CURP field
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import re
from django.core import validators
from django.utils.deconstruct import deconstructible
from django.utils.translation import ugettext_lazy as _
@deconstructible
class CURPValidator(validators.RegexValidator):
regex = re.compile(r'^([A-Z][A,E,I,O,U,X][A-Z]{2}[0-9]{2}[0-1][0-9][0-3][0-9][M,H][A-Z]{2}[B,C,D,F,G,H,J,K,L,M,N,Ñ,P,Q,R,S,T,V,W,X,Y,Z]{3}[0-9,A-Z][0-9])$', re.IGNORECASE)
message = _('Enter a valid CURP.')
|
<commit_before><commit_msg>Add class validator for CURP field<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import re
from django.core import validators
from django.utils.deconstruct import deconstructible
from django.utils.translation import ugettext_lazy as _
@deconstructible
class CURPValidator(validators.RegexValidator):
regex = re.compile(r'^([A-Z][A,E,I,O,U,X][A-Z]{2}[0-9]{2}[0-1][0-9][0-3][0-9][M,H][A-Z]{2}[B,C,D,F,G,H,J,K,L,M,N,Ñ,P,Q,R,S,T,V,W,X,Y,Z]{3}[0-9,A-Z][0-9])$', re.IGNORECASE)
message = _('Enter a valid CURP.')
|
Add class validator for CURP field#!/usr/bin/env python
# -*- coding: utf-8 -*-
import re
from django.core import validators
from django.utils.deconstruct import deconstructible
from django.utils.translation import ugettext_lazy as _
@deconstructible
class CURPValidator(validators.RegexValidator):
regex = re.compile(r'^([A-Z][A,E,I,O,U,X][A-Z]{2}[0-9]{2}[0-1][0-9][0-3][0-9][M,H][A-Z]{2}[B,C,D,F,G,H,J,K,L,M,N,Ñ,P,Q,R,S,T,V,W,X,Y,Z]{3}[0-9,A-Z][0-9])$', re.IGNORECASE)
message = _('Enter a valid CURP.')
|
<commit_before><commit_msg>Add class validator for CURP field<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import re
from django.core import validators
from django.utils.deconstruct import deconstructible
from django.utils.translation import ugettext_lazy as _
@deconstructible
class CURPValidator(validators.RegexValidator):
regex = re.compile(r'^([A-Z][A,E,I,O,U,X][A-Z]{2}[0-9]{2}[0-1][0-9][0-3][0-9][M,H][A-Z]{2}[B,C,D,F,G,H,J,K,L,M,N,Ñ,P,Q,R,S,T,V,W,X,Y,Z]{3}[0-9,A-Z][0-9])$', re.IGNORECASE)
message = _('Enter a valid CURP.')
|
|
ab9ec5d7b2e8675cb9e7593a8adc0a0e9f0955bb
|
IPython/html/widgets/__init__.py
|
IPython/html/widgets/__init__.py
|
from .widget import Widget, DOMWidget, CallbackDispatcher, register
from .widget_bool import Checkbox, ToggleButton
from .widget_button import Button
from .widget_box import Box, Popup, FlexBox, HBox, VBox
from .widget_float import FloatText, BoundedFloatText, FloatSlider, FloatProgress, FloatRangeSlider
from .widget_image import Image
from .widget_int import IntText, BoundedIntText, IntSlider, IntProgress, IntRangeSlider
from .widget_output import Output
from .widget_selection import RadioButtons, ToggleButtons, Dropdown, Select
from .widget_selectioncontainer import Tab, Accordion
from .widget_string import HTML, Latex, Text, Textarea
from .interaction import interact, interactive, fixed, interact_manual
from .widget_link import Link, link, DirectionalLink, dlink
# Deprecated classes
from .widget_bool import CheckboxWidget, ToggleButtonWidget
from .widget_button import ButtonWidget
from .widget_box import ContainerWidget, PopupWidget
from .widget_float import FloatTextWidget, BoundedFloatTextWidget, FloatSliderWidget, FloatProgressWidget
from .widget_image import ImageWidget
from .widget_int import IntTextWidget, BoundedIntTextWidget, IntSliderWidget, IntProgressWidget
from .widget_selection import RadioButtonsWidget, ToggleButtonsWidget, DropdownWidget, SelectWidget
from .widget_selectioncontainer import TabWidget, AccordionWidget
from .widget_string import HTMLWidget, LatexWidget, TextWidget, TextareaWidget
# Warn on import
from warnings import warn
warn("""The widget API is still considered experimental and may change in the future.""", FutureWarning, stacklevel=2)
|
from .widget import Widget, DOMWidget, CallbackDispatcher, register
from .widget_bool import Checkbox, ToggleButton
from .widget_button import Button
from .widget_box import Box, Popup, FlexBox, HBox, VBox
from .widget_float import FloatText, BoundedFloatText, FloatSlider, FloatProgress, FloatRangeSlider
from .widget_image import Image
from .widget_int import IntText, BoundedIntText, IntSlider, IntProgress, IntRangeSlider
from .widget_output import Output
from .widget_selection import RadioButtons, ToggleButtons, Dropdown, Select
from .widget_selectioncontainer import Tab, Accordion
from .widget_string import HTML, Latex, Text, Textarea
from .interaction import interact, interactive, fixed, interact_manual
from .widget_link import Link, link, DirectionalLink, dlink
# Deprecated classes
from .widget_bool import CheckboxWidget, ToggleButtonWidget
from .widget_button import ButtonWidget
from .widget_box import ContainerWidget, PopupWidget
from .widget_float import FloatTextWidget, BoundedFloatTextWidget, FloatSliderWidget, FloatProgressWidget
from .widget_image import ImageWidget
from .widget_int import IntTextWidget, BoundedIntTextWidget, IntSliderWidget, IntProgressWidget
from .widget_selection import RadioButtonsWidget, ToggleButtonsWidget, DropdownWidget, SelectWidget
from .widget_selectioncontainer import TabWidget, AccordionWidget
from .widget_string import HTMLWidget, LatexWidget, TextWidget, TextareaWidget
# Warn on import
from warnings import warn
warn("IPython widgets are experimental and may change in the future.", FutureWarning, stacklevel=2)
|
Make the widget error message shorter and more understandable.
|
Make the widget error message shorter and more understandable.
|
Python
|
bsd-3-clause
|
ipython/ipython,ipython/ipython
|
from .widget import Widget, DOMWidget, CallbackDispatcher, register
from .widget_bool import Checkbox, ToggleButton
from .widget_button import Button
from .widget_box import Box, Popup, FlexBox, HBox, VBox
from .widget_float import FloatText, BoundedFloatText, FloatSlider, FloatProgress, FloatRangeSlider
from .widget_image import Image
from .widget_int import IntText, BoundedIntText, IntSlider, IntProgress, IntRangeSlider
from .widget_output import Output
from .widget_selection import RadioButtons, ToggleButtons, Dropdown, Select
from .widget_selectioncontainer import Tab, Accordion
from .widget_string import HTML, Latex, Text, Textarea
from .interaction import interact, interactive, fixed, interact_manual
from .widget_link import Link, link, DirectionalLink, dlink
# Deprecated classes
from .widget_bool import CheckboxWidget, ToggleButtonWidget
from .widget_button import ButtonWidget
from .widget_box import ContainerWidget, PopupWidget
from .widget_float import FloatTextWidget, BoundedFloatTextWidget, FloatSliderWidget, FloatProgressWidget
from .widget_image import ImageWidget
from .widget_int import IntTextWidget, BoundedIntTextWidget, IntSliderWidget, IntProgressWidget
from .widget_selection import RadioButtonsWidget, ToggleButtonsWidget, DropdownWidget, SelectWidget
from .widget_selectioncontainer import TabWidget, AccordionWidget
from .widget_string import HTMLWidget, LatexWidget, TextWidget, TextareaWidget
# Warn on import
from warnings import warn
warn("""The widget API is still considered experimental and may change in the future.""", FutureWarning, stacklevel=2)
Make the widget error message shorter and more understandable.
|
from .widget import Widget, DOMWidget, CallbackDispatcher, register
from .widget_bool import Checkbox, ToggleButton
from .widget_button import Button
from .widget_box import Box, Popup, FlexBox, HBox, VBox
from .widget_float import FloatText, BoundedFloatText, FloatSlider, FloatProgress, FloatRangeSlider
from .widget_image import Image
from .widget_int import IntText, BoundedIntText, IntSlider, IntProgress, IntRangeSlider
from .widget_output import Output
from .widget_selection import RadioButtons, ToggleButtons, Dropdown, Select
from .widget_selectioncontainer import Tab, Accordion
from .widget_string import HTML, Latex, Text, Textarea
from .interaction import interact, interactive, fixed, interact_manual
from .widget_link import Link, link, DirectionalLink, dlink
# Deprecated classes
from .widget_bool import CheckboxWidget, ToggleButtonWidget
from .widget_button import ButtonWidget
from .widget_box import ContainerWidget, PopupWidget
from .widget_float import FloatTextWidget, BoundedFloatTextWidget, FloatSliderWidget, FloatProgressWidget
from .widget_image import ImageWidget
from .widget_int import IntTextWidget, BoundedIntTextWidget, IntSliderWidget, IntProgressWidget
from .widget_selection import RadioButtonsWidget, ToggleButtonsWidget, DropdownWidget, SelectWidget
from .widget_selectioncontainer import TabWidget, AccordionWidget
from .widget_string import HTMLWidget, LatexWidget, TextWidget, TextareaWidget
# Warn on import
from warnings import warn
warn("IPython widgets are experimental and may change in the future.", FutureWarning, stacklevel=2)
|
<commit_before>from .widget import Widget, DOMWidget, CallbackDispatcher, register
from .widget_bool import Checkbox, ToggleButton
from .widget_button import Button
from .widget_box import Box, Popup, FlexBox, HBox, VBox
from .widget_float import FloatText, BoundedFloatText, FloatSlider, FloatProgress, FloatRangeSlider
from .widget_image import Image
from .widget_int import IntText, BoundedIntText, IntSlider, IntProgress, IntRangeSlider
from .widget_output import Output
from .widget_selection import RadioButtons, ToggleButtons, Dropdown, Select
from .widget_selectioncontainer import Tab, Accordion
from .widget_string import HTML, Latex, Text, Textarea
from .interaction import interact, interactive, fixed, interact_manual
from .widget_link import Link, link, DirectionalLink, dlink
# Deprecated classes
from .widget_bool import CheckboxWidget, ToggleButtonWidget
from .widget_button import ButtonWidget
from .widget_box import ContainerWidget, PopupWidget
from .widget_float import FloatTextWidget, BoundedFloatTextWidget, FloatSliderWidget, FloatProgressWidget
from .widget_image import ImageWidget
from .widget_int import IntTextWidget, BoundedIntTextWidget, IntSliderWidget, IntProgressWidget
from .widget_selection import RadioButtonsWidget, ToggleButtonsWidget, DropdownWidget, SelectWidget
from .widget_selectioncontainer import TabWidget, AccordionWidget
from .widget_string import HTMLWidget, LatexWidget, TextWidget, TextareaWidget
# Warn on import
from warnings import warn
warn("""The widget API is still considered experimental and may change in the future.""", FutureWarning, stacklevel=2)
<commit_msg>Make the widget error message shorter and more understandable.<commit_after>
|
from .widget import Widget, DOMWidget, CallbackDispatcher, register
from .widget_bool import Checkbox, ToggleButton
from .widget_button import Button
from .widget_box import Box, Popup, FlexBox, HBox, VBox
from .widget_float import FloatText, BoundedFloatText, FloatSlider, FloatProgress, FloatRangeSlider
from .widget_image import Image
from .widget_int import IntText, BoundedIntText, IntSlider, IntProgress, IntRangeSlider
from .widget_output import Output
from .widget_selection import RadioButtons, ToggleButtons, Dropdown, Select
from .widget_selectioncontainer import Tab, Accordion
from .widget_string import HTML, Latex, Text, Textarea
from .interaction import interact, interactive, fixed, interact_manual
from .widget_link import Link, link, DirectionalLink, dlink
# Deprecated classes
from .widget_bool import CheckboxWidget, ToggleButtonWidget
from .widget_button import ButtonWidget
from .widget_box import ContainerWidget, PopupWidget
from .widget_float import FloatTextWidget, BoundedFloatTextWidget, FloatSliderWidget, FloatProgressWidget
from .widget_image import ImageWidget
from .widget_int import IntTextWidget, BoundedIntTextWidget, IntSliderWidget, IntProgressWidget
from .widget_selection import RadioButtonsWidget, ToggleButtonsWidget, DropdownWidget, SelectWidget
from .widget_selectioncontainer import TabWidget, AccordionWidget
from .widget_string import HTMLWidget, LatexWidget, TextWidget, TextareaWidget
# Warn on import
from warnings import warn
warn("IPython widgets are experimental and may change in the future.", FutureWarning, stacklevel=2)
|
from .widget import Widget, DOMWidget, CallbackDispatcher, register
from .widget_bool import Checkbox, ToggleButton
from .widget_button import Button
from .widget_box import Box, Popup, FlexBox, HBox, VBox
from .widget_float import FloatText, BoundedFloatText, FloatSlider, FloatProgress, FloatRangeSlider
from .widget_image import Image
from .widget_int import IntText, BoundedIntText, IntSlider, IntProgress, IntRangeSlider
from .widget_output import Output
from .widget_selection import RadioButtons, ToggleButtons, Dropdown, Select
from .widget_selectioncontainer import Tab, Accordion
from .widget_string import HTML, Latex, Text, Textarea
from .interaction import interact, interactive, fixed, interact_manual
from .widget_link import Link, link, DirectionalLink, dlink
# Deprecated classes
from .widget_bool import CheckboxWidget, ToggleButtonWidget
from .widget_button import ButtonWidget
from .widget_box import ContainerWidget, PopupWidget
from .widget_float import FloatTextWidget, BoundedFloatTextWidget, FloatSliderWidget, FloatProgressWidget
from .widget_image import ImageWidget
from .widget_int import IntTextWidget, BoundedIntTextWidget, IntSliderWidget, IntProgressWidget
from .widget_selection import RadioButtonsWidget, ToggleButtonsWidget, DropdownWidget, SelectWidget
from .widget_selectioncontainer import TabWidget, AccordionWidget
from .widget_string import HTMLWidget, LatexWidget, TextWidget, TextareaWidget
# Warn on import
from warnings import warn
warn("""The widget API is still considered experimental and may change in the future.""", FutureWarning, stacklevel=2)
Make the widget error message shorter and more understandable.from .widget import Widget, DOMWidget, CallbackDispatcher, register
from .widget_bool import Checkbox, ToggleButton
from .widget_button import Button
from .widget_box import Box, Popup, FlexBox, HBox, VBox
from .widget_float import FloatText, BoundedFloatText, FloatSlider, FloatProgress, FloatRangeSlider
from .widget_image import Image
from .widget_int import IntText, BoundedIntText, IntSlider, IntProgress, IntRangeSlider
from .widget_output import Output
from .widget_selection import RadioButtons, ToggleButtons, Dropdown, Select
from .widget_selectioncontainer import Tab, Accordion
from .widget_string import HTML, Latex, Text, Textarea
from .interaction import interact, interactive, fixed, interact_manual
from .widget_link import Link, link, DirectionalLink, dlink
# Deprecated classes
from .widget_bool import CheckboxWidget, ToggleButtonWidget
from .widget_button import ButtonWidget
from .widget_box import ContainerWidget, PopupWidget
from .widget_float import FloatTextWidget, BoundedFloatTextWidget, FloatSliderWidget, FloatProgressWidget
from .widget_image import ImageWidget
from .widget_int import IntTextWidget, BoundedIntTextWidget, IntSliderWidget, IntProgressWidget
from .widget_selection import RadioButtonsWidget, ToggleButtonsWidget, DropdownWidget, SelectWidget
from .widget_selectioncontainer import TabWidget, AccordionWidget
from .widget_string import HTMLWidget, LatexWidget, TextWidget, TextareaWidget
# Warn on import
from warnings import warn
warn("IPython widgets are experimental and may change in the future.", FutureWarning, stacklevel=2)
|
<commit_before>from .widget import Widget, DOMWidget, CallbackDispatcher, register
from .widget_bool import Checkbox, ToggleButton
from .widget_button import Button
from .widget_box import Box, Popup, FlexBox, HBox, VBox
from .widget_float import FloatText, BoundedFloatText, FloatSlider, FloatProgress, FloatRangeSlider
from .widget_image import Image
from .widget_int import IntText, BoundedIntText, IntSlider, IntProgress, IntRangeSlider
from .widget_output import Output
from .widget_selection import RadioButtons, ToggleButtons, Dropdown, Select
from .widget_selectioncontainer import Tab, Accordion
from .widget_string import HTML, Latex, Text, Textarea
from .interaction import interact, interactive, fixed, interact_manual
from .widget_link import Link, link, DirectionalLink, dlink
# Deprecated classes
from .widget_bool import CheckboxWidget, ToggleButtonWidget
from .widget_button import ButtonWidget
from .widget_box import ContainerWidget, PopupWidget
from .widget_float import FloatTextWidget, BoundedFloatTextWidget, FloatSliderWidget, FloatProgressWidget
from .widget_image import ImageWidget
from .widget_int import IntTextWidget, BoundedIntTextWidget, IntSliderWidget, IntProgressWidget
from .widget_selection import RadioButtonsWidget, ToggleButtonsWidget, DropdownWidget, SelectWidget
from .widget_selectioncontainer import TabWidget, AccordionWidget
from .widget_string import HTMLWidget, LatexWidget, TextWidget, TextareaWidget
# Warn on import
from warnings import warn
warn("""The widget API is still considered experimental and may change in the future.""", FutureWarning, stacklevel=2)
<commit_msg>Make the widget error message shorter and more understandable.<commit_after>from .widget import Widget, DOMWidget, CallbackDispatcher, register
from .widget_bool import Checkbox, ToggleButton
from .widget_button import Button
from .widget_box import Box, Popup, FlexBox, HBox, VBox
from .widget_float import FloatText, BoundedFloatText, FloatSlider, FloatProgress, FloatRangeSlider
from .widget_image import Image
from .widget_int import IntText, BoundedIntText, IntSlider, IntProgress, IntRangeSlider
from .widget_output import Output
from .widget_selection import RadioButtons, ToggleButtons, Dropdown, Select
from .widget_selectioncontainer import Tab, Accordion
from .widget_string import HTML, Latex, Text, Textarea
from .interaction import interact, interactive, fixed, interact_manual
from .widget_link import Link, link, DirectionalLink, dlink
# Deprecated classes
from .widget_bool import CheckboxWidget, ToggleButtonWidget
from .widget_button import ButtonWidget
from .widget_box import ContainerWidget, PopupWidget
from .widget_float import FloatTextWidget, BoundedFloatTextWidget, FloatSliderWidget, FloatProgressWidget
from .widget_image import ImageWidget
from .widget_int import IntTextWidget, BoundedIntTextWidget, IntSliderWidget, IntProgressWidget
from .widget_selection import RadioButtonsWidget, ToggleButtonsWidget, DropdownWidget, SelectWidget
from .widget_selectioncontainer import TabWidget, AccordionWidget
from .widget_string import HTMLWidget, LatexWidget, TextWidget, TextareaWidget
# Warn on import
from warnings import warn
warn("IPython widgets are experimental and may change in the future.", FutureWarning, stacklevel=2)
|
37f365449089e45dc68f32ff7dbd6db781ad3b9f
|
LiSE/LiSE/tests/test_examples.py
|
LiSE/LiSE/tests/test_examples.py
|
from LiSE.examples import college, kobold, polygons, sickle
def test_college(engy):
college.install(engy)
engy.turn = 10 # wake up the students
engy.next_turn()
def test_kobold(engy):
kobold.inittest(engy, shrubberies=20, kobold_sprint_chance=.9)
for i in range(10):
engy.next_turn()
def test_polygons(engy):
polygons.install(engy)
for i in range(10):
engy.next_turn()
def test_sickle(engy):
sickle.install(engy)
for i in range(10):
engy.next_turn()
|
from LiSE.examples import college, kobold, polygons, sickle
def test_college(engy):
college.install(engy)
engy.turn = 10 # wake up the students
engy.next_turn()
def test_kobold(engy):
kobold.inittest(engy, shrubberies=20, kobold_sprint_chance=.9)
for i in range(10):
engy.next_turn()
def test_polygons(engy):
polygons.install(engy)
for i in range(10):
engy.next_turn()
def test_sickle(engy):
sickle.install(engy)
for i in range(100):
engy.next_turn()
|
Use more turns in test_sickle
|
Use more turns in test_sickle
The weird bugs only showed up late
|
Python
|
agpl-3.0
|
LogicalDash/LiSE,LogicalDash/LiSE
|
from LiSE.examples import college, kobold, polygons, sickle
def test_college(engy):
college.install(engy)
engy.turn = 10 # wake up the students
engy.next_turn()
def test_kobold(engy):
kobold.inittest(engy, shrubberies=20, kobold_sprint_chance=.9)
for i in range(10):
engy.next_turn()
def test_polygons(engy):
polygons.install(engy)
for i in range(10):
engy.next_turn()
def test_sickle(engy):
sickle.install(engy)
for i in range(10):
engy.next_turn()
Use more turns in test_sickle
The weird bugs only showed up late
|
from LiSE.examples import college, kobold, polygons, sickle
def test_college(engy):
college.install(engy)
engy.turn = 10 # wake up the students
engy.next_turn()
def test_kobold(engy):
kobold.inittest(engy, shrubberies=20, kobold_sprint_chance=.9)
for i in range(10):
engy.next_turn()
def test_polygons(engy):
polygons.install(engy)
for i in range(10):
engy.next_turn()
def test_sickle(engy):
sickle.install(engy)
for i in range(100):
engy.next_turn()
|
<commit_before>from LiSE.examples import college, kobold, polygons, sickle
def test_college(engy):
college.install(engy)
engy.turn = 10 # wake up the students
engy.next_turn()
def test_kobold(engy):
kobold.inittest(engy, shrubberies=20, kobold_sprint_chance=.9)
for i in range(10):
engy.next_turn()
def test_polygons(engy):
polygons.install(engy)
for i in range(10):
engy.next_turn()
def test_sickle(engy):
sickle.install(engy)
for i in range(10):
engy.next_turn()
<commit_msg>Use more turns in test_sickle
The weird bugs only showed up late<commit_after>
|
from LiSE.examples import college, kobold, polygons, sickle
def test_college(engy):
college.install(engy)
engy.turn = 10 # wake up the students
engy.next_turn()
def test_kobold(engy):
kobold.inittest(engy, shrubberies=20, kobold_sprint_chance=.9)
for i in range(10):
engy.next_turn()
def test_polygons(engy):
polygons.install(engy)
for i in range(10):
engy.next_turn()
def test_sickle(engy):
sickle.install(engy)
for i in range(100):
engy.next_turn()
|
from LiSE.examples import college, kobold, polygons, sickle
def test_college(engy):
college.install(engy)
engy.turn = 10 # wake up the students
engy.next_turn()
def test_kobold(engy):
kobold.inittest(engy, shrubberies=20, kobold_sprint_chance=.9)
for i in range(10):
engy.next_turn()
def test_polygons(engy):
polygons.install(engy)
for i in range(10):
engy.next_turn()
def test_sickle(engy):
sickle.install(engy)
for i in range(10):
engy.next_turn()
Use more turns in test_sickle
The weird bugs only showed up latefrom LiSE.examples import college, kobold, polygons, sickle
def test_college(engy):
college.install(engy)
engy.turn = 10 # wake up the students
engy.next_turn()
def test_kobold(engy):
kobold.inittest(engy, shrubberies=20, kobold_sprint_chance=.9)
for i in range(10):
engy.next_turn()
def test_polygons(engy):
polygons.install(engy)
for i in range(10):
engy.next_turn()
def test_sickle(engy):
sickle.install(engy)
for i in range(100):
engy.next_turn()
|
<commit_before>from LiSE.examples import college, kobold, polygons, sickle
def test_college(engy):
college.install(engy)
engy.turn = 10 # wake up the students
engy.next_turn()
def test_kobold(engy):
kobold.inittest(engy, shrubberies=20, kobold_sprint_chance=.9)
for i in range(10):
engy.next_turn()
def test_polygons(engy):
polygons.install(engy)
for i in range(10):
engy.next_turn()
def test_sickle(engy):
sickle.install(engy)
for i in range(10):
engy.next_turn()
<commit_msg>Use more turns in test_sickle
The weird bugs only showed up late<commit_after>from LiSE.examples import college, kobold, polygons, sickle
def test_college(engy):
college.install(engy)
engy.turn = 10 # wake up the students
engy.next_turn()
def test_kobold(engy):
kobold.inittest(engy, shrubberies=20, kobold_sprint_chance=.9)
for i in range(10):
engy.next_turn()
def test_polygons(engy):
polygons.install(engy)
for i in range(10):
engy.next_turn()
def test_sickle(engy):
sickle.install(engy)
for i in range(100):
engy.next_turn()
|
da6406d1c419f18bd128af4d2d4e2578142cd783
|
zou/migrations/versions/5a291251823c_add_max_retake_parameter.py
|
zou/migrations/versions/5a291251823c_add_max_retake_parameter.py
|
"""add max retake parameter
Revision ID: 5a291251823c
Revises: 4095103c7d01
Create Date: 2022-06-29 10:56:13.556495
"""
from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils
# revision identifiers, used by Alembic.
revision = '5a291251823c'
down_revision = '4095103c7d01'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('project', sa.Column('max_retakes', sa.Integer(), nullable=True))
op.drop_index('ix_task_status_is_default', table_name='task_status')
op.create_index(op.f('ix_task_status_is_default'), 'task_status', ['is_default'], unique=False)
op.drop_column('task_type', 'for_shots')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('task_type', sa.Column('for_shots', sa.BOOLEAN(), autoincrement=False, nullable=True))
op.drop_index(op.f('ix_task_status_is_default'), table_name='task_status')
op.create_index('ix_task_status_is_default', 'task_status', ['is_default'], unique=False)
op.drop_column('project', 'max_retakes')
# ### end Alembic commands ###
|
"""add max retake parameter
Revision ID: 5a291251823c
Revises: 4095103c7d01
Create Date: 2022-06-29 10:56:13.556495
"""
from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils
# revision identifiers, used by Alembic.
revision = '5a291251823c'
down_revision = '4095103c7d01'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('project', sa.Column('max_retakes', sa.Integer(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('project', 'max_retakes')
# ### end Alembic commands ###
|
Fix max retake migration file
|
[db] Fix max retake migration file
|
Python
|
agpl-3.0
|
cgwire/zou
|
"""add max retake parameter
Revision ID: 5a291251823c
Revises: 4095103c7d01
Create Date: 2022-06-29 10:56:13.556495
"""
from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils
# revision identifiers, used by Alembic.
revision = '5a291251823c'
down_revision = '4095103c7d01'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('project', sa.Column('max_retakes', sa.Integer(), nullable=True))
op.drop_index('ix_task_status_is_default', table_name='task_status')
op.create_index(op.f('ix_task_status_is_default'), 'task_status', ['is_default'], unique=False)
op.drop_column('task_type', 'for_shots')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('task_type', sa.Column('for_shots', sa.BOOLEAN(), autoincrement=False, nullable=True))
op.drop_index(op.f('ix_task_status_is_default'), table_name='task_status')
op.create_index('ix_task_status_is_default', 'task_status', ['is_default'], unique=False)
op.drop_column('project', 'max_retakes')
# ### end Alembic commands ###
[db] Fix max retake migration file
|
"""add max retake parameter
Revision ID: 5a291251823c
Revises: 4095103c7d01
Create Date: 2022-06-29 10:56:13.556495
"""
from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils
# revision identifiers, used by Alembic.
revision = '5a291251823c'
down_revision = '4095103c7d01'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('project', sa.Column('max_retakes', sa.Integer(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('project', 'max_retakes')
# ### end Alembic commands ###
|
<commit_before>"""add max retake parameter
Revision ID: 5a291251823c
Revises: 4095103c7d01
Create Date: 2022-06-29 10:56:13.556495
"""
from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils
# revision identifiers, used by Alembic.
revision = '5a291251823c'
down_revision = '4095103c7d01'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('project', sa.Column('max_retakes', sa.Integer(), nullable=True))
op.drop_index('ix_task_status_is_default', table_name='task_status')
op.create_index(op.f('ix_task_status_is_default'), 'task_status', ['is_default'], unique=False)
op.drop_column('task_type', 'for_shots')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('task_type', sa.Column('for_shots', sa.BOOLEAN(), autoincrement=False, nullable=True))
op.drop_index(op.f('ix_task_status_is_default'), table_name='task_status')
op.create_index('ix_task_status_is_default', 'task_status', ['is_default'], unique=False)
op.drop_column('project', 'max_retakes')
# ### end Alembic commands ###
<commit_msg>[db] Fix max retake migration file<commit_after>
|
"""add max retake parameter
Revision ID: 5a291251823c
Revises: 4095103c7d01
Create Date: 2022-06-29 10:56:13.556495
"""
from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils
# revision identifiers, used by Alembic.
revision = '5a291251823c'
down_revision = '4095103c7d01'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('project', sa.Column('max_retakes', sa.Integer(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('project', 'max_retakes')
# ### end Alembic commands ###
|
"""add max retake parameter
Revision ID: 5a291251823c
Revises: 4095103c7d01
Create Date: 2022-06-29 10:56:13.556495
"""
from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils
# revision identifiers, used by Alembic.
revision = '5a291251823c'
down_revision = '4095103c7d01'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('project', sa.Column('max_retakes', sa.Integer(), nullable=True))
op.drop_index('ix_task_status_is_default', table_name='task_status')
op.create_index(op.f('ix_task_status_is_default'), 'task_status', ['is_default'], unique=False)
op.drop_column('task_type', 'for_shots')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('task_type', sa.Column('for_shots', sa.BOOLEAN(), autoincrement=False, nullable=True))
op.drop_index(op.f('ix_task_status_is_default'), table_name='task_status')
op.create_index('ix_task_status_is_default', 'task_status', ['is_default'], unique=False)
op.drop_column('project', 'max_retakes')
# ### end Alembic commands ###
[db] Fix max retake migration file"""add max retake parameter
Revision ID: 5a291251823c
Revises: 4095103c7d01
Create Date: 2022-06-29 10:56:13.556495
"""
from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils
# revision identifiers, used by Alembic.
revision = '5a291251823c'
down_revision = '4095103c7d01'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('project', sa.Column('max_retakes', sa.Integer(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('project', 'max_retakes')
# ### end Alembic commands ###
|
<commit_before>"""add max retake parameter
Revision ID: 5a291251823c
Revises: 4095103c7d01
Create Date: 2022-06-29 10:56:13.556495
"""
from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils
# revision identifiers, used by Alembic.
revision = '5a291251823c'
down_revision = '4095103c7d01'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('project', sa.Column('max_retakes', sa.Integer(), nullable=True))
op.drop_index('ix_task_status_is_default', table_name='task_status')
op.create_index(op.f('ix_task_status_is_default'), 'task_status', ['is_default'], unique=False)
op.drop_column('task_type', 'for_shots')
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('task_type', sa.Column('for_shots', sa.BOOLEAN(), autoincrement=False, nullable=True))
op.drop_index(op.f('ix_task_status_is_default'), table_name='task_status')
op.create_index('ix_task_status_is_default', 'task_status', ['is_default'], unique=False)
op.drop_column('project', 'max_retakes')
# ### end Alembic commands ###
<commit_msg>[db] Fix max retake migration file<commit_after>"""add max retake parameter
Revision ID: 5a291251823c
Revises: 4095103c7d01
Create Date: 2022-06-29 10:56:13.556495
"""
from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils
# revision identifiers, used by Alembic.
revision = '5a291251823c'
down_revision = '4095103c7d01'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('project', sa.Column('max_retakes', sa.Integer(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('project', 'max_retakes')
# ### end Alembic commands ###
|
4d1a7b48b450ebcf06c90dd618622b0ddafcba03
|
xorgauth/accounts/password_validators.py
|
xorgauth/accounts/password_validators.py
|
# -*- coding: utf-8 -*-
# Copyright (c) Polytechnique.org
# This code is distributed under the Affero General Public License version 3
import crypt
import sys
from django.core.exceptions import ObjectDoesNotExist
from django.utils.crypto import get_random_string
from . import models
class GoogleAppsPasswordValidator(object):
"""Update the Google Apps password when a user changes her password"""
def password_changed(self, raw_password, user):
# Hash the password in a way compatible with Google Apps: crypt with $6
if sys.version_info >= (3,):
password = crypt.crypt(raw_password, salt=crypt.METHOD_SHA512)
else:
password = crypt.crypt(raw_password.encode('utf-8'), '$6$' + get_random_string(16))
try:
user.gapps_password.password = password
except ObjectDoesNotExist:
models.GoogleAppsPassword.objects.create(user=user, password=password)
else:
user.gapps_password.save()
|
# -*- coding: utf-8 -*-
# Copyright (c) Polytechnique.org
# This code is distributed under the Affero General Public License version 3
import crypt
import sys
from django.core.exceptions import ObjectDoesNotExist
from django.utils.crypto import get_random_string
from . import models
class GoogleAppsPasswordValidator(object):
"""Update the Google Apps password when a user changes her password"""
def validate(self, password, user=None):
return
def password_changed(self, raw_password, user):
# Hash the password in a way compatible with Google Apps: crypt with $6
if sys.version_info >= (3,):
password = crypt.crypt(raw_password, salt=crypt.METHOD_SHA512)
else:
password = crypt.crypt(raw_password.encode('utf-8'), '$6$' + get_random_string(16))
try:
user.gapps_password.password = password
except ObjectDoesNotExist:
models.GoogleAppsPassword.objects.create(user=user, password=password)
else:
user.gapps_password.save()
|
Add missing validate method to GoogleAppsPasswordValidator
|
Add missing validate method to GoogleAppsPasswordValidator
Django complains when updating the password:
'GoogleAppsPasswordValidator' object has no attribute 'validate'
|
Python
|
agpl-3.0
|
Polytechnique-org/xorgauth,Polytechnique-org/xorgauth
|
# -*- coding: utf-8 -*-
# Copyright (c) Polytechnique.org
# This code is distributed under the Affero General Public License version 3
import crypt
import sys
from django.core.exceptions import ObjectDoesNotExist
from django.utils.crypto import get_random_string
from . import models
class GoogleAppsPasswordValidator(object):
"""Update the Google Apps password when a user changes her password"""
def password_changed(self, raw_password, user):
# Hash the password in a way compatible with Google Apps: crypt with $6
if sys.version_info >= (3,):
password = crypt.crypt(raw_password, salt=crypt.METHOD_SHA512)
else:
password = crypt.crypt(raw_password.encode('utf-8'), '$6$' + get_random_string(16))
try:
user.gapps_password.password = password
except ObjectDoesNotExist:
models.GoogleAppsPassword.objects.create(user=user, password=password)
else:
user.gapps_password.save()
Add missing validate method to GoogleAppsPasswordValidator
Django complains when updating the password:
'GoogleAppsPasswordValidator' object has no attribute 'validate'
|
# -*- coding: utf-8 -*-
# Copyright (c) Polytechnique.org
# This code is distributed under the Affero General Public License version 3
import crypt
import sys
from django.core.exceptions import ObjectDoesNotExist
from django.utils.crypto import get_random_string
from . import models
class GoogleAppsPasswordValidator(object):
"""Update the Google Apps password when a user changes her password"""
def validate(self, password, user=None):
return
def password_changed(self, raw_password, user):
# Hash the password in a way compatible with Google Apps: crypt with $6
if sys.version_info >= (3,):
password = crypt.crypt(raw_password, salt=crypt.METHOD_SHA512)
else:
password = crypt.crypt(raw_password.encode('utf-8'), '$6$' + get_random_string(16))
try:
user.gapps_password.password = password
except ObjectDoesNotExist:
models.GoogleAppsPassword.objects.create(user=user, password=password)
else:
user.gapps_password.save()
|
<commit_before># -*- coding: utf-8 -*-
# Copyright (c) Polytechnique.org
# This code is distributed under the Affero General Public License version 3
import crypt
import sys
from django.core.exceptions import ObjectDoesNotExist
from django.utils.crypto import get_random_string
from . import models
class GoogleAppsPasswordValidator(object):
"""Update the Google Apps password when a user changes her password"""
def password_changed(self, raw_password, user):
# Hash the password in a way compatible with Google Apps: crypt with $6
if sys.version_info >= (3,):
password = crypt.crypt(raw_password, salt=crypt.METHOD_SHA512)
else:
password = crypt.crypt(raw_password.encode('utf-8'), '$6$' + get_random_string(16))
try:
user.gapps_password.password = password
except ObjectDoesNotExist:
models.GoogleAppsPassword.objects.create(user=user, password=password)
else:
user.gapps_password.save()
<commit_msg>Add missing validate method to GoogleAppsPasswordValidator
Django complains when updating the password:
'GoogleAppsPasswordValidator' object has no attribute 'validate'<commit_after>
|
# -*- coding: utf-8 -*-
# Copyright (c) Polytechnique.org
# This code is distributed under the Affero General Public License version 3
import crypt
import sys
from django.core.exceptions import ObjectDoesNotExist
from django.utils.crypto import get_random_string
from . import models
class GoogleAppsPasswordValidator(object):
"""Update the Google Apps password when a user changes her password"""
def validate(self, password, user=None):
return
def password_changed(self, raw_password, user):
# Hash the password in a way compatible with Google Apps: crypt with $6
if sys.version_info >= (3,):
password = crypt.crypt(raw_password, salt=crypt.METHOD_SHA512)
else:
password = crypt.crypt(raw_password.encode('utf-8'), '$6$' + get_random_string(16))
try:
user.gapps_password.password = password
except ObjectDoesNotExist:
models.GoogleAppsPassword.objects.create(user=user, password=password)
else:
user.gapps_password.save()
|
# -*- coding: utf-8 -*-
# Copyright (c) Polytechnique.org
# This code is distributed under the Affero General Public License version 3
import crypt
import sys
from django.core.exceptions import ObjectDoesNotExist
from django.utils.crypto import get_random_string
from . import models
class GoogleAppsPasswordValidator(object):
"""Update the Google Apps password when a user changes her password"""
def password_changed(self, raw_password, user):
# Hash the password in a way compatible with Google Apps: crypt with $6
if sys.version_info >= (3,):
password = crypt.crypt(raw_password, salt=crypt.METHOD_SHA512)
else:
password = crypt.crypt(raw_password.encode('utf-8'), '$6$' + get_random_string(16))
try:
user.gapps_password.password = password
except ObjectDoesNotExist:
models.GoogleAppsPassword.objects.create(user=user, password=password)
else:
user.gapps_password.save()
Add missing validate method to GoogleAppsPasswordValidator
Django complains when updating the password:
'GoogleAppsPasswordValidator' object has no attribute 'validate'# -*- coding: utf-8 -*-
# Copyright (c) Polytechnique.org
# This code is distributed under the Affero General Public License version 3
import crypt
import sys
from django.core.exceptions import ObjectDoesNotExist
from django.utils.crypto import get_random_string
from . import models
class GoogleAppsPasswordValidator(object):
"""Update the Google Apps password when a user changes her password"""
def validate(self, password, user=None):
return
def password_changed(self, raw_password, user):
# Hash the password in a way compatible with Google Apps: crypt with $6
if sys.version_info >= (3,):
password = crypt.crypt(raw_password, salt=crypt.METHOD_SHA512)
else:
password = crypt.crypt(raw_password.encode('utf-8'), '$6$' + get_random_string(16))
try:
user.gapps_password.password = password
except ObjectDoesNotExist:
models.GoogleAppsPassword.objects.create(user=user, password=password)
else:
user.gapps_password.save()
|
<commit_before># -*- coding: utf-8 -*-
# Copyright (c) Polytechnique.org
# This code is distributed under the Affero General Public License version 3
import crypt
import sys
from django.core.exceptions import ObjectDoesNotExist
from django.utils.crypto import get_random_string
from . import models
class GoogleAppsPasswordValidator(object):
"""Update the Google Apps password when a user changes her password"""
def password_changed(self, raw_password, user):
# Hash the password in a way compatible with Google Apps: crypt with $6
if sys.version_info >= (3,):
password = crypt.crypt(raw_password, salt=crypt.METHOD_SHA512)
else:
password = crypt.crypt(raw_password.encode('utf-8'), '$6$' + get_random_string(16))
try:
user.gapps_password.password = password
except ObjectDoesNotExist:
models.GoogleAppsPassword.objects.create(user=user, password=password)
else:
user.gapps_password.save()
<commit_msg>Add missing validate method to GoogleAppsPasswordValidator
Django complains when updating the password:
'GoogleAppsPasswordValidator' object has no attribute 'validate'<commit_after># -*- coding: utf-8 -*-
# Copyright (c) Polytechnique.org
# This code is distributed under the Affero General Public License version 3
import crypt
import sys
from django.core.exceptions import ObjectDoesNotExist
from django.utils.crypto import get_random_string
from . import models
class GoogleAppsPasswordValidator(object):
"""Update the Google Apps password when a user changes her password"""
def validate(self, password, user=None):
return
def password_changed(self, raw_password, user):
# Hash the password in a way compatible with Google Apps: crypt with $6
if sys.version_info >= (3,):
password = crypt.crypt(raw_password, salt=crypt.METHOD_SHA512)
else:
password = crypt.crypt(raw_password.encode('utf-8'), '$6$' + get_random_string(16))
try:
user.gapps_password.password = password
except ObjectDoesNotExist:
models.GoogleAppsPassword.objects.create(user=user, password=password)
else:
user.gapps_password.save()
|
d96dbe9f5688e469f34c7428569eda7d2c86f3d7
|
tests/test_err.py
|
tests/test_err.py
|
# Testing use of cpl_errs
import pytest
import rasterio
from rasterio.errors import RasterioIOError
def test_io_error(tmpdir):
with pytest.raises(RasterioIOError) as exc_info:
rasterio.open(str(tmpdir.join('foo.tif')))
msg = exc_info.value.message
assert msg.startswith("'{0}'".format(tmpdir.join('foo.tif')))
assert ("does not exist in the file system, and is not recognised as a "
"supported dataset name.") in msg
def test_io_error_env(tmpdir):
with rasterio.drivers() as env:
drivers_start = env.drivers()
with pytest.raises(RasterioIOError):
rasterio.open(str(tmpdir.join('foo.tif')))
assert env.drivers() == drivers_start
def test_bogus_band_error():
with rasterio.open('tests/data/RGB.byte.tif') as src:
assert src._has_band(4) is False
|
# Testing use of cpl_errs
import pytest
import rasterio
from rasterio.errors import RasterioIOError
def test_io_error(tmpdir):
with pytest.raises(RasterioIOError) as exc_info:
rasterio.open(str(tmpdir.join('foo.tif')))
msg = str(exc_info.value)
assert msg.startswith("'{0}'".format(tmpdir.join('foo.tif')))
assert ("does not exist in the file system, and is not recognised as a "
"supported dataset name.") in msg
def test_io_error_env(tmpdir):
with rasterio.drivers() as env:
drivers_start = env.drivers()
with pytest.raises(RasterioIOError):
rasterio.open(str(tmpdir.join('foo.tif')))
assert env.drivers() == drivers_start
def test_bogus_band_error():
with rasterio.open('tests/data/RGB.byte.tif') as src:
assert src._has_band(4) is False
|
Test str repr of exception
|
Test str repr of exception
|
Python
|
bsd-3-clause
|
kapadia/rasterio,brendan-ward/rasterio,brendan-ward/rasterio,brendan-ward/rasterio,kapadia/rasterio,kapadia/rasterio
|
# Testing use of cpl_errs
import pytest
import rasterio
from rasterio.errors import RasterioIOError
def test_io_error(tmpdir):
with pytest.raises(RasterioIOError) as exc_info:
rasterio.open(str(tmpdir.join('foo.tif')))
msg = exc_info.value.message
assert msg.startswith("'{0}'".format(tmpdir.join('foo.tif')))
assert ("does not exist in the file system, and is not recognised as a "
"supported dataset name.") in msg
def test_io_error_env(tmpdir):
with rasterio.drivers() as env:
drivers_start = env.drivers()
with pytest.raises(RasterioIOError):
rasterio.open(str(tmpdir.join('foo.tif')))
assert env.drivers() == drivers_start
def test_bogus_band_error():
with rasterio.open('tests/data/RGB.byte.tif') as src:
assert src._has_band(4) is False
Test str repr of exception
|
# Testing use of cpl_errs
import pytest
import rasterio
from rasterio.errors import RasterioIOError
def test_io_error(tmpdir):
with pytest.raises(RasterioIOError) as exc_info:
rasterio.open(str(tmpdir.join('foo.tif')))
msg = str(exc_info.value)
assert msg.startswith("'{0}'".format(tmpdir.join('foo.tif')))
assert ("does not exist in the file system, and is not recognised as a "
"supported dataset name.") in msg
def test_io_error_env(tmpdir):
with rasterio.drivers() as env:
drivers_start = env.drivers()
with pytest.raises(RasterioIOError):
rasterio.open(str(tmpdir.join('foo.tif')))
assert env.drivers() == drivers_start
def test_bogus_band_error():
with rasterio.open('tests/data/RGB.byte.tif') as src:
assert src._has_band(4) is False
|
<commit_before># Testing use of cpl_errs
import pytest
import rasterio
from rasterio.errors import RasterioIOError
def test_io_error(tmpdir):
with pytest.raises(RasterioIOError) as exc_info:
rasterio.open(str(tmpdir.join('foo.tif')))
msg = exc_info.value.message
assert msg.startswith("'{0}'".format(tmpdir.join('foo.tif')))
assert ("does not exist in the file system, and is not recognised as a "
"supported dataset name.") in msg
def test_io_error_env(tmpdir):
with rasterio.drivers() as env:
drivers_start = env.drivers()
with pytest.raises(RasterioIOError):
rasterio.open(str(tmpdir.join('foo.tif')))
assert env.drivers() == drivers_start
def test_bogus_band_error():
with rasterio.open('tests/data/RGB.byte.tif') as src:
assert src._has_band(4) is False
<commit_msg>Test str repr of exception<commit_after>
|
# Testing use of cpl_errs
import pytest
import rasterio
from rasterio.errors import RasterioIOError
def test_io_error(tmpdir):
with pytest.raises(RasterioIOError) as exc_info:
rasterio.open(str(tmpdir.join('foo.tif')))
msg = str(exc_info.value)
assert msg.startswith("'{0}'".format(tmpdir.join('foo.tif')))
assert ("does not exist in the file system, and is not recognised as a "
"supported dataset name.") in msg
def test_io_error_env(tmpdir):
with rasterio.drivers() as env:
drivers_start = env.drivers()
with pytest.raises(RasterioIOError):
rasterio.open(str(tmpdir.join('foo.tif')))
assert env.drivers() == drivers_start
def test_bogus_band_error():
with rasterio.open('tests/data/RGB.byte.tif') as src:
assert src._has_band(4) is False
|
# Testing use of cpl_errs
import pytest
import rasterio
from rasterio.errors import RasterioIOError
def test_io_error(tmpdir):
with pytest.raises(RasterioIOError) as exc_info:
rasterio.open(str(tmpdir.join('foo.tif')))
msg = exc_info.value.message
assert msg.startswith("'{0}'".format(tmpdir.join('foo.tif')))
assert ("does not exist in the file system, and is not recognised as a "
"supported dataset name.") in msg
def test_io_error_env(tmpdir):
with rasterio.drivers() as env:
drivers_start = env.drivers()
with pytest.raises(RasterioIOError):
rasterio.open(str(tmpdir.join('foo.tif')))
assert env.drivers() == drivers_start
def test_bogus_band_error():
with rasterio.open('tests/data/RGB.byte.tif') as src:
assert src._has_band(4) is False
Test str repr of exception# Testing use of cpl_errs
import pytest
import rasterio
from rasterio.errors import RasterioIOError
def test_io_error(tmpdir):
with pytest.raises(RasterioIOError) as exc_info:
rasterio.open(str(tmpdir.join('foo.tif')))
msg = str(exc_info.value)
assert msg.startswith("'{0}'".format(tmpdir.join('foo.tif')))
assert ("does not exist in the file system, and is not recognised as a "
"supported dataset name.") in msg
def test_io_error_env(tmpdir):
with rasterio.drivers() as env:
drivers_start = env.drivers()
with pytest.raises(RasterioIOError):
rasterio.open(str(tmpdir.join('foo.tif')))
assert env.drivers() == drivers_start
def test_bogus_band_error():
with rasterio.open('tests/data/RGB.byte.tif') as src:
assert src._has_band(4) is False
|
<commit_before># Testing use of cpl_errs
import pytest
import rasterio
from rasterio.errors import RasterioIOError
def test_io_error(tmpdir):
with pytest.raises(RasterioIOError) as exc_info:
rasterio.open(str(tmpdir.join('foo.tif')))
msg = exc_info.value.message
assert msg.startswith("'{0}'".format(tmpdir.join('foo.tif')))
assert ("does not exist in the file system, and is not recognised as a "
"supported dataset name.") in msg
def test_io_error_env(tmpdir):
with rasterio.drivers() as env:
drivers_start = env.drivers()
with pytest.raises(RasterioIOError):
rasterio.open(str(tmpdir.join('foo.tif')))
assert env.drivers() == drivers_start
def test_bogus_band_error():
with rasterio.open('tests/data/RGB.byte.tif') as src:
assert src._has_band(4) is False
<commit_msg>Test str repr of exception<commit_after># Testing use of cpl_errs
import pytest
import rasterio
from rasterio.errors import RasterioIOError
def test_io_error(tmpdir):
with pytest.raises(RasterioIOError) as exc_info:
rasterio.open(str(tmpdir.join('foo.tif')))
msg = str(exc_info.value)
assert msg.startswith("'{0}'".format(tmpdir.join('foo.tif')))
assert ("does not exist in the file system, and is not recognised as a "
"supported dataset name.") in msg
def test_io_error_env(tmpdir):
with rasterio.drivers() as env:
drivers_start = env.drivers()
with pytest.raises(RasterioIOError):
rasterio.open(str(tmpdir.join('foo.tif')))
assert env.drivers() == drivers_start
def test_bogus_band_error():
with rasterio.open('tests/data/RGB.byte.tif') as src:
assert src._has_band(4) is False
|
4b30bbcde1ae9cdb3b8fda242e32d44025ef1e0a
|
articles/migrations/0010_create_indepth_page.py
|
articles/migrations/0010_create_indepth_page.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import VERSION as DJANGO_VERSION
from django.db import migrations
def create_indepth_page(apps, schema_editor):
Page = apps.get_model("wagtailcore", "Page")
InDepthListPage = apps.get_model("articles", "InDepthListPage")
home_page = Page.objects.get(slug="home")
ContentType = apps.get_model("contenttypes", "ContentType")
# indepth_list_page_content_type = ContentType.objects.get_for_model(InDepthListPage)
indepth_list_page_content_type, created = ContentType.objects.get_or_create(
model='indepthlistpage',
app_label='articles',
defaults={'name': 'indepthlistpage'} if DJANGO_VERSION < (1, 8) else {}
)
# Create features page
indepth_page = InDepthListPage.objects.create(
title="InDepth",
slug='indepth',
content_type=indepth_list_page_content_type,
path='000100010002',
depth=3,
numchild=0,
url_path='/home/indepth/',
)
home_page.numchild += 1
home_page.save()
class Migration(migrations.Migration):
dependencies = [
('articles', '0009_auto_20150619_2156'),
('contenttypes', '__latest__'),
('core', '__latest__'),
]
operations = [
migrations.RunPython(create_indepth_page),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import VERSION as DJANGO_VERSION
from django.contrib.contenttypes.management import update_contenttypes
from django.db import migrations
def create_indepth_page(apps, schema_editor):
update_contenttypes(apps.app_configs['articles'], interactive=False)
Page = apps.get_model("wagtailcore", "Page")
InDepthListPage = apps.get_model("articles", "InDepthListPage")
home_page = Page.objects.get(slug="home")
ContentType = apps.get_model("contenttypes", "ContentType")
indepth_list_page_content_type = ContentType.objects.get_for_model(InDepthListPage)
# Create features page
indepth_page = InDepthListPage.objects.create(
title="InDepth",
slug='indepth',
content_type=indepth_list_page_content_type,
path='000100010002',
depth=3,
numchild=0,
url_path='/home/indepth/',
)
home_page.numchild += 1
home_page.save()
class Migration(migrations.Migration):
dependencies = [
('articles', '0009_auto_20150619_2156'),
('contenttypes', '__latest__'),
('core', '__latest__'),
]
operations = [
migrations.RunPython(create_indepth_page),
]
|
Update the contenttypes before trying to access them.
|
Update the contenttypes before trying to access them.
|
Python
|
mit
|
OpenCanada/website,albertoconnor/website,albertoconnor/website,OpenCanada/website,albertoconnor/website,OpenCanada/website,albertoconnor/website,OpenCanada/website
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import VERSION as DJANGO_VERSION
from django.db import migrations
def create_indepth_page(apps, schema_editor):
Page = apps.get_model("wagtailcore", "Page")
InDepthListPage = apps.get_model("articles", "InDepthListPage")
home_page = Page.objects.get(slug="home")
ContentType = apps.get_model("contenttypes", "ContentType")
# indepth_list_page_content_type = ContentType.objects.get_for_model(InDepthListPage)
indepth_list_page_content_type, created = ContentType.objects.get_or_create(
model='indepthlistpage',
app_label='articles',
defaults={'name': 'indepthlistpage'} if DJANGO_VERSION < (1, 8) else {}
)
# Create features page
indepth_page = InDepthListPage.objects.create(
title="InDepth",
slug='indepth',
content_type=indepth_list_page_content_type,
path='000100010002',
depth=3,
numchild=0,
url_path='/home/indepth/',
)
home_page.numchild += 1
home_page.save()
class Migration(migrations.Migration):
dependencies = [
('articles', '0009_auto_20150619_2156'),
('contenttypes', '__latest__'),
('core', '__latest__'),
]
operations = [
migrations.RunPython(create_indepth_page),
]
Update the contenttypes before trying to access them.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import VERSION as DJANGO_VERSION
from django.contrib.contenttypes.management import update_contenttypes
from django.db import migrations
def create_indepth_page(apps, schema_editor):
update_contenttypes(apps.app_configs['articles'], interactive=False)
Page = apps.get_model("wagtailcore", "Page")
InDepthListPage = apps.get_model("articles", "InDepthListPage")
home_page = Page.objects.get(slug="home")
ContentType = apps.get_model("contenttypes", "ContentType")
indepth_list_page_content_type = ContentType.objects.get_for_model(InDepthListPage)
# Create features page
indepth_page = InDepthListPage.objects.create(
title="InDepth",
slug='indepth',
content_type=indepth_list_page_content_type,
path='000100010002',
depth=3,
numchild=0,
url_path='/home/indepth/',
)
home_page.numchild += 1
home_page.save()
class Migration(migrations.Migration):
dependencies = [
('articles', '0009_auto_20150619_2156'),
('contenttypes', '__latest__'),
('core', '__latest__'),
]
operations = [
migrations.RunPython(create_indepth_page),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import VERSION as DJANGO_VERSION
from django.db import migrations
def create_indepth_page(apps, schema_editor):
Page = apps.get_model("wagtailcore", "Page")
InDepthListPage = apps.get_model("articles", "InDepthListPage")
home_page = Page.objects.get(slug="home")
ContentType = apps.get_model("contenttypes", "ContentType")
# indepth_list_page_content_type = ContentType.objects.get_for_model(InDepthListPage)
indepth_list_page_content_type, created = ContentType.objects.get_or_create(
model='indepthlistpage',
app_label='articles',
defaults={'name': 'indepthlistpage'} if DJANGO_VERSION < (1, 8) else {}
)
# Create features page
indepth_page = InDepthListPage.objects.create(
title="InDepth",
slug='indepth',
content_type=indepth_list_page_content_type,
path='000100010002',
depth=3,
numchild=0,
url_path='/home/indepth/',
)
home_page.numchild += 1
home_page.save()
class Migration(migrations.Migration):
dependencies = [
('articles', '0009_auto_20150619_2156'),
('contenttypes', '__latest__'),
('core', '__latest__'),
]
operations = [
migrations.RunPython(create_indepth_page),
]
<commit_msg>Update the contenttypes before trying to access them.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import VERSION as DJANGO_VERSION
from django.contrib.contenttypes.management import update_contenttypes
from django.db import migrations
def create_indepth_page(apps, schema_editor):
update_contenttypes(apps.app_configs['articles'], interactive=False)
Page = apps.get_model("wagtailcore", "Page")
InDepthListPage = apps.get_model("articles", "InDepthListPage")
home_page = Page.objects.get(slug="home")
ContentType = apps.get_model("contenttypes", "ContentType")
indepth_list_page_content_type = ContentType.objects.get_for_model(InDepthListPage)
# Create features page
indepth_page = InDepthListPage.objects.create(
title="InDepth",
slug='indepth',
content_type=indepth_list_page_content_type,
path='000100010002',
depth=3,
numchild=0,
url_path='/home/indepth/',
)
home_page.numchild += 1
home_page.save()
class Migration(migrations.Migration):
dependencies = [
('articles', '0009_auto_20150619_2156'),
('contenttypes', '__latest__'),
('core', '__latest__'),
]
operations = [
migrations.RunPython(create_indepth_page),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import VERSION as DJANGO_VERSION
from django.db import migrations
def create_indepth_page(apps, schema_editor):
Page = apps.get_model("wagtailcore", "Page")
InDepthListPage = apps.get_model("articles", "InDepthListPage")
home_page = Page.objects.get(slug="home")
ContentType = apps.get_model("contenttypes", "ContentType")
# indepth_list_page_content_type = ContentType.objects.get_for_model(InDepthListPage)
indepth_list_page_content_type, created = ContentType.objects.get_or_create(
model='indepthlistpage',
app_label='articles',
defaults={'name': 'indepthlistpage'} if DJANGO_VERSION < (1, 8) else {}
)
# Create features page
indepth_page = InDepthListPage.objects.create(
title="InDepth",
slug='indepth',
content_type=indepth_list_page_content_type,
path='000100010002',
depth=3,
numchild=0,
url_path='/home/indepth/',
)
home_page.numchild += 1
home_page.save()
class Migration(migrations.Migration):
dependencies = [
('articles', '0009_auto_20150619_2156'),
('contenttypes', '__latest__'),
('core', '__latest__'),
]
operations = [
migrations.RunPython(create_indepth_page),
]
Update the contenttypes before trying to access them.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import VERSION as DJANGO_VERSION
from django.contrib.contenttypes.management import update_contenttypes
from django.db import migrations
def create_indepth_page(apps, schema_editor):
update_contenttypes(apps.app_configs['articles'], interactive=False)
Page = apps.get_model("wagtailcore", "Page")
InDepthListPage = apps.get_model("articles", "InDepthListPage")
home_page = Page.objects.get(slug="home")
ContentType = apps.get_model("contenttypes", "ContentType")
indepth_list_page_content_type = ContentType.objects.get_for_model(InDepthListPage)
# Create features page
indepth_page = InDepthListPage.objects.create(
title="InDepth",
slug='indepth',
content_type=indepth_list_page_content_type,
path='000100010002',
depth=3,
numchild=0,
url_path='/home/indepth/',
)
home_page.numchild += 1
home_page.save()
class Migration(migrations.Migration):
dependencies = [
('articles', '0009_auto_20150619_2156'),
('contenttypes', '__latest__'),
('core', '__latest__'),
]
operations = [
migrations.RunPython(create_indepth_page),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import VERSION as DJANGO_VERSION
from django.db import migrations
def create_indepth_page(apps, schema_editor):
Page = apps.get_model("wagtailcore", "Page")
InDepthListPage = apps.get_model("articles", "InDepthListPage")
home_page = Page.objects.get(slug="home")
ContentType = apps.get_model("contenttypes", "ContentType")
# indepth_list_page_content_type = ContentType.objects.get_for_model(InDepthListPage)
indepth_list_page_content_type, created = ContentType.objects.get_or_create(
model='indepthlistpage',
app_label='articles',
defaults={'name': 'indepthlistpage'} if DJANGO_VERSION < (1, 8) else {}
)
# Create features page
indepth_page = InDepthListPage.objects.create(
title="InDepth",
slug='indepth',
content_type=indepth_list_page_content_type,
path='000100010002',
depth=3,
numchild=0,
url_path='/home/indepth/',
)
home_page.numchild += 1
home_page.save()
class Migration(migrations.Migration):
dependencies = [
('articles', '0009_auto_20150619_2156'),
('contenttypes', '__latest__'),
('core', '__latest__'),
]
operations = [
migrations.RunPython(create_indepth_page),
]
<commit_msg>Update the contenttypes before trying to access them.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import VERSION as DJANGO_VERSION
from django.contrib.contenttypes.management import update_contenttypes
from django.db import migrations
def create_indepth_page(apps, schema_editor):
    """Create the 'InDepth' listing page as a child of the site home page."""
    # Force creation of ContentType rows for the articles app now; they are
    # normally created by a post-migrate signal, which is too late for this
    # migration's get_for_model lookup below.
    update_contenttypes(apps.app_configs['articles'], interactive=False)
    Page = apps.get_model("wagtailcore", "Page")
    InDepthListPage = apps.get_model("articles", "InDepthListPage")
    home_page = Page.objects.get(slug="home")
    ContentType = apps.get_model("contenttypes", "ContentType")
    indepth_list_page_content_type = ContentType.objects.get_for_model(InDepthListPage)
    # Create features page
    # NOTE(review): path/depth appear to follow Wagtail's materialized-path
    # tree ('000100010002' = third child of home, depth 3) -- confirm against
    # the actual page tree before reusing these literals.
    indepth_page = InDepthListPage.objects.create(
        title="InDepth",
        slug='indepth',
        content_type=indepth_list_page_content_type,
        path='000100010002',
        depth=3,
        numchild=0,
        url_path='/home/indepth/',
    )
    # Keep the parent's cached child count in sync with the manual insert.
    home_page.numchild += 1
    home_page.save()
class Migration(migrations.Migration):
    """Data migration: create the InDepth listing page under the home page."""

    # Needs the previous articles migration plus the latest contenttypes/core
    # state so that the ContentType machinery is available to the RunPython step.
    dependencies = [
        ('articles', '0009_auto_20150619_2156'),
        ('contenttypes', '__latest__'),
        ('core', '__latest__'),
    ]
    operations = [
        migrations.RunPython(create_indepth_page),
    ]
|
c7550b21b8424bb11ad31b9755d910690e386b40
|
salt/_beacons/default_network_interface_settings.py
|
salt/_beacons/default_network_interface_settings.py
|
# -*- coding: utf-8 -*-
'''
Beacon to monitor default network adapter setting changes on Linux
'''
from salt.beacons import network_settings
import logging
log = logging.getLogger(__name__)
__virtual_name__ = 'default_network_interface_settings'
def __virtual__():
if network_settings.__virtual__():
return __virtual_name__
return False
def __validate__(config):
return network_settings.__validate__(config)
def beacon(config):
'''
Watch for changes on network settings on the gateway interface.
By default, the beacon will emit when there is a value change on one of the
settings on watch. The config also support the onvalue parameter for each
setting, which instruct the beacon to only emit if the setting changed to the
value defined.
Example Config
.. code-block:: yaml
beacons:
default_network_interface_settings:
interval: 5
watch:
ipaddr:
promiscuity:
onvalue: 1
'''
default_interface = __salt__['network.default_route']()[0]['interface']
return network_settings.beacon({default_interface: config['watch']})
|
# -*- coding: utf-8 -*-
'''
Beacon to monitor default network adapter setting changes on Linux
'''
from salt.beacons import network_settings
import logging
log = logging.getLogger(__name__)
__virtual_name__ = 'default_network_interface_settings'
def __virtual__():
    """Load this beacon only if the underlying network_settings beacon loads."""
    if not network_settings.__virtual__():
        return False
    return __virtual_name__
def __validate__(config):
    # Delegate config validation entirely to the generic network_settings beacon.
    return network_settings.__validate__(config)
def beacon(config):
    '''
    Watch for changes on network settings on the gateway interface.

    By default, the beacon will emit when there is a value change on one of the
    settings on watch. The config also supports the onvalue parameter for each
    setting, which instructs the beacon to only emit if the setting changed to
    the value defined.

    Example Config

    .. code-block:: yaml

      beacons:
        default_network_interface_settings:
          interval: 5
          watch:
            ipaddr:
            promiscuity:
              onvalue: 1
    '''
    def _release_tuple(version):
        # Parse the leading dotted-integer portion of a Salt version string,
        # e.g. '2018.3.0' -> (2018, 3, 0); non-numeric tails are ignored.
        parts = []
        for piece in version.split('.'):
            if not piece.isdigit():
                break
            parts.append(int(piece))
        return tuple(parts)

    # Only watch the interface that carries the default route.
    default_interface = __salt__['network.default_route']()[0]['interface']
    config = {default_interface: config['watch']}
    # Salt 2018.3.0 changed the network_settings beacon config to a list
    # wrapping an 'interfaces' mapping. Compare release numbers numerically:
    # the previous plain string comparison was lexicographic and mis-ordered
    # releases such as '2018.3.10' vs '2018.3.9'.
    if _release_tuple(__salt__['test.version']()) >= (2018, 3, 0):
        config = [{'interfaces': config}]
        log.debug("Newer salt version - adjusted config format: {0}".format(config))
    return network_settings.beacon(config)
|
Adjust network_settings config format for salt 2018.3.0.
|
Adjust network_settings config format for salt 2018.3.0.
Before this release the format did not use the 'interfaces' key.
|
Python
|
apache-2.0
|
ereslibre/salt,ereslibre/salt,kubic-project/salt,kubic-project/salt,ereslibre/salt,kubic-project/salt
|
# -*- coding: utf-8 -*-
'''
Beacon to monitor default network adapter setting changes on Linux
'''
from salt.beacons import network_settings
import logging
log = logging.getLogger(__name__)
__virtual_name__ = 'default_network_interface_settings'
def __virtual__():
if network_settings.__virtual__():
return __virtual_name__
return False
def __validate__(config):
return network_settings.__validate__(config)
def beacon(config):
'''
Watch for changes on network settings on the gateway interface.
By default, the beacon will emit when there is a value change on one of the
settings on watch. The config also support the onvalue parameter for each
setting, which instruct the beacon to only emit if the setting changed to the
value defined.
Example Config
.. code-block:: yaml
beacons:
default_network_interface_settings:
interval: 5
watch:
ipaddr:
promiscuity:
onvalue: 1
'''
default_interface = __salt__['network.default_route']()[0]['interface']
return network_settings.beacon({default_interface: config['watch']})
Adjust network_settings config format for salt 2018.3.0.
Before this release the format did not use the 'interfaces' key.
|
# -*- coding: utf-8 -*-
'''
Beacon to monitor default network adapter setting changes on Linux
'''
from salt.beacons import network_settings
import logging
log = logging.getLogger(__name__)
__virtual_name__ = 'default_network_interface_settings'
def __virtual__():
if network_settings.__virtual__():
return __virtual_name__
return False
def __validate__(config):
return network_settings.__validate__(config)
def beacon(config):
'''
Watch for changes on network settings on the gateway interface.
By default, the beacon will emit when there is a value change on one of the
settings on watch. The config also support the onvalue parameter for each
setting, which instruct the beacon to only emit if the setting changed to the
value defined.
Example Config
.. code-block:: yaml
beacons:
default_network_interface_settings:
interval: 5
watch:
ipaddr:
promiscuity:
onvalue: 1
'''
default_interface = __salt__['network.default_route']()[0]['interface']
config = {default_interface: config['watch']}
if __salt__['test.version']() >= '2018.3.0':
config = [{'interfaces': config}]
log.debug("Newer salt version - adjusted config format: {0}".format(config))
return network_settings.beacon(config)
|
<commit_before># -*- coding: utf-8 -*-
'''
Beacon to monitor default network adapter setting changes on Linux
'''
from salt.beacons import network_settings
import logging
log = logging.getLogger(__name__)
__virtual_name__ = 'default_network_interface_settings'
def __virtual__():
if network_settings.__virtual__():
return __virtual_name__
return False
def __validate__(config):
return network_settings.__validate__(config)
def beacon(config):
'''
Watch for changes on network settings on the gateway interface.
By default, the beacon will emit when there is a value change on one of the
settings on watch. The config also support the onvalue parameter for each
setting, which instruct the beacon to only emit if the setting changed to the
value defined.
Example Config
.. code-block:: yaml
beacons:
default_network_interface_settings:
interval: 5
watch:
ipaddr:
promiscuity:
onvalue: 1
'''
default_interface = __salt__['network.default_route']()[0]['interface']
return network_settings.beacon({default_interface: config['watch']})
<commit_msg>Adjust network_settings config format for salt 2018.3.0.
Before this release the format did not use the 'interfaces' key.<commit_after>
|
# -*- coding: utf-8 -*-
'''
Beacon to monitor default network adapter setting changes on Linux
'''
from salt.beacons import network_settings
import logging
log = logging.getLogger(__name__)
__virtual_name__ = 'default_network_interface_settings'
def __virtual__():
if network_settings.__virtual__():
return __virtual_name__
return False
def __validate__(config):
return network_settings.__validate__(config)
def beacon(config):
'''
Watch for changes on network settings on the gateway interface.
By default, the beacon will emit when there is a value change on one of the
settings on watch. The config also support the onvalue parameter for each
setting, which instruct the beacon to only emit if the setting changed to the
value defined.
Example Config
.. code-block:: yaml
beacons:
default_network_interface_settings:
interval: 5
watch:
ipaddr:
promiscuity:
onvalue: 1
'''
default_interface = __salt__['network.default_route']()[0]['interface']
config = {default_interface: config['watch']}
if __salt__['test.version']() >= '2018.3.0':
config = [{'interfaces': config}]
log.debug("Newer salt version - adjusted config format: {0}".format(config))
return network_settings.beacon(config)
|
# -*- coding: utf-8 -*-
'''
Beacon to monitor default network adapter setting changes on Linux
'''
from salt.beacons import network_settings
import logging
log = logging.getLogger(__name__)
__virtual_name__ = 'default_network_interface_settings'
def __virtual__():
if network_settings.__virtual__():
return __virtual_name__
return False
def __validate__(config):
return network_settings.__validate__(config)
def beacon(config):
'''
Watch for changes on network settings on the gateway interface.
By default, the beacon will emit when there is a value change on one of the
settings on watch. The config also support the onvalue parameter for each
setting, which instruct the beacon to only emit if the setting changed to the
value defined.
Example Config
.. code-block:: yaml
beacons:
default_network_interface_settings:
interval: 5
watch:
ipaddr:
promiscuity:
onvalue: 1
'''
default_interface = __salt__['network.default_route']()[0]['interface']
return network_settings.beacon({default_interface: config['watch']})
Adjust network_settings config format for salt 2018.3.0.
Before this release the format did not use the 'interfaces' key.# -*- coding: utf-8 -*-
'''
Beacon to monitor default network adapter setting changes on Linux
'''
from salt.beacons import network_settings
import logging
log = logging.getLogger(__name__)
__virtual_name__ = 'default_network_interface_settings'
def __virtual__():
if network_settings.__virtual__():
return __virtual_name__
return False
def __validate__(config):
return network_settings.__validate__(config)
def beacon(config):
'''
Watch for changes on network settings on the gateway interface.
By default, the beacon will emit when there is a value change on one of the
settings on watch. The config also support the onvalue parameter for each
setting, which instruct the beacon to only emit if the setting changed to the
value defined.
Example Config
.. code-block:: yaml
beacons:
default_network_interface_settings:
interval: 5
watch:
ipaddr:
promiscuity:
onvalue: 1
'''
default_interface = __salt__['network.default_route']()[0]['interface']
config = {default_interface: config['watch']}
if __salt__['test.version']() >= '2018.3.0':
config = [{'interfaces': config}]
log.debug("Newer salt version - adjusted config format: {0}".format(config))
return network_settings.beacon(config)
|
<commit_before># -*- coding: utf-8 -*-
'''
Beacon to monitor default network adapter setting changes on Linux
'''
from salt.beacons import network_settings
import logging
log = logging.getLogger(__name__)
__virtual_name__ = 'default_network_interface_settings'
def __virtual__():
if network_settings.__virtual__():
return __virtual_name__
return False
def __validate__(config):
return network_settings.__validate__(config)
def beacon(config):
'''
Watch for changes on network settings on the gateway interface.
By default, the beacon will emit when there is a value change on one of the
settings on watch. The config also support the onvalue parameter for each
setting, which instruct the beacon to only emit if the setting changed to the
value defined.
Example Config
.. code-block:: yaml
beacons:
default_network_interface_settings:
interval: 5
watch:
ipaddr:
promiscuity:
onvalue: 1
'''
default_interface = __salt__['network.default_route']()[0]['interface']
return network_settings.beacon({default_interface: config['watch']})
<commit_msg>Adjust network_settings config format for salt 2018.3.0.
Before this release the format did not use the 'interfaces' key.<commit_after># -*- coding: utf-8 -*-
'''
Beacon to monitor default network adapter setting changes on Linux
'''
from salt.beacons import network_settings
import logging
log = logging.getLogger(__name__)
__virtual_name__ = 'default_network_interface_settings'
def __virtual__():
if network_settings.__virtual__():
return __virtual_name__
return False
def __validate__(config):
return network_settings.__validate__(config)
def beacon(config):
'''
Watch for changes on network settings on the gateway interface.
By default, the beacon will emit when there is a value change on one of the
settings on watch. The config also support the onvalue parameter for each
setting, which instruct the beacon to only emit if the setting changed to the
value defined.
Example Config
.. code-block:: yaml
beacons:
default_network_interface_settings:
interval: 5
watch:
ipaddr:
promiscuity:
onvalue: 1
'''
default_interface = __salt__['network.default_route']()[0]['interface']
config = {default_interface: config['watch']}
if __salt__['test.version']() >= '2018.3.0':
config = [{'interfaces': config}]
log.debug("Newer salt version - adjusted config format: {0}".format(config))
return network_settings.beacon(config)
|
a3ebec3a7d5b47b008eb5c8bee51ad21188a8adf
|
testsuite/node_test.py
|
testsuite/node_test.py
|
#!/usr/bin/env python2
from testlib import *
set_port("80")
set_server("us.bittoll.com")
def test_page_error():
r = apicall("nonexistant", "hi")
assert "error" in r
assert r["error"] != ""
assert int(r["error_code"]) == 0
def test_login(login):
info = login
assert 'username' in info
assert 'secret' in info
def pytest_funcarg__login(request):
info = register()
assert 'username' in info
assert 'secret' in info
return info
def test_balance(login):
info = balance(login['username'], login['secret'])
assert 'balance' in info
assert int(info["error_code"]) == 0
def test_request(login):
info = request(login['username'], login['secret'], amount=1)
assert 'payment' in info
assert int(info["error_code"]) == 0
def test_mine(login):
info = mine(login['username'], login['secret'])
assert 'result' in info
assert 'id' in info
assert 'error' in info
|
#!/usr/bin/env python2
from testlib import *
# Point the testlib helpers at the live server on the standard HTTP port.
set_port("80")
set_server("us.bittoll.com")
def test_page_error():
    """Hitting an unknown API endpoint must yield a non-empty error, code 1."""
    response = apicall("nonexistant", "hi")
    assert "error" in response
    assert response["error"] != ""
    assert int(response["error_code"]) == 1
def test_login(login):
    # The 'login' fixture registers an account; verify it yields credentials.
    info = login
    assert 'username' in info
    assert 'secret' in info
def pytest_funcarg__login(request):
    # Legacy pytest funcarg fixture: register a fresh account and hand its
    # credentials to every test that declares a 'login' parameter.
    info = register()
    assert 'username' in info
    assert 'secret' in info
    return info
def test_balance(login):
    # A newly registered account must report a balance without an error code.
    info = balance(login['username'], login['secret'])
    assert 'balance' in info
    assert int(info["error_code"]) == 0
def test_request(login):
    # Requesting a payment of amount 1 must return a payment identifier.
    info = request(login['username'], login['secret'], amount=1)
    assert 'payment' in info
    assert int(info["error_code"]) == 0
def test_mine(login):
    # The mining endpoint must return a getwork-style result/id/error triple.
    info = mine(login['username'], login['secret'])
    assert 'result' in info
    assert 'id' in info
    assert 'error' in info
|
Fix error code on nonexistent page
|
Fix error code on non existent page
|
Python
|
mit
|
c00w/BitToll,c00w/BitToll,c00w/BitToll,c00w/BitToll
|
#!/usr/bin/env python2
from testlib import *
set_port("80")
set_server("us.bittoll.com")
def test_page_error():
r = apicall("nonexistant", "hi")
assert "error" in r
assert r["error"] != ""
assert int(r["error_code"]) == 0
def test_login(login):
info = login
assert 'username' in info
assert 'secret' in info
def pytest_funcarg__login(request):
info = register()
assert 'username' in info
assert 'secret' in info
return info
def test_balance(login):
info = balance(login['username'], login['secret'])
assert 'balance' in info
assert int(info["error_code"]) == 0
def test_request(login):
info = request(login['username'], login['secret'], amount=1)
assert 'payment' in info
assert int(info["error_code"]) == 0
def test_mine(login):
info = mine(login['username'], login['secret'])
assert 'result' in info
assert 'id' in info
assert 'error' in info
Fix error code on non existent page
|
#!/usr/bin/env python2
from testlib import *
set_port("80")
set_server("us.bittoll.com")
def test_page_error():
r = apicall("nonexistant", "hi")
assert "error" in r
assert r["error"] != ""
assert int(r["error_code"]) == 1
def test_login(login):
info = login
assert 'username' in info
assert 'secret' in info
def pytest_funcarg__login(request):
info = register()
assert 'username' in info
assert 'secret' in info
return info
def test_balance(login):
info = balance(login['username'], login['secret'])
assert 'balance' in info
assert int(info["error_code"]) == 0
def test_request(login):
info = request(login['username'], login['secret'], amount=1)
assert 'payment' in info
assert int(info["error_code"]) == 0
def test_mine(login):
info = mine(login['username'], login['secret'])
assert 'result' in info
assert 'id' in info
assert 'error' in info
|
<commit_before>#!/usr/bin/env python2
from testlib import *
set_port("80")
set_server("us.bittoll.com")
def test_page_error():
r = apicall("nonexistant", "hi")
assert "error" in r
assert r["error"] != ""
assert int(r["error_code"]) == 0
def test_login(login):
info = login
assert 'username' in info
assert 'secret' in info
def pytest_funcarg__login(request):
info = register()
assert 'username' in info
assert 'secret' in info
return info
def test_balance(login):
info = balance(login['username'], login['secret'])
assert 'balance' in info
assert int(info["error_code"]) == 0
def test_request(login):
info = request(login['username'], login['secret'], amount=1)
assert 'payment' in info
assert int(info["error_code"]) == 0
def test_mine(login):
info = mine(login['username'], login['secret'])
assert 'result' in info
assert 'id' in info
assert 'error' in info
<commit_msg>Fix error code on non existent page<commit_after>
|
#!/usr/bin/env python2
from testlib import *
set_port("80")
set_server("us.bittoll.com")
def test_page_error():
r = apicall("nonexistant", "hi")
assert "error" in r
assert r["error"] != ""
assert int(r["error_code"]) == 1
def test_login(login):
info = login
assert 'username' in info
assert 'secret' in info
def pytest_funcarg__login(request):
info = register()
assert 'username' in info
assert 'secret' in info
return info
def test_balance(login):
info = balance(login['username'], login['secret'])
assert 'balance' in info
assert int(info["error_code"]) == 0
def test_request(login):
info = request(login['username'], login['secret'], amount=1)
assert 'payment' in info
assert int(info["error_code"]) == 0
def test_mine(login):
info = mine(login['username'], login['secret'])
assert 'result' in info
assert 'id' in info
assert 'error' in info
|
#!/usr/bin/env python2
from testlib import *
set_port("80")
set_server("us.bittoll.com")
def test_page_error():
r = apicall("nonexistant", "hi")
assert "error" in r
assert r["error"] != ""
assert int(r["error_code"]) == 0
def test_login(login):
info = login
assert 'username' in info
assert 'secret' in info
def pytest_funcarg__login(request):
info = register()
assert 'username' in info
assert 'secret' in info
return info
def test_balance(login):
info = balance(login['username'], login['secret'])
assert 'balance' in info
assert int(info["error_code"]) == 0
def test_request(login):
info = request(login['username'], login['secret'], amount=1)
assert 'payment' in info
assert int(info["error_code"]) == 0
def test_mine(login):
info = mine(login['username'], login['secret'])
assert 'result' in info
assert 'id' in info
assert 'error' in info
Fix error code on non existent page#!/usr/bin/env python2
from testlib import *
set_port("80")
set_server("us.bittoll.com")
def test_page_error():
r = apicall("nonexistant", "hi")
assert "error" in r
assert r["error"] != ""
assert int(r["error_code"]) == 1
def test_login(login):
info = login
assert 'username' in info
assert 'secret' in info
def pytest_funcarg__login(request):
info = register()
assert 'username' in info
assert 'secret' in info
return info
def test_balance(login):
info = balance(login['username'], login['secret'])
assert 'balance' in info
assert int(info["error_code"]) == 0
def test_request(login):
info = request(login['username'], login['secret'], amount=1)
assert 'payment' in info
assert int(info["error_code"]) == 0
def test_mine(login):
info = mine(login['username'], login['secret'])
assert 'result' in info
assert 'id' in info
assert 'error' in info
|
<commit_before>#!/usr/bin/env python2
from testlib import *
set_port("80")
set_server("us.bittoll.com")
def test_page_error():
r = apicall("nonexistant", "hi")
assert "error" in r
assert r["error"] != ""
assert int(r["error_code"]) == 0
def test_login(login):
info = login
assert 'username' in info
assert 'secret' in info
def pytest_funcarg__login(request):
info = register()
assert 'username' in info
assert 'secret' in info
return info
def test_balance(login):
info = balance(login['username'], login['secret'])
assert 'balance' in info
assert int(info["error_code"]) == 0
def test_request(login):
info = request(login['username'], login['secret'], amount=1)
assert 'payment' in info
assert int(info["error_code"]) == 0
def test_mine(login):
info = mine(login['username'], login['secret'])
assert 'result' in info
assert 'id' in info
assert 'error' in info
<commit_msg>Fix error code on non existent page<commit_after>#!/usr/bin/env python2
from testlib import *
set_port("80")
set_server("us.bittoll.com")
def test_page_error():
r = apicall("nonexistant", "hi")
assert "error" in r
assert r["error"] != ""
assert int(r["error_code"]) == 1
def test_login(login):
info = login
assert 'username' in info
assert 'secret' in info
def pytest_funcarg__login(request):
info = register()
assert 'username' in info
assert 'secret' in info
return info
def test_balance(login):
info = balance(login['username'], login['secret'])
assert 'balance' in info
assert int(info["error_code"]) == 0
def test_request(login):
info = request(login['username'], login['secret'], amount=1)
assert 'payment' in info
assert int(info["error_code"]) == 0
def test_mine(login):
info = mine(login['username'], login['secret'])
assert 'result' in info
assert 'id' in info
assert 'error' in info
|
90c816bd40a4971dda8bd96d865efb1dee131566
|
files/install_workflow.py
|
files/install_workflow.py
|
#!/usr/bin/env python
import argparse
from bioblend import galaxy
import json
def main():
"""
This script uses bioblend to import .ga workflow files into a running instance of Galaxy
"""
parser = argparse.ArgumentParser()
parser.add_argument("-w", "--workflow_path", help='Path to workflow file')
parser.add_argument("-g", "--galaxy",
dest="galaxy_url",
help="Target Galaxy instance URL/IP address (required "
"if not defined in the tools list file)",)
parser.add_argument("-a", "--apikey",
dest="api_key",
help="Galaxy admin user API key (required if not "
"defined in the tools list file)",)
args = parser.parse_args()
gi = galaxy.GalaxyInstance(url=args.galaxy_url, key=args.api_key)
import_uuid = json.load(open(args.workflow_path, 'r')).get('uuid')
existing_uuids = [d.get('latest_workflow_uuid') for d in gi.workflows.get_workflows()]
if import_uuid not in existing_uuids:
gi.workflows.import_workflow_from_local_path(args.workflow_path)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import argparse
from bioblend import galaxy
import json
def main():
    """
    This script uses bioblend to import .ga workflow files into a running instance of Galaxy
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-w", "--workflow_path", help='Path to workflow file')
    parser.add_argument("-g", "--galaxy",
                        dest="galaxy_url",
                        help="Target Galaxy instance URL/IP address (required "
                             "if not defined in the tools list file)",)
    parser.add_argument("-a", "--apikey",
                        dest="api_key",
                        help="Galaxy admin user API key (required if not "
                             "defined in the tools list file)",)
    args = parser.parse_args()
    # NOTE(review): the help text calls these arguments required, but argparse
    # does not enforce it (required=True is unset) -- confirm intended usage.
    gi = galaxy.GalaxyInstance(url=args.galaxy_url, key=args.api_key)
    # Read the workflow's UUID; the context manager closes the file promptly.
    with open(args.workflow_path, 'r') as wf_file:
        import_uuid = json.load(wf_file).get('uuid')
    # Import only when no installed workflow already has this UUID (idempotent).
    existing_uuids = [d.get('latest_workflow_uuid') for d in gi.workflows.get_workflows()]
    if import_uuid not in existing_uuids:
        gi.workflows.import_workflow_from_local_path(args.workflow_path)

if __name__ == '__main__':
    main()
|
Make sure the opened workflow file gets closed after it's been loaded
|
Make sure the opened workflow file gets closed after it's been loaded
|
Python
|
mit
|
galaxyproject/ansible-galaxy-tools,galaxyproject/ansible-tools,nuwang/ansible-galaxy-tools,anmoljh/ansible-galaxy-tools
|
#!/usr/bin/env python
import argparse
from bioblend import galaxy
import json
def main():
"""
This script uses bioblend to import .ga workflow files into a running instance of Galaxy
"""
parser = argparse.ArgumentParser()
parser.add_argument("-w", "--workflow_path", help='Path to workflow file')
parser.add_argument("-g", "--galaxy",
dest="galaxy_url",
help="Target Galaxy instance URL/IP address (required "
"if not defined in the tools list file)",)
parser.add_argument("-a", "--apikey",
dest="api_key",
help="Galaxy admin user API key (required if not "
"defined in the tools list file)",)
args = parser.parse_args()
gi = galaxy.GalaxyInstance(url=args.galaxy_url, key=args.api_key)
import_uuid = json.load(open(args.workflow_path, 'r')).get('uuid')
existing_uuids = [d.get('latest_workflow_uuid') for d in gi.workflows.get_workflows()]
if import_uuid not in existing_uuids:
gi.workflows.import_workflow_from_local_path(args.workflow_path)
if __name__ == '__main__':
main()
Make sure the opened workflow file gets closed after it's been loaded
|
#!/usr/bin/env python
import argparse
from bioblend import galaxy
import json
def main():
"""
This script uses bioblend to import .ga workflow files into a running instance of Galaxy
"""
parser = argparse.ArgumentParser()
parser.add_argument("-w", "--workflow_path", help='Path to workflow file')
parser.add_argument("-g", "--galaxy",
dest="galaxy_url",
help="Target Galaxy instance URL/IP address (required "
"if not defined in the tools list file)",)
parser.add_argument("-a", "--apikey",
dest="api_key",
help="Galaxy admin user API key (required if not "
"defined in the tools list file)",)
args = parser.parse_args()
gi = galaxy.GalaxyInstance(url=args.galaxy_url, key=args.api_key)
with open(args.workflow_path, 'r') as wf_file:
import_uuid = json.load(wf_file).get('uuid')
existing_uuids = [d.get('latest_workflow_uuid') for d in gi.workflows.get_workflows()]
if import_uuid not in existing_uuids:
gi.workflows.import_workflow_from_local_path(args.workflow_path)
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
import argparse
from bioblend import galaxy
import json
def main():
"""
This script uses bioblend to import .ga workflow files into a running instance of Galaxy
"""
parser = argparse.ArgumentParser()
parser.add_argument("-w", "--workflow_path", help='Path to workflow file')
parser.add_argument("-g", "--galaxy",
dest="galaxy_url",
help="Target Galaxy instance URL/IP address (required "
"if not defined in the tools list file)",)
parser.add_argument("-a", "--apikey",
dest="api_key",
help="Galaxy admin user API key (required if not "
"defined in the tools list file)",)
args = parser.parse_args()
gi = galaxy.GalaxyInstance(url=args.galaxy_url, key=args.api_key)
import_uuid = json.load(open(args.workflow_path, 'r')).get('uuid')
existing_uuids = [d.get('latest_workflow_uuid') for d in gi.workflows.get_workflows()]
if import_uuid not in existing_uuids:
gi.workflows.import_workflow_from_local_path(args.workflow_path)
if __name__ == '__main__':
main()
<commit_msg>Make sure the opened workflow file gets closed after it's been loaded<commit_after>
|
#!/usr/bin/env python
import argparse
from bioblend import galaxy
import json
def main():
"""
This script uses bioblend to import .ga workflow files into a running instance of Galaxy
"""
parser = argparse.ArgumentParser()
parser.add_argument("-w", "--workflow_path", help='Path to workflow file')
parser.add_argument("-g", "--galaxy",
dest="galaxy_url",
help="Target Galaxy instance URL/IP address (required "
"if not defined in the tools list file)",)
parser.add_argument("-a", "--apikey",
dest="api_key",
help="Galaxy admin user API key (required if not "
"defined in the tools list file)",)
args = parser.parse_args()
gi = galaxy.GalaxyInstance(url=args.galaxy_url, key=args.api_key)
with open(args.workflow_path, 'r') as wf_file:
import_uuid = json.load(wf_file).get('uuid')
existing_uuids = [d.get('latest_workflow_uuid') for d in gi.workflows.get_workflows()]
if import_uuid not in existing_uuids:
gi.workflows.import_workflow_from_local_path(args.workflow_path)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import argparse
from bioblend import galaxy
import json
def main():
"""
This script uses bioblend to import .ga workflow files into a running instance of Galaxy
"""
parser = argparse.ArgumentParser()
parser.add_argument("-w", "--workflow_path", help='Path to workflow file')
parser.add_argument("-g", "--galaxy",
dest="galaxy_url",
help="Target Galaxy instance URL/IP address (required "
"if not defined in the tools list file)",)
parser.add_argument("-a", "--apikey",
dest="api_key",
help="Galaxy admin user API key (required if not "
"defined in the tools list file)",)
args = parser.parse_args()
gi = galaxy.GalaxyInstance(url=args.galaxy_url, key=args.api_key)
import_uuid = json.load(open(args.workflow_path, 'r')).get('uuid')
existing_uuids = [d.get('latest_workflow_uuid') for d in gi.workflows.get_workflows()]
if import_uuid not in existing_uuids:
gi.workflows.import_workflow_from_local_path(args.workflow_path)
if __name__ == '__main__':
main()
Make sure the opened workflow file gets closed after it's been loaded#!/usr/bin/env python
import argparse
from bioblend import galaxy
import json
def main():
"""
This script uses bioblend to import .ga workflow files into a running instance of Galaxy
"""
parser = argparse.ArgumentParser()
parser.add_argument("-w", "--workflow_path", help='Path to workflow file')
parser.add_argument("-g", "--galaxy",
dest="galaxy_url",
help="Target Galaxy instance URL/IP address (required "
"if not defined in the tools list file)",)
parser.add_argument("-a", "--apikey",
dest="api_key",
help="Galaxy admin user API key (required if not "
"defined in the tools list file)",)
args = parser.parse_args()
gi = galaxy.GalaxyInstance(url=args.galaxy_url, key=args.api_key)
with open(args.workflow_path, 'r') as wf_file:
import_uuid = json.load(wf_file).get('uuid')
existing_uuids = [d.get('latest_workflow_uuid') for d in gi.workflows.get_workflows()]
if import_uuid not in existing_uuids:
gi.workflows.import_workflow_from_local_path(args.workflow_path)
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
import argparse
from bioblend import galaxy
import json
def main():
"""
This script uses bioblend to import .ga workflow files into a running instance of Galaxy
"""
parser = argparse.ArgumentParser()
parser.add_argument("-w", "--workflow_path", help='Path to workflow file')
parser.add_argument("-g", "--galaxy",
dest="galaxy_url",
help="Target Galaxy instance URL/IP address (required "
"if not defined in the tools list file)",)
parser.add_argument("-a", "--apikey",
dest="api_key",
help="Galaxy admin user API key (required if not "
"defined in the tools list file)",)
args = parser.parse_args()
gi = galaxy.GalaxyInstance(url=args.galaxy_url, key=args.api_key)
import_uuid = json.load(open(args.workflow_path, 'r')).get('uuid')
existing_uuids = [d.get('latest_workflow_uuid') for d in gi.workflows.get_workflows()]
if import_uuid not in existing_uuids:
gi.workflows.import_workflow_from_local_path(args.workflow_path)
if __name__ == '__main__':
main()
<commit_msg>Make sure the opened workflow file gets closed after it's been loaded<commit_after>#!/usr/bin/env python
import argparse
from bioblend import galaxy
import json
def main():
"""
This script uses bioblend to import .ga workflow files into a running instance of Galaxy
"""
parser = argparse.ArgumentParser()
parser.add_argument("-w", "--workflow_path", help='Path to workflow file')
parser.add_argument("-g", "--galaxy",
dest="galaxy_url",
help="Target Galaxy instance URL/IP address (required "
"if not defined in the tools list file)",)
parser.add_argument("-a", "--apikey",
dest="api_key",
help="Galaxy admin user API key (required if not "
"defined in the tools list file)",)
args = parser.parse_args()
gi = galaxy.GalaxyInstance(url=args.galaxy_url, key=args.api_key)
with open(args.workflow_path, 'r') as wf_file:
import_uuid = json.load(wf_file).get('uuid')
existing_uuids = [d.get('latest_workflow_uuid') for d in gi.workflows.get_workflows()]
if import_uuid not in existing_uuids:
gi.workflows.import_workflow_from_local_path(args.workflow_path)
if __name__ == '__main__':
main()
|
c740a0118035023a3770334f35bba2cf4506dbdf
|
firecares/settings/production.py
|
firecares/settings/production.py
|
from firecares.settings.base import *
INSTALLED_APPS = (
'django_statsd',
) + INSTALLED_APPS
STATSD_HOST = 'stats.garnertb.com'
STATSD_PREFIX = 'firecares'
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
MIDDLEWARE_CLASSES = (
'django_statsd.middleware.GraphiteRequestTimingMiddleware',
'django_statsd.middleware.GraphiteMiddleware',
'django_statsd.middleware.TastyPieRequestTimingMiddleware'
) + MIDDLEWARE_CLASSES
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
}
}
AWS_STORAGE_BUCKET_NAME = 'firecares-static'
COMPRESS_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
COMPRESS_URL = "https://s3.amazonaws.com/firecares-static/"
COMPRESS_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATICFILES_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATIC_URL = COMPRESS_URL
DEBUG = False
try:
from local_settings import * # noqa
except ImportError:
pass
|
from firecares.settings.base import *
INSTALLED_APPS = (
'django_statsd',
) + INSTALLED_APPS
STATSD_HOST = 'stats.garnertb.com'
STATSD_PREFIX = 'firecares'
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
MIDDLEWARE_CLASSES = (
'django_statsd.middleware.GraphiteRequestTimingMiddleware',
'django_statsd.middleware.GraphiteMiddleware',
'django_statsd.middleware.TastyPieRequestTimingMiddleware'
) + MIDDLEWARE_CLASSES
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
}
}
AWS_STORAGE_BUCKET_NAME = 'firecares-static'
COMPRESS_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
COMPRESS_URL = "https://s3.amazonaws.com/firecares-static/"
COMPRESS_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATICFILES_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATIC_URL = COMPRESS_URL
DEBUG = False
AWS_QUERYSTRING_AUTH = False
try:
from local_settings import * # noqa
except ImportError:
pass
|
Disable query string auth for django compressor.
|
Disable query string auth for django compressor.
|
Python
|
mit
|
acengic/firecares,acengic/firecares,acengic/firecares,acengic/firecares
|
from firecares.settings.base import *
INSTALLED_APPS = (
'django_statsd',
) + INSTALLED_APPS
STATSD_HOST = 'stats.garnertb.com'
STATSD_PREFIX = 'firecares'
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
MIDDLEWARE_CLASSES = (
'django_statsd.middleware.GraphiteRequestTimingMiddleware',
'django_statsd.middleware.GraphiteMiddleware',
'django_statsd.middleware.TastyPieRequestTimingMiddleware'
) + MIDDLEWARE_CLASSES
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
}
}
AWS_STORAGE_BUCKET_NAME = 'firecares-static'
COMPRESS_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
COMPRESS_URL = "https://s3.amazonaws.com/firecares-static/"
COMPRESS_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATICFILES_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATIC_URL = COMPRESS_URL
DEBUG = False
try:
from local_settings import * # noqa
except ImportError:
passDisable query string auth for django compressor.
|
from firecares.settings.base import *
INSTALLED_APPS = (
'django_statsd',
) + INSTALLED_APPS
STATSD_HOST = 'stats.garnertb.com'
STATSD_PREFIX = 'firecares'
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
MIDDLEWARE_CLASSES = (
'django_statsd.middleware.GraphiteRequestTimingMiddleware',
'django_statsd.middleware.GraphiteMiddleware',
'django_statsd.middleware.TastyPieRequestTimingMiddleware'
) + MIDDLEWARE_CLASSES
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
}
}
AWS_STORAGE_BUCKET_NAME = 'firecares-static'
COMPRESS_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
COMPRESS_URL = "https://s3.amazonaws.com/firecares-static/"
COMPRESS_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATICFILES_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATIC_URL = COMPRESS_URL
DEBUG = False
AWS_QUERYSTRING_AUTH = False
try:
from local_settings import * # noqa
except ImportError:
pass
|
<commit_before>from firecares.settings.base import *
INSTALLED_APPS = (
'django_statsd',
) + INSTALLED_APPS
STATSD_HOST = 'stats.garnertb.com'
STATSD_PREFIX = 'firecares'
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
MIDDLEWARE_CLASSES = (
'django_statsd.middleware.GraphiteRequestTimingMiddleware',
'django_statsd.middleware.GraphiteMiddleware',
'django_statsd.middleware.TastyPieRequestTimingMiddleware'
) + MIDDLEWARE_CLASSES
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
}
}
AWS_STORAGE_BUCKET_NAME = 'firecares-static'
COMPRESS_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
COMPRESS_URL = "https://s3.amazonaws.com/firecares-static/"
COMPRESS_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATICFILES_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATIC_URL = COMPRESS_URL
DEBUG = False
try:
from local_settings import * # noqa
except ImportError:
pass<commit_msg>Disable query string auth for django compressor.<commit_after>
|
from firecares.settings.base import *
INSTALLED_APPS = (
'django_statsd',
) + INSTALLED_APPS
STATSD_HOST = 'stats.garnertb.com'
STATSD_PREFIX = 'firecares'
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
MIDDLEWARE_CLASSES = (
'django_statsd.middleware.GraphiteRequestTimingMiddleware',
'django_statsd.middleware.GraphiteMiddleware',
'django_statsd.middleware.TastyPieRequestTimingMiddleware'
) + MIDDLEWARE_CLASSES
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
}
}
AWS_STORAGE_BUCKET_NAME = 'firecares-static'
COMPRESS_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
COMPRESS_URL = "https://s3.amazonaws.com/firecares-static/"
COMPRESS_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATICFILES_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATIC_URL = COMPRESS_URL
DEBUG = False
AWS_QUERYSTRING_AUTH = False
try:
from local_settings import * # noqa
except ImportError:
pass
|
from firecares.settings.base import *
INSTALLED_APPS = (
'django_statsd',
) + INSTALLED_APPS
STATSD_HOST = 'stats.garnertb.com'
STATSD_PREFIX = 'firecares'
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
MIDDLEWARE_CLASSES = (
'django_statsd.middleware.GraphiteRequestTimingMiddleware',
'django_statsd.middleware.GraphiteMiddleware',
'django_statsd.middleware.TastyPieRequestTimingMiddleware'
) + MIDDLEWARE_CLASSES
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
}
}
AWS_STORAGE_BUCKET_NAME = 'firecares-static'
COMPRESS_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
COMPRESS_URL = "https://s3.amazonaws.com/firecares-static/"
COMPRESS_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATICFILES_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATIC_URL = COMPRESS_URL
DEBUG = False
try:
from local_settings import * # noqa
except ImportError:
passDisable query string auth for django compressor.from firecares.settings.base import *
INSTALLED_APPS = (
'django_statsd',
) + INSTALLED_APPS
STATSD_HOST = 'stats.garnertb.com'
STATSD_PREFIX = 'firecares'
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
MIDDLEWARE_CLASSES = (
'django_statsd.middleware.GraphiteRequestTimingMiddleware',
'django_statsd.middleware.GraphiteMiddleware',
'django_statsd.middleware.TastyPieRequestTimingMiddleware'
) + MIDDLEWARE_CLASSES
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
}
}
AWS_STORAGE_BUCKET_NAME = 'firecares-static'
COMPRESS_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
COMPRESS_URL = "https://s3.amazonaws.com/firecares-static/"
COMPRESS_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATICFILES_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATIC_URL = COMPRESS_URL
DEBUG = False
AWS_QUERYSTRING_AUTH = False
try:
from local_settings import * # noqa
except ImportError:
pass
|
<commit_before>from firecares.settings.base import *
INSTALLED_APPS = (
'django_statsd',
) + INSTALLED_APPS
STATSD_HOST = 'stats.garnertb.com'
STATSD_PREFIX = 'firecares'
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
MIDDLEWARE_CLASSES = (
'django_statsd.middleware.GraphiteRequestTimingMiddleware',
'django_statsd.middleware.GraphiteMiddleware',
'django_statsd.middleware.TastyPieRequestTimingMiddleware'
) + MIDDLEWARE_CLASSES
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
}
}
AWS_STORAGE_BUCKET_NAME = 'firecares-static'
COMPRESS_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
COMPRESS_URL = "https://s3.amazonaws.com/firecares-static/"
COMPRESS_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATICFILES_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATIC_URL = COMPRESS_URL
DEBUG = False
try:
from local_settings import * # noqa
except ImportError:
pass<commit_msg>Disable query string auth for django compressor.<commit_after>from firecares.settings.base import *
INSTALLED_APPS = (
'django_statsd',
) + INSTALLED_APPS
STATSD_HOST = 'stats.garnertb.com'
STATSD_PREFIX = 'firecares'
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
MIDDLEWARE_CLASSES = (
'django_statsd.middleware.GraphiteRequestTimingMiddleware',
'django_statsd.middleware.GraphiteMiddleware',
'django_statsd.middleware.TastyPieRequestTimingMiddleware'
) + MIDDLEWARE_CLASSES
STATSD_PATCHES = [
'django_statsd.patches.db',
'django_statsd.patches.cache',
]
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
'LOCATION': '127.0.0.1:11211',
}
}
AWS_STORAGE_BUCKET_NAME = 'firecares-static'
COMPRESS_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
COMPRESS_URL = "https://s3.amazonaws.com/firecares-static/"
COMPRESS_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATICFILES_STORAGE = "firecares.utils.CachedS3BotoStorage"
STATIC_URL = COMPRESS_URL
DEBUG = False
AWS_QUERYSTRING_AUTH = False
try:
from local_settings import * # noqa
except ImportError:
pass
|
135ac2fa3aa978328ba72db6ca84920f3da0f39a
|
furikura/desktop/unity.py
|
furikura/desktop/unity.py
|
import gi
import time
gi.require_version('Unity', '7.0')
from gi.repository import Unity, GObject
def update_counter(count):
launcher = Unity.LauncherEntry.get_for_desktop_id("furikura.desktop")
launcher.set_property("count", count)
launcher.set_property("count_visible", True)
|
import gi
gi.require_version('Unity', '7.0')
from gi.repository import Unity
def update_counter(count):
launcher = Unity.LauncherEntry.get_for_desktop_id("furikura.desktop")
launcher.set_property("count", count)
launcher.set_property("count_visible", True)
|
Remove unnecessary imports for Unity module
|
Remove unnecessary imports for Unity module
|
Python
|
mit
|
benjamindean/furi-kura,benjamindean/furi-kura
|
import gi
import time
gi.require_version('Unity', '7.0')
from gi.repository import Unity, GObject
def update_counter(count):
launcher = Unity.LauncherEntry.get_for_desktop_id("furikura.desktop")
launcher.set_property("count", count)
launcher.set_property("count_visible", True)
Remove unnecessary imports for Unity module
|
import gi
gi.require_version('Unity', '7.0')
from gi.repository import Unity
def update_counter(count):
launcher = Unity.LauncherEntry.get_for_desktop_id("furikura.desktop")
launcher.set_property("count", count)
launcher.set_property("count_visible", True)
|
<commit_before>import gi
import time
gi.require_version('Unity', '7.0')
from gi.repository import Unity, GObject
def update_counter(count):
launcher = Unity.LauncherEntry.get_for_desktop_id("furikura.desktop")
launcher.set_property("count", count)
launcher.set_property("count_visible", True)
<commit_msg>Remove unnecessary imports for Unity module<commit_after>
|
import gi
gi.require_version('Unity', '7.0')
from gi.repository import Unity
def update_counter(count):
launcher = Unity.LauncherEntry.get_for_desktop_id("furikura.desktop")
launcher.set_property("count", count)
launcher.set_property("count_visible", True)
|
import gi
import time
gi.require_version('Unity', '7.0')
from gi.repository import Unity, GObject
def update_counter(count):
launcher = Unity.LauncherEntry.get_for_desktop_id("furikura.desktop")
launcher.set_property("count", count)
launcher.set_property("count_visible", True)
Remove unnecessary imports for Unity moduleimport gi
gi.require_version('Unity', '7.0')
from gi.repository import Unity
def update_counter(count):
launcher = Unity.LauncherEntry.get_for_desktop_id("furikura.desktop")
launcher.set_property("count", count)
launcher.set_property("count_visible", True)
|
<commit_before>import gi
import time
gi.require_version('Unity', '7.0')
from gi.repository import Unity, GObject
def update_counter(count):
launcher = Unity.LauncherEntry.get_for_desktop_id("furikura.desktop")
launcher.set_property("count", count)
launcher.set_property("count_visible", True)
<commit_msg>Remove unnecessary imports for Unity module<commit_after>import gi
gi.require_version('Unity', '7.0')
from gi.repository import Unity
def update_counter(count):
launcher = Unity.LauncherEntry.get_for_desktop_id("furikura.desktop")
launcher.set_property("count", count)
launcher.set_property("count_visible", True)
|
2559a1bd8cb8c41df165022074d1b123d4a0345a
|
hecuba_py/tests/storage_api_tests.py
|
hecuba_py/tests/storage_api_tests.py
|
import unittest
from storage.api import getByID
from hecuba.hdict import StorageDict
class ApiTestSDict(StorageDict):
'''
@TypeSpec <<key:int>, value:double>
'''
class StorageApi_Tests(unittest.TestCase):
def setUp(self):
pass
def class_type_test(self):
base_dict = ApiTestSDict('test.api_sdict')
storage_id = base_dict.getID()
del base_dict
rebuild_dict = getByID(storage_id)
self.assertTrue(isinstance(rebuild_dict, ApiTestSDict))
|
import unittest
from storage.api import getByID
from hecuba import config, StorageDict
class ApiTestSDict(StorageDict):
'''
@TypeSpec <<key:int>, value:double>
'''
class StorageApi_Tests(unittest.TestCase):
def setUp(self):
config.reset(mock_cassandra=False)
def class_type_test(self):
base_dict = ApiTestSDict('test.api_sdict')
storage_id = base_dict.getID()
del base_dict
rebuild_dict = getByID(storage_id)
self.assertTrue(isinstance(rebuild_dict, ApiTestSDict))
|
Reset Hecuba config when setUp a test
|
Reset Hecuba config when setUp a test
|
Python
|
apache-2.0
|
bsc-dd/hecuba,bsc-dd/hecuba,bsc-dd/hecuba,bsc-dd/hecuba
|
import unittest
from storage.api import getByID
from hecuba.hdict import StorageDict
class ApiTestSDict(StorageDict):
'''
@TypeSpec <<key:int>, value:double>
'''
class StorageApi_Tests(unittest.TestCase):
def setUp(self):
pass
def class_type_test(self):
base_dict = ApiTestSDict('test.api_sdict')
storage_id = base_dict.getID()
del base_dict
rebuild_dict = getByID(storage_id)
self.assertTrue(isinstance(rebuild_dict, ApiTestSDict))Reset Hecuba config when setUp a test
|
import unittest
from storage.api import getByID
from hecuba import config, StorageDict
class ApiTestSDict(StorageDict):
'''
@TypeSpec <<key:int>, value:double>
'''
class StorageApi_Tests(unittest.TestCase):
def setUp(self):
config.reset(mock_cassandra=False)
def class_type_test(self):
base_dict = ApiTestSDict('test.api_sdict')
storage_id = base_dict.getID()
del base_dict
rebuild_dict = getByID(storage_id)
self.assertTrue(isinstance(rebuild_dict, ApiTestSDict))
|
<commit_before>import unittest
from storage.api import getByID
from hecuba.hdict import StorageDict
class ApiTestSDict(StorageDict):
'''
@TypeSpec <<key:int>, value:double>
'''
class StorageApi_Tests(unittest.TestCase):
def setUp(self):
pass
def class_type_test(self):
base_dict = ApiTestSDict('test.api_sdict')
storage_id = base_dict.getID()
del base_dict
rebuild_dict = getByID(storage_id)
self.assertTrue(isinstance(rebuild_dict, ApiTestSDict))<commit_msg>Reset Hecuba config when setUp a test<commit_after>
|
import unittest
from storage.api import getByID
from hecuba import config, StorageDict
class ApiTestSDict(StorageDict):
'''
@TypeSpec <<key:int>, value:double>
'''
class StorageApi_Tests(unittest.TestCase):
def setUp(self):
config.reset(mock_cassandra=False)
def class_type_test(self):
base_dict = ApiTestSDict('test.api_sdict')
storage_id = base_dict.getID()
del base_dict
rebuild_dict = getByID(storage_id)
self.assertTrue(isinstance(rebuild_dict, ApiTestSDict))
|
import unittest
from storage.api import getByID
from hecuba.hdict import StorageDict
class ApiTestSDict(StorageDict):
'''
@TypeSpec <<key:int>, value:double>
'''
class StorageApi_Tests(unittest.TestCase):
def setUp(self):
pass
def class_type_test(self):
base_dict = ApiTestSDict('test.api_sdict')
storage_id = base_dict.getID()
del base_dict
rebuild_dict = getByID(storage_id)
self.assertTrue(isinstance(rebuild_dict, ApiTestSDict))Reset Hecuba config when setUp a testimport unittest
from storage.api import getByID
from hecuba import config, StorageDict
class ApiTestSDict(StorageDict):
'''
@TypeSpec <<key:int>, value:double>
'''
class StorageApi_Tests(unittest.TestCase):
def setUp(self):
config.reset(mock_cassandra=False)
def class_type_test(self):
base_dict = ApiTestSDict('test.api_sdict')
storage_id = base_dict.getID()
del base_dict
rebuild_dict = getByID(storage_id)
self.assertTrue(isinstance(rebuild_dict, ApiTestSDict))
|
<commit_before>import unittest
from storage.api import getByID
from hecuba.hdict import StorageDict
class ApiTestSDict(StorageDict):
'''
@TypeSpec <<key:int>, value:double>
'''
class StorageApi_Tests(unittest.TestCase):
def setUp(self):
pass
def class_type_test(self):
base_dict = ApiTestSDict('test.api_sdict')
storage_id = base_dict.getID()
del base_dict
rebuild_dict = getByID(storage_id)
self.assertTrue(isinstance(rebuild_dict, ApiTestSDict))<commit_msg>Reset Hecuba config when setUp a test<commit_after>import unittest
from storage.api import getByID
from hecuba import config, StorageDict
class ApiTestSDict(StorageDict):
'''
@TypeSpec <<key:int>, value:double>
'''
class StorageApi_Tests(unittest.TestCase):
def setUp(self):
config.reset(mock_cassandra=False)
def class_type_test(self):
base_dict = ApiTestSDict('test.api_sdict')
storage_id = base_dict.getID()
del base_dict
rebuild_dict = getByID(storage_id)
self.assertTrue(isinstance(rebuild_dict, ApiTestSDict))
|
f6ce7485f18d3c5299b64a9b10af08f5da1c2335
|
infrastructure/control/osimctrl/src/start-opensim.py
|
infrastructure/control/osimctrl/src/start-opensim.py
|
#!/usr/bin/python
import os.path
import re
import subprocess
import sys
### CONFIGURE THESE PATHS ###
binaryPath = "/home/opensim/opensim/opensim-current/bin"
pidPath = "/tmp/OpenSim.pid"
### END OF CONFIG ###
if os.path.exists(pidPath):
print >> sys.stderr, "ERROR: OpenSim PID file %s still present. Assuming OpenSim has been started already." % pidPath
sys.exit(1)
# If PID isn't set then we'll check the screen list.
# However, this is a much less perfect mechanism since OpenSimulator may have been started outside screen
screenList = ""
try:
screenList = subprocess.check_output("screen -list", shell=True)
except:
None
if re.match("\s+\d+\.OpenSim", screenList):
print >> sys.stderr, "ERROR: Screen session for OpenSim already started."
sys.exit(1)
|
#!/usr/bin/python
import os.path
import re
import subprocess
import sys
### CONFIGURE THESE PATHS ###
binaryPath = "/home/opensim/opensim/opensim-current/bin"
pidPath = "/tmp/OpenSim.pid"
### END OF CONFIG ###
### FUNCTIONS ###
def execCmd(cmd):
print "Executing command: %s" % cmd
return subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
### SCRIPT ###
if os.path.exists(pidPath):
print >> sys.stderr, "ERROR: OpenSim PID file %s still present. Assuming OpenSim has been started already." % pidPath
sys.exit(1)
# If PID isn't set then we'll check the screen list.
# However, this is a much less perfect mechanism since OpenSimulator may have been started outside screen
screenList = ""
try:
screenList = execCmd("screen -list")
except:
None
if re.match("\s+\d+\.OpenSim", screenList):
print >> sys.stderr, "ERROR: Screen session for OpenSim already started."
sys.exit(1)
os.chdir(binaryPath)
|
Create execCmd function and use
|
Create execCmd function and use
|
Python
|
bsd-3-clause
|
justinccdev/opensimulator-tools,justinccdev/opensimulator-tools,justinccdev/opensimulator-tools,justinccdev/opensimulator-tools
|
#!/usr/bin/python
import os.path
import re
import subprocess
import sys
### CONFIGURE THESE PATHS ###
binaryPath = "/home/opensim/opensim/opensim-current/bin"
pidPath = "/tmp/OpenSim.pid"
### END OF CONFIG ###
if os.path.exists(pidPath):
print >> sys.stderr, "ERROR: OpenSim PID file %s still present. Assuming OpenSim has been started already." % pidPath
sys.exit(1)
# If PID isn't set then we'll check the screen list.
# However, this is a much less perfect mechanism since OpenSimulator may have been started outside screen
screenList = ""
try:
screenList = subprocess.check_output("screen -list", shell=True)
except:
None
if re.match("\s+\d+\.OpenSim", screenList):
print >> sys.stderr, "ERROR: Screen session for OpenSim already started."
sys.exit(1)
Create execCmd function and use
|
#!/usr/bin/python
import os.path
import re
import subprocess
import sys
### CONFIGURE THESE PATHS ###
binaryPath = "/home/opensim/opensim/opensim-current/bin"
pidPath = "/tmp/OpenSim.pid"
### END OF CONFIG ###
### FUNCTIONS ###
def execCmd(cmd):
print "Executing command: %s" % cmd
return subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
### SCRIPT ###
if os.path.exists(pidPath):
print >> sys.stderr, "ERROR: OpenSim PID file %s still present. Assuming OpenSim has been started already." % pidPath
sys.exit(1)
# If PID isn't set then we'll check the screen list.
# However, this is a much less perfect mechanism since OpenSimulator may have been started outside screen
screenList = ""
try:
screenList = execCmd("screen -list")
except:
None
if re.match("\s+\d+\.OpenSim", screenList):
print >> sys.stderr, "ERROR: Screen session for OpenSim already started."
sys.exit(1)
os.chdir(binaryPath)
|
<commit_before>#!/usr/bin/python
import os.path
import re
import subprocess
import sys
### CONFIGURE THESE PATHS ###
binaryPath = "/home/opensim/opensim/opensim-current/bin"
pidPath = "/tmp/OpenSim.pid"
### END OF CONFIG ###
if os.path.exists(pidPath):
print >> sys.stderr, "ERROR: OpenSim PID file %s still present. Assuming OpenSim has been started already." % pidPath
sys.exit(1)
# If PID isn't set then we'll check the screen list.
# However, this is a much less perfect mechanism since OpenSimulator may have been started outside screen
screenList = ""
try:
screenList = subprocess.check_output("screen -list", shell=True)
except:
None
if re.match("\s+\d+\.OpenSim", screenList):
print >> sys.stderr, "ERROR: Screen session for OpenSim already started."
sys.exit(1)
<commit_msg>Create execCmd function and use<commit_after>
|
#!/usr/bin/python
import os.path
import re
import subprocess
import sys
### CONFIGURE THESE PATHS ###
binaryPath = "/home/opensim/opensim/opensim-current/bin"
pidPath = "/tmp/OpenSim.pid"
### END OF CONFIG ###
### FUNCTIONS ###
def execCmd(cmd):
print "Executing command: %s" % cmd
return subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
### SCRIPT ###
if os.path.exists(pidPath):
print >> sys.stderr, "ERROR: OpenSim PID file %s still present. Assuming OpenSim has been started already." % pidPath
sys.exit(1)
# If PID isn't set then we'll check the screen list.
# However, this is a much less perfect mechanism since OpenSimulator may have been started outside screen
screenList = ""
try:
screenList = execCmd("screen -list")
except:
None
if re.match("\s+\d+\.OpenSim", screenList):
print >> sys.stderr, "ERROR: Screen session for OpenSim already started."
sys.exit(1)
os.chdir(binaryPath)
|
#!/usr/bin/python
import os.path
import re
import subprocess
import sys
### CONFIGURE THESE PATHS ###
binaryPath = "/home/opensim/opensim/opensim-current/bin"
pidPath = "/tmp/OpenSim.pid"
### END OF CONFIG ###
if os.path.exists(pidPath):
print >> sys.stderr, "ERROR: OpenSim PID file %s still present. Assuming OpenSim has been started already." % pidPath
sys.exit(1)
# If PID isn't set then we'll check the screen list.
# However, this is a much less perfect mechanism since OpenSimulator may have been started outside screen
screenList = ""
try:
screenList = subprocess.check_output("screen -list", shell=True)
except:
None
if re.match("\s+\d+\.OpenSim", screenList):
print >> sys.stderr, "ERROR: Screen session for OpenSim already started."
sys.exit(1)
Create execCmd function and use#!/usr/bin/python
import os.path
import re
import subprocess
import sys
### CONFIGURE THESE PATHS ###
binaryPath = "/home/opensim/opensim/opensim-current/bin"
pidPath = "/tmp/OpenSim.pid"
### END OF CONFIG ###
### FUNCTIONS ###
def execCmd(cmd):
print "Executing command: %s" % cmd
return subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
### SCRIPT ###
if os.path.exists(pidPath):
print >> sys.stderr, "ERROR: OpenSim PID file %s still present. Assuming OpenSim has been started already." % pidPath
sys.exit(1)
# If PID isn't set then we'll check the screen list.
# However, this is a much less perfect mechanism since OpenSimulator may have been started outside screen
screenList = ""
try:
screenList = execCmd("screen -list")
except:
None
if re.match("\s+\d+\.OpenSim", screenList):
print >> sys.stderr, "ERROR: Screen session for OpenSim already started."
sys.exit(1)
os.chdir(binaryPath)
|
<commit_before>#!/usr/bin/python
import os.path
import re
import subprocess
import sys
### CONFIGURE THESE PATHS ###
binaryPath = "/home/opensim/opensim/opensim-current/bin"
pidPath = "/tmp/OpenSim.pid"
### END OF CONFIG ###
if os.path.exists(pidPath):
print >> sys.stderr, "ERROR: OpenSim PID file %s still present. Assuming OpenSim has been started already." % pidPath
sys.exit(1)
# If PID isn't set then we'll check the screen list.
# However, this is a much less perfect mechanism since OpenSimulator may have been started outside screen
screenList = ""
try:
screenList = subprocess.check_output("screen -list", shell=True)
except:
None
if re.match("\s+\d+\.OpenSim", screenList):
print >> sys.stderr, "ERROR: Screen session for OpenSim already started."
sys.exit(1)
<commit_msg>Create execCmd function and use<commit_after>#!/usr/bin/python
import os.path
import re
import subprocess
import sys
### CONFIGURE THESE PATHS ###
binaryPath = "/home/opensim/opensim/opensim-current/bin"
pidPath = "/tmp/OpenSim.pid"
### END OF CONFIG ###
### FUNCTIONS ###
def execCmd(cmd):
print "Executing command: %s" % cmd
return subprocess.check_output(cmd, shell=True, stderr=subprocess.STDOUT)
### SCRIPT ###
if os.path.exists(pidPath):
print >> sys.stderr, "ERROR: OpenSim PID file %s still present. Assuming OpenSim has been started already." % pidPath
sys.exit(1)
# If PID isn't set then we'll check the screen list.
# However, this is a much less perfect mechanism since OpenSimulator may have been started outside screen
screenList = ""
try:
screenList = execCmd("screen -list")
except:
None
if re.match("\s+\d+\.OpenSim", screenList):
print >> sys.stderr, "ERROR: Screen session for OpenSim already started."
sys.exit(1)
os.chdir(binaryPath)
|
ee401cae95fe5244314a8cb032623779ac7e4842
|
aragog/routing/decorator.py
|
aragog/routing/decorator.py
|
#! /usr/bin/env python
"""
Aragog Router Decorator
-----------------------
Convert any function into a WSGI endpoint with a simple decorator.
"""
from aragog.wsgi import get_url
from aragog.routing.client_error import HTTP404
class Router(object):
"""
Router holds the mapping of routes to callables.
"""
def __init__(self):
"""
Instance level route mapping
"""
self.mapping = {}
def __call__(self, environ, start_response):
"""
Get a WSGI request, and pass it on to the correct callable.
"""
routing = self.mapping.get(get_url(environ), HTTP404())
return routing(environ, start_response)
def add_route(self, url, func):
"""
Adds a route to the mapping
"""
if url not in self.mapping:
self.mapping[url] = func
else:
raise KeyError("Route already exists: {}".format(url))
def route(self, uri):
"""
Route a request to a function
:param: uri
:param_type: string
"""
def app_wrapper(f):
self.add_route(uri, f)
return f
return app_wrapper
|
#! /usr/bin/env python
"""
Aragog Router Decorator
-----------------------
Convert any function into a WSGI endpoint with a simple decorator.
"""
from aragog.wsgi import get_url
from aragog.routing.client_error import HTTP404
class Router(object):
"""
Router holds the mapping of routes to callables.
"""
def __init__(self):
"""
Instance level route mapping
"""
self.mapping = {}
def __call__(self, environ, start_response):
"""
Get a WSGI request, and pass it on to the correct callable.
"""
routing = self.mapping.get(get_url(environ), HTTP404)
return routing(environ, start_response)
def add_route(self, url, func):
"""
Adds a route to the mapping
"""
if url not in self.mapping:
self.mapping[url] = func
else:
raise KeyError("Route already exists: {}".format(url))
def route(self, uri):
"""
Route a request to a function
:param: uri
:param_type: string
"""
def app_wrapper(f):
self.add_route(uri, f)
return f
return app_wrapper
|
Fix call to HTTP404 now it is a function.
|
Fix call to HTTP404 now it is a function.
|
Python
|
apache-2.0
|
bramwelt/aragog
|
#! /usr/bin/env python
"""
Aragog Router Decorator
-----------------------
Convert any function into a WSGI endpoint with a simple decorator.
"""
from aragog.wsgi import get_url
from aragog.routing.client_error import HTTP404
class Router(object):
"""
Router holds the mapping of routes to callables.
"""
def __init__(self):
"""
Instance level route mapping
"""
self.mapping = {}
def __call__(self, environ, start_response):
"""
Get a WSGI request, and pass it on to the correct callable.
"""
routing = self.mapping.get(get_url(environ), HTTP404())
return routing(environ, start_response)
def add_route(self, url, func):
"""
Adds a route to the mapping
"""
if url not in self.mapping:
self.mapping[url] = func
else:
raise KeyError("Route already exists: {}".format(url))
def route(self, uri):
"""
Route a request to a function
:param: uri
:param_type: string
"""
def app_wrapper(f):
self.add_route(uri, f)
return f
return app_wrapper
Fix call to HTTP404 now it is a function.
|
#! /usr/bin/env python
"""
Aragog Router Decorator
-----------------------
Convert any function into a WSGI endpoint with a simple decorator.
"""
from aragog.wsgi import get_url
from aragog.routing.client_error import HTTP404
class Router(object):
"""
Router holds the mapping of routes to callables.
"""
def __init__(self):
"""
Instance level route mapping
"""
self.mapping = {}
def __call__(self, environ, start_response):
"""
Get a WSGI request, and pass it on to the correct callable.
"""
routing = self.mapping.get(get_url(environ), HTTP404)
return routing(environ, start_response)
def add_route(self, url, func):
"""
Adds a route to the mapping
"""
if url not in self.mapping:
self.mapping[url] = func
else:
raise KeyError("Route already exists: {}".format(url))
def route(self, uri):
"""
Route a request to a function
:param: uri
:param_type: string
"""
def app_wrapper(f):
self.add_route(uri, f)
return f
return app_wrapper
|
<commit_before>#! /usr/bin/env python
"""
Aragog Router Decorator
-----------------------
Convert any function into a WSGI endpoint with a simple decorator.
"""
from aragog.wsgi import get_url
from aragog.routing.client_error import HTTP404
class Router(object):
"""
Router holds the mapping of routes to callables.
"""
def __init__(self):
"""
Instance level route mapping
"""
self.mapping = {}
def __call__(self, environ, start_response):
"""
Get a WSGI request, and pass it on to the correct callable.
"""
routing = self.mapping.get(get_url(environ), HTTP404())
return routing(environ, start_response)
def add_route(self, url, func):
"""
Adds a route to the mapping
"""
if url not in self.mapping:
self.mapping[url] = func
else:
raise KeyError("Route already exists: {}".format(url))
def route(self, uri):
"""
Route a request to a function
:param: uri
:param_type: string
"""
def app_wrapper(f):
self.add_route(uri, f)
return f
return app_wrapper
<commit_msg>Fix call to HTTP404 now it is a function.<commit_after>
|
#! /usr/bin/env python
"""
Aragog Router Decorator
-----------------------
Convert any function into a WSGI endpoint with a simple decorator.
"""
from aragog.wsgi import get_url
from aragog.routing.client_error import HTTP404
class Router(object):
"""
Router holds the mapping of routes to callables.
"""
def __init__(self):
"""
Instance level route mapping
"""
self.mapping = {}
def __call__(self, environ, start_response):
"""
Get a WSGI request, and pass it on to the correct callable.
"""
routing = self.mapping.get(get_url(environ), HTTP404)
return routing(environ, start_response)
def add_route(self, url, func):
"""
Adds a route to the mapping
"""
if url not in self.mapping:
self.mapping[url] = func
else:
raise KeyError("Route already exists: {}".format(url))
def route(self, uri):
"""
Route a request to a function
:param: uri
:param_type: string
"""
def app_wrapper(f):
self.add_route(uri, f)
return f
return app_wrapper
|
#! /usr/bin/env python
"""
Aragog Router Decorator
-----------------------
Convert any function into a WSGI endpoint with a simple decorator.
"""
from aragog.wsgi import get_url
from aragog.routing.client_error import HTTP404
class Router(object):
"""
Router holds the mapping of routes to callables.
"""
def __init__(self):
"""
Instance level route mapping
"""
self.mapping = {}
def __call__(self, environ, start_response):
"""
Get a WSGI request, and pass it on to the correct callable.
"""
routing = self.mapping.get(get_url(environ), HTTP404())
return routing(environ, start_response)
def add_route(self, url, func):
"""
Adds a route to the mapping
"""
if url not in self.mapping:
self.mapping[url] = func
else:
raise KeyError("Route already exists: {}".format(url))
def route(self, uri):
"""
Route a request to a function
:param: uri
:param_type: string
"""
def app_wrapper(f):
self.add_route(uri, f)
return f
return app_wrapper
Fix call to HTTP404 now it is a function.#! /usr/bin/env python
"""
Aragog Router Decorator
-----------------------
Convert any function into a WSGI endpoint with a simple decorator.
"""
from aragog.wsgi import get_url
from aragog.routing.client_error import HTTP404
class Router(object):
"""
Router holds the mapping of routes to callables.
"""
def __init__(self):
"""
Instance level route mapping
"""
self.mapping = {}
def __call__(self, environ, start_response):
"""
Get a WSGI request, and pass it on to the correct callable.
"""
routing = self.mapping.get(get_url(environ), HTTP404)
return routing(environ, start_response)
def add_route(self, url, func):
"""
Adds a route to the mapping
"""
if url not in self.mapping:
self.mapping[url] = func
else:
raise KeyError("Route already exists: {}".format(url))
def route(self, uri):
"""
Route a request to a function
:param: uri
:param_type: string
"""
def app_wrapper(f):
self.add_route(uri, f)
return f
return app_wrapper
|
<commit_before>#! /usr/bin/env python
"""
Aragog Router Decorator
-----------------------
Convert any function into a WSGI endpoint with a simple decorator.
"""
from aragog.wsgi import get_url
from aragog.routing.client_error import HTTP404
class Router(object):
"""
Router holds the mapping of routes to callables.
"""
def __init__(self):
"""
Instance level route mapping
"""
self.mapping = {}
def __call__(self, environ, start_response):
"""
Get a WSGI request, and pass it on to the correct callable.
"""
routing = self.mapping.get(get_url(environ), HTTP404())
return routing(environ, start_response)
def add_route(self, url, func):
"""
Adds a route to the mapping
"""
if url not in self.mapping:
self.mapping[url] = func
else:
raise KeyError("Route already exists: {}".format(url))
def route(self, uri):
"""
Route a request to a function
:param: uri
:param_type: string
"""
def app_wrapper(f):
self.add_route(uri, f)
return f
return app_wrapper
<commit_msg>Fix call to HTTP404 now it is a function.<commit_after>#! /usr/bin/env python
"""
Aragog Router Decorator
-----------------------
Convert any function into a WSGI endpoint with a simple decorator.
"""
from aragog.wsgi import get_url
from aragog.routing.client_error import HTTP404
class Router(object):
"""
Router holds the mapping of routes to callables.
"""
def __init__(self):
"""
Instance level route mapping
"""
self.mapping = {}
def __call__(self, environ, start_response):
"""
Get a WSGI request, and pass it on to the correct callable.
"""
routing = self.mapping.get(get_url(environ), HTTP404)
return routing(environ, start_response)
def add_route(self, url, func):
"""
Adds a route to the mapping
"""
if url not in self.mapping:
self.mapping[url] = func
else:
raise KeyError("Route already exists: {}".format(url))
def route(self, uri):
"""
Route a request to a function
:param: uri
:param_type: string
"""
def app_wrapper(f):
self.add_route(uri, f)
return f
return app_wrapper
|
1ef76b4f4395c9b5e3c2338822947999d5581013
|
labs/lab-3/ex-3-2.events.py
|
labs/lab-3/ex-3-2.events.py
|
#!/usr/bin/env python
#
# Copyright 2016 BMC Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import tspapi
api = tspapi.API()
source = tspapi.Source(ref='myhost')
api.event_create(title="bar", fingerprint_fields=['@title'], source=source)
|
#!/usr/bin/env python
#
# Copyright 2016 BMC Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import tspapi
api = tspapi.API()
source = tspapi.Source(ref='myhost', _type='host')
api.event_create(title="bar", fingerprint_fields=['@title'], source=source)
|
Add type field to source
|
Add type field to source
|
Python
|
apache-2.0
|
jdgwartney/tsi-lab,boundary/tsi-lab,jdgwartney/tsi-lab,boundary/tsi-lab,boundary/tsi-lab,boundary/tsi-lab,jdgwartney/tsi-lab,jdgwartney/tsi-lab
|
#!/usr/bin/env python
#
# Copyright 2016 BMC Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import tspapi
api = tspapi.API()
source = tspapi.Source(ref='myhost')
api.event_create(title="bar", fingerprint_fields=['@title'], source=source)
Add type field to source
|
#!/usr/bin/env python
#
# Copyright 2016 BMC Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import tspapi
api = tspapi.API()
source = tspapi.Source(ref='myhost', _type='host')
api.event_create(title="bar", fingerprint_fields=['@title'], source=source)
|
<commit_before>#!/usr/bin/env python
#
# Copyright 2016 BMC Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import tspapi
api = tspapi.API()
source = tspapi.Source(ref='myhost')
api.event_create(title="bar", fingerprint_fields=['@title'], source=source)
<commit_msg>Add type field to source<commit_after>
|
#!/usr/bin/env python
#
# Copyright 2016 BMC Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import tspapi
api = tspapi.API()
source = tspapi.Source(ref='myhost', _type='host')
api.event_create(title="bar", fingerprint_fields=['@title'], source=source)
|
#!/usr/bin/env python
#
# Copyright 2016 BMC Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import tspapi
api = tspapi.API()
source = tspapi.Source(ref='myhost')
api.event_create(title="bar", fingerprint_fields=['@title'], source=source)
Add type field to source#!/usr/bin/env python
#
# Copyright 2016 BMC Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import tspapi
api = tspapi.API()
source = tspapi.Source(ref='myhost', _type='host')
api.event_create(title="bar", fingerprint_fields=['@title'], source=source)
|
<commit_before>#!/usr/bin/env python
#
# Copyright 2016 BMC Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import tspapi
api = tspapi.API()
source = tspapi.Source(ref='myhost')
api.event_create(title="bar", fingerprint_fields=['@title'], source=source)
<commit_msg>Add type field to source<commit_after>#!/usr/bin/env python
#
# Copyright 2016 BMC Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import tspapi
api = tspapi.API()
source = tspapi.Source(ref='myhost', _type='host')
api.event_create(title="bar", fingerprint_fields=['@title'], source=source)
|
3aeae9c3cde1976b43667d0a42039b8dd9bf52d9
|
bitHopper/Website/Worker_Page.py
|
bitHopper/Website/Worker_Page.py
|
from bitHopper.Website import app, flask
import btcnet_info
import bitHopper.Configuration.Workers
@app.route("/worker", methods=['POST', 'GET'])
def worker():
#Check if this is a form submission
handle_worker_post(flask.request.form)
#Get a list of currently configured workers
pools_workers = {}
for pool in btcnet_info.get_pools():
if not pool.name:
continue
pools_workers[pool.name] = bitHopper.Configuration.Workers.get_worker_from(pool.name)
return flask.render_template('worker.html', pools = pools_workers)
def handle_worker_post(post):
for item in ['method','username','password', 'pool']:
if item not in post:
return
if post['method'] == 'remove':
bitHopper.Configuration.Workers.remove(
post['pool'], post['username'], post['password'])
elif post['method'] == 'add':
bitHopper.Configuration.Workers.add(
post['pool'], post['username'], post['password'])
|
from bitHopper.Website import app, flask
import btcnet_info
import bitHopper.Configuration.Workers
@app.route("/worker", methods=['POST', 'GET'])
def worker():
#Check if this is a form submission
handle_worker_post(flask.request.form)
#Get a list of currently configured workers
pools_workers = {}
for pool in btcnet_info.get_pools():
if pool.name is None:
logging.debug('Ignoring %s', pool)
continue
pools_workers[pool.name] = bitHopper.Configuration.Workers.get_worker_from(pool.name)
return flask.render_template('worker.html', pools = pools_workers)
def handle_worker_post(post):
for item in ['method','username','password', 'pool']:
if item not in post:
return
if post['method'] == 'remove':
bitHopper.Configuration.Workers.remove(
post['pool'], post['username'], post['password'])
elif post['method'] == 'add':
bitHopper.Configuration.Workers.add(
post['pool'], post['username'], post['password'])
|
Add some debugging for the worker generation
|
Add some debugging for the worker generation
|
Python
|
mit
|
c00w/bitHopper,c00w/bitHopper
|
from bitHopper.Website import app, flask
import btcnet_info
import bitHopper.Configuration.Workers
@app.route("/worker", methods=['POST', 'GET'])
def worker():
#Check if this is a form submission
handle_worker_post(flask.request.form)
#Get a list of currently configured workers
pools_workers = {}
for pool in btcnet_info.get_pools():
if not pool.name:
continue
pools_workers[pool.name] = bitHopper.Configuration.Workers.get_worker_from(pool.name)
return flask.render_template('worker.html', pools = pools_workers)
def handle_worker_post(post):
for item in ['method','username','password', 'pool']:
if item not in post:
return
if post['method'] == 'remove':
bitHopper.Configuration.Workers.remove(
post['pool'], post['username'], post['password'])
elif post['method'] == 'add':
bitHopper.Configuration.Workers.add(
post['pool'], post['username'], post['password'])
Add some debugging for the worker generation
|
from bitHopper.Website import app, flask
import btcnet_info
import bitHopper.Configuration.Workers
@app.route("/worker", methods=['POST', 'GET'])
def worker():
#Check if this is a form submission
handle_worker_post(flask.request.form)
#Get a list of currently configured workers
pools_workers = {}
for pool in btcnet_info.get_pools():
if pool.name is None:
logging.debug('Ignoring %s', pool)
continue
pools_workers[pool.name] = bitHopper.Configuration.Workers.get_worker_from(pool.name)
return flask.render_template('worker.html', pools = pools_workers)
def handle_worker_post(post):
for item in ['method','username','password', 'pool']:
if item not in post:
return
if post['method'] == 'remove':
bitHopper.Configuration.Workers.remove(
post['pool'], post['username'], post['password'])
elif post['method'] == 'add':
bitHopper.Configuration.Workers.add(
post['pool'], post['username'], post['password'])
|
<commit_before>from bitHopper.Website import app, flask
import btcnet_info
import bitHopper.Configuration.Workers
@app.route("/worker", methods=['POST', 'GET'])
def worker():
#Check if this is a form submission
handle_worker_post(flask.request.form)
#Get a list of currently configured workers
pools_workers = {}
for pool in btcnet_info.get_pools():
if not pool.name:
continue
pools_workers[pool.name] = bitHopper.Configuration.Workers.get_worker_from(pool.name)
return flask.render_template('worker.html', pools = pools_workers)
def handle_worker_post(post):
for item in ['method','username','password', 'pool']:
if item not in post:
return
if post['method'] == 'remove':
bitHopper.Configuration.Workers.remove(
post['pool'], post['username'], post['password'])
elif post['method'] == 'add':
bitHopper.Configuration.Workers.add(
post['pool'], post['username'], post['password'])
<commit_msg>Add some debugging for the worker generation<commit_after>
|
from bitHopper.Website import app, flask
import btcnet_info
import bitHopper.Configuration.Workers
@app.route("/worker", methods=['POST', 'GET'])
def worker():
#Check if this is a form submission
handle_worker_post(flask.request.form)
#Get a list of currently configured workers
pools_workers = {}
for pool in btcnet_info.get_pools():
if pool.name is None:
logging.debug('Ignoring %s', pool)
continue
pools_workers[pool.name] = bitHopper.Configuration.Workers.get_worker_from(pool.name)
return flask.render_template('worker.html', pools = pools_workers)
def handle_worker_post(post):
for item in ['method','username','password', 'pool']:
if item not in post:
return
if post['method'] == 'remove':
bitHopper.Configuration.Workers.remove(
post['pool'], post['username'], post['password'])
elif post['method'] == 'add':
bitHopper.Configuration.Workers.add(
post['pool'], post['username'], post['password'])
|
from bitHopper.Website import app, flask
import btcnet_info
import bitHopper.Configuration.Workers
@app.route("/worker", methods=['POST', 'GET'])
def worker():
#Check if this is a form submission
handle_worker_post(flask.request.form)
#Get a list of currently configured workers
pools_workers = {}
for pool in btcnet_info.get_pools():
if not pool.name:
continue
pools_workers[pool.name] = bitHopper.Configuration.Workers.get_worker_from(pool.name)
return flask.render_template('worker.html', pools = pools_workers)
def handle_worker_post(post):
for item in ['method','username','password', 'pool']:
if item not in post:
return
if post['method'] == 'remove':
bitHopper.Configuration.Workers.remove(
post['pool'], post['username'], post['password'])
elif post['method'] == 'add':
bitHopper.Configuration.Workers.add(
post['pool'], post['username'], post['password'])
Add some debugging for the worker generationfrom bitHopper.Website import app, flask
import btcnet_info
import bitHopper.Configuration.Workers
@app.route("/worker", methods=['POST', 'GET'])
def worker():
#Check if this is a form submission
handle_worker_post(flask.request.form)
#Get a list of currently configured workers
pools_workers = {}
for pool in btcnet_info.get_pools():
if pool.name is None:
logging.debug('Ignoring %s', pool)
continue
pools_workers[pool.name] = bitHopper.Configuration.Workers.get_worker_from(pool.name)
return flask.render_template('worker.html', pools = pools_workers)
def handle_worker_post(post):
for item in ['method','username','password', 'pool']:
if item not in post:
return
if post['method'] == 'remove':
bitHopper.Configuration.Workers.remove(
post['pool'], post['username'], post['password'])
elif post['method'] == 'add':
bitHopper.Configuration.Workers.add(
post['pool'], post['username'], post['password'])
|
<commit_before>from bitHopper.Website import app, flask
import btcnet_info
import bitHopper.Configuration.Workers
@app.route("/worker", methods=['POST', 'GET'])
def worker():
#Check if this is a form submission
handle_worker_post(flask.request.form)
#Get a list of currently configured workers
pools_workers = {}
for pool in btcnet_info.get_pools():
if not pool.name:
continue
pools_workers[pool.name] = bitHopper.Configuration.Workers.get_worker_from(pool.name)
return flask.render_template('worker.html', pools = pools_workers)
def handle_worker_post(post):
for item in ['method','username','password', 'pool']:
if item not in post:
return
if post['method'] == 'remove':
bitHopper.Configuration.Workers.remove(
post['pool'], post['username'], post['password'])
elif post['method'] == 'add':
bitHopper.Configuration.Workers.add(
post['pool'], post['username'], post['password'])
<commit_msg>Add some debugging for the worker generation<commit_after>from bitHopper.Website import app, flask
import btcnet_info
import bitHopper.Configuration.Workers
@app.route("/worker", methods=['POST', 'GET'])
def worker():
#Check if this is a form submission
handle_worker_post(flask.request.form)
#Get a list of currently configured workers
pools_workers = {}
for pool in btcnet_info.get_pools():
if pool.name is None:
logging.debug('Ignoring %s', pool)
continue
pools_workers[pool.name] = bitHopper.Configuration.Workers.get_worker_from(pool.name)
return flask.render_template('worker.html', pools = pools_workers)
def handle_worker_post(post):
for item in ['method','username','password', 'pool']:
if item not in post:
return
if post['method'] == 'remove':
bitHopper.Configuration.Workers.remove(
post['pool'], post['username'], post['password'])
elif post['method'] == 'add':
bitHopper.Configuration.Workers.add(
post['pool'], post['username'], post['password'])
|
d5747c8b0f1a82afecf68aadc6b42c77e586493c
|
tools/perf/benchmarks/rasterize_and_record_micro.py
|
tools/perf/benchmarks/rasterize_and_record_micro.py
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import rasterize_and_record_micro
from telemetry import test
@test.Disabled('android', 'linux')
class RasterizeAndRecordMicroTop25(test.Test):
"""Measures rasterize and record performance on the top 25 web pages.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/top_25.json'
class RasterizeAndRecordMicroKeyMobileSites(test.Test):
"""Measures rasterize and record performance on the key mobile sites.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/key_mobile_sites.json'
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import rasterize_and_record_micro
from telemetry import test
@test.Disabled('android', 'linux')
class RasterizeAndRecordMicroTop25(test.Test):
"""Measures rasterize and record performance on the top 25 web pages.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/top_25.json'
class RasterizeAndRecordMicroKeyMobileSites(test.Test):
"""Measures rasterize and record performance on the key mobile sites.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/key_mobile_sites.json'
class RasterizeAndRecordMicroKeySilkCases(test.Test):
"""Measures rasterize and record performance on the silk sites.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/key_silk_cases.json'
|
Add rasterization microbenchmark for silk
|
Add rasterization microbenchmark for silk
Add rasterize_and_record_micro_key_silk_cases for keeping track of
rasterization and recording performance of silk content. This mirrors
the existing rasterize_and_record_key_silk_cases benchmark and will
potentially allow us to remove it if this microbenchmark produces less
noisy data.
BUG=339517
Review URL: https://codereview.chromium.org/177253003
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@253403 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
markYoungH/chromium.src,anirudhSK/chromium,ondra-novak/chromium.src,ltilve/chromium,jaruba/chromium.src,bright-sparks/chromium-spacewalk,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk-efl,anirudhSK/chromium,markYoungH/chromium.src,dednal/chromium.src,ltilve/chromium,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk-efl,axinging/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk-efl,Jonekee/chromium.src,patrickm/chromium.src,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-crosswalk,dednal/chromium.src,crosswalk-project/chromium-crosswalk-efl,anirudhSK/chromium,M4sse/chromium.src,ondra-novak/chromium.src,dednal/chromium.src,jaruba/chromium.src,jaruba/chromium.src,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,markYoungH/chromium.src,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,anirudhSK/chromium,fujunwei/chromium-crosswalk,dednal/chromium.src,jaruba/chromium.src,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk-efl,jaruba/chromium.src,Jonekee/chromium.src,TheTypoMaster/chromium-crosswalk,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,littlstar/chromium.src,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,M4sse/chromium.src,Chilledheart/chromium,Jonekee/chromium.src,M4sse/chromium.src,Just-D/chromium-1,littlstar/chromium.src,littlstar/chromium.src,PeterWangIntel/chromium-crosswalk,Just-D/chromium-1,anirudhSK/chromium,crosswalk-project/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,bright-s
parks/chromium-spacewalk,Just-D/chromium-1,jaruba/chromium.src,dushu1203/chromium.src,ondra-novak/chromium.src,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk,Just-D/chromium-1,M4sse/chromium.src,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk,jaruba/chromium.src,M4sse/chromium.src,dednal/chromium.src,dushu1203/chromium.src,hgl888/chromium-crosswalk-efl,patrickm/chromium.src,ondra-novak/chromium.src,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,M4sse/chromium.src,patrickm/chromium.src,Pluto-tv/chromium-crosswalk,krieger-od/nwjs_chromium.src,crosswalk-project/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,M4sse/chromium.src,patrickm/chromium.src,krieger-od/nwjs_chromium.src,krieger-od/nwjs_chromium.src,TheTypoMaster/chromium-crosswalk,dednal/chromium.src,ltilve/chromium,bright-sparks/chromium-spacewalk,markYoungH/chromium.src,hgl888/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,Just-D/chromium-1,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,bright-sparks/chromium-spacewalk,littlstar/chromium.src,Jonekee/chromium.src,PeterWangIntel/chromium-crosswalk,krieger-od/nwjs_chromium.src,ondra-novak/chromium.src,hgl888/chromium-crosswalk,Jonekee/chromium.src,Fireblend/chromium-crosswalk,dednal/chromium.src,littlstar/chromium.src,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,bright-sparks/chromium-spacewalk,Jonekee/chromium.src,axinging/chromium-crosswalk,anirudhSK/chromium,ltilve/chromium,axinging/chromium-crosswalk,dushu1203/chromium.src,fujunwei/chromium-crosswalk,ltilve/chromium,Just-D/chro
mium-1,Chilledheart/chromium,patrickm/chromium.src,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.src,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,fujunwei/chromium-crosswalk,ondra-novak/chromium.src,Fireblend/chromium-crosswalk,Chilledheart/chromium,ondra-novak/chromium.src,dushu1203/chromium.src,hgl888/chromium-crosswalk-efl,chuan9/chromium-crosswalk,markYoungH/chromium.src,markYoungH/chromium.src,PeterWangIntel/chromium-crosswalk,bright-sparks/chromium-spacewalk,chuan9/chromium-crosswalk,jaruba/chromium.src,jaruba/chromium.src,dushu1203/chromium.src,axinging/chromium-crosswalk,ondra-novak/chromium.src,chuan9/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk-efl,ondra-novak/chromium.src,M4sse/chromium.src,krieger-od/nwjs_chromium.src,anirudhSK/chromium,Pluto-tv/chromium-crosswalk,patrickm/chromium.src,hgl888/chromium-crosswalk,Jonekee/chromium.src,markYoungH/chromium.src,fujunwei/chromium-crosswalk,Fireblend/chromium-crosswalk,jaruba/chromium.src,patrickm/chromium.src,bright-sparks/chromium-spacewalk,crosswalk-project/chromium-crosswalk-efl,ltilve/chromium,M4sse/chromium.src,krieger-od/nwjs_chromium.src,Chilledheart/chromium,dednal/chromium.src,markYoungH/chromium.src,mohamed--abdel-maksoud/chromium.src,Just-D/chromium-1,dushu1203/chromium.src,M4sse/chromium.src,littlstar/chromium.src,anirudhSK/chromium,PeterWangIntel/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,axinging/chromium-crosswalk,anirudhSK/chromium,PeterWangIntel/chromium-crosswalk,Fireblend/chromium-crosswalk,krieger-od/nwjs_chromium.src,Jonekee/chromium.src,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,TheTypoMaster/chromium-crosswalk,ltilve/chromium,crosswalk-project/chromium-crosswalk-efl,Chilledheart/chromium,Fireblend/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,dushu1203/chromium.src,pa
trickm/chromium.src,dednal/chromium.src,ltilve/chromium,anirudhSK/chromium,chuan9/chromium-crosswalk,anirudhSK/chromium,fujunwei/chromium-crosswalk,markYoungH/chromium.src,M4sse/chromium.src,markYoungH/chromium.src,patrickm/chromium.src,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,anirudhSK/chromium,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,crosswalk-project/chromium-crosswalk-efl,littlstar/chromium.src,Chilledheart/chromium,dushu1203/chromium.src,ltilve/chromium,littlstar/chromium.src,dednal/chromium.src
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import rasterize_and_record_micro
from telemetry import test
@test.Disabled('android', 'linux')
class RasterizeAndRecordMicroTop25(test.Test):
"""Measures rasterize and record performance on the top 25 web pages.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/top_25.json'
class RasterizeAndRecordMicroKeyMobileSites(test.Test):
"""Measures rasterize and record performance on the key mobile sites.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/key_mobile_sites.json'
Add rasterization microbenchmark for silk
Add rasterize_and_record_micro_key_silk_cases for keeping track of
rasterization and recording performance of silk content. This mirrors
the existing rasterize_and_record_key_silk_cases benchmark and will
potentially allow us to remove it if this microbenchmark produces less
noisy data.
BUG=339517
Review URL: https://codereview.chromium.org/177253003
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@253403 0039d316-1c4b-4281-b951-d872f2087c98
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import rasterize_and_record_micro
from telemetry import test
@test.Disabled('android', 'linux')
class RasterizeAndRecordMicroTop25(test.Test):
"""Measures rasterize and record performance on the top 25 web pages.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/top_25.json'
class RasterizeAndRecordMicroKeyMobileSites(test.Test):
"""Measures rasterize and record performance on the key mobile sites.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/key_mobile_sites.json'
class RasterizeAndRecordMicroKeySilkCases(test.Test):
"""Measures rasterize and record performance on the silk sites.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/key_silk_cases.json'
|
<commit_before># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import rasterize_and_record_micro
from telemetry import test
@test.Disabled('android', 'linux')
class RasterizeAndRecordMicroTop25(test.Test):
"""Measures rasterize and record performance on the top 25 web pages.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/top_25.json'
class RasterizeAndRecordMicroKeyMobileSites(test.Test):
"""Measures rasterize and record performance on the key mobile sites.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/key_mobile_sites.json'
<commit_msg>Add rasterization microbenchmark for silk
Add rasterize_and_record_micro_key_silk_cases for keeping track of
rasterization and recording performance of silk content. This mirrors
the existing rasterize_and_record_key_silk_cases benchmark and will
potentially allow us to remove it if this microbenchmark produces less
noisy data.
BUG=339517
Review URL: https://codereview.chromium.org/177253003
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@253403 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import rasterize_and_record_micro
from telemetry import test
@test.Disabled('android', 'linux')
class RasterizeAndRecordMicroTop25(test.Test):
"""Measures rasterize and record performance on the top 25 web pages.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/top_25.json'
class RasterizeAndRecordMicroKeyMobileSites(test.Test):
"""Measures rasterize and record performance on the key mobile sites.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/key_mobile_sites.json'
class RasterizeAndRecordMicroKeySilkCases(test.Test):
"""Measures rasterize and record performance on the silk sites.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/key_silk_cases.json'
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import rasterize_and_record_micro
from telemetry import test
@test.Disabled('android', 'linux')
class RasterizeAndRecordMicroTop25(test.Test):
"""Measures rasterize and record performance on the top 25 web pages.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/top_25.json'
class RasterizeAndRecordMicroKeyMobileSites(test.Test):
"""Measures rasterize and record performance on the key mobile sites.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/key_mobile_sites.json'
Add rasterization microbenchmark for silk
Add rasterize_and_record_micro_key_silk_cases for keeping track of
rasterization and recording performance of silk content. This mirrors
the existing rasterize_and_record_key_silk_cases benchmark and will
potentially allow us to remove it if this microbenchmark produces less
noisy data.
BUG=339517
Review URL: https://codereview.chromium.org/177253003
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@253403 0039d316-1c4b-4281-b951-d872f2087c98# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import rasterize_and_record_micro
from telemetry import test
@test.Disabled('android', 'linux')
class RasterizeAndRecordMicroTop25(test.Test):
"""Measures rasterize and record performance on the top 25 web pages.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/top_25.json'
class RasterizeAndRecordMicroKeyMobileSites(test.Test):
"""Measures rasterize and record performance on the key mobile sites.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/key_mobile_sites.json'
class RasterizeAndRecordMicroKeySilkCases(test.Test):
"""Measures rasterize and record performance on the silk sites.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/key_silk_cases.json'
|
<commit_before># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import rasterize_and_record_micro
from telemetry import test
@test.Disabled('android', 'linux')
class RasterizeAndRecordMicroTop25(test.Test):
"""Measures rasterize and record performance on the top 25 web pages.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/top_25.json'
class RasterizeAndRecordMicroKeyMobileSites(test.Test):
"""Measures rasterize and record performance on the key mobile sites.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/key_mobile_sites.json'
<commit_msg>Add rasterization microbenchmark for silk
Add rasterize_and_record_micro_key_silk_cases for keeping track of
rasterization and recording performance of silk content. This mirrors
the existing rasterize_and_record_key_silk_cases benchmark and will
potentially allow us to remove it if this microbenchmark produces less
noisy data.
BUG=339517
Review URL: https://codereview.chromium.org/177253003
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@253403 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import rasterize_and_record_micro
from telemetry import test
@test.Disabled('android', 'linux')
class RasterizeAndRecordMicroTop25(test.Test):
"""Measures rasterize and record performance on the top 25 web pages.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/top_25.json'
class RasterizeAndRecordMicroKeyMobileSites(test.Test):
"""Measures rasterize and record performance on the key mobile sites.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/key_mobile_sites.json'
class RasterizeAndRecordMicroKeySilkCases(test.Test):
"""Measures rasterize and record performance on the silk sites.
http://www.chromium.org/developers/design-documents/rendering-benchmarks"""
test = rasterize_and_record_micro.RasterizeAndRecordMicro
page_set = 'page_sets/key_silk_cases.json'
|
a6837da2e52753745b07f888ce3797cf0d0efd70
|
setup.py
|
setup.py
|
########
# Copyright (c) 2016 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
############
from setuptools import setup
setup(
name='clash',
version='0.16',
author='GigaSpaces',
author_email='cosmo-admin@gigaspaces.com',
packages=['clash'],
description='Framework to wrap Cloudify local based blueprints as CLIs',
license='Apache License, Version 2.0',
zip_safe=False,
install_requires=[
'argcomplete',
'ansicolors',
'argh',
'path.py',
'cloudify-plugins-common==3.3.1',
'cloudify-dsl-parser==3.3.1',
'cloudify-script-plugin==1.3.1'
]
)
|
########
# Copyright (c) 2016 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
############
from setuptools import setup
setup(
name='clash',
version='0.16',
author='GigaSpaces',
author_email='cosmo-admin@gigaspaces.com',
packages=['clash'],
description='Framework to wrap Cloudify local based blueprints as CLIs',
license='Apache License, Version 2.0',
zip_safe=False,
install_requires=[
'argcomplete',
'ansicolors',
'argh',
'path.py==8.1.2',
'cloudify-plugins-common==3.3.1',
'cloudify-dsl-parser==3.3.1',
'cloudify-script-plugin==1.3.1'
]
)
|
Set path.py to a fixed version
|
Set path.py to a fixed version
|
Python
|
apache-2.0
|
dankilman/clash,dankilman/clash
|
########
# Copyright (c) 2016 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
############
from setuptools import setup
setup(
name='clash',
version='0.16',
author='GigaSpaces',
author_email='cosmo-admin@gigaspaces.com',
packages=['clash'],
description='Framework to wrap Cloudify local based blueprints as CLIs',
license='Apache License, Version 2.0',
zip_safe=False,
install_requires=[
'argcomplete',
'ansicolors',
'argh',
'path.py',
'cloudify-plugins-common==3.3.1',
'cloudify-dsl-parser==3.3.1',
'cloudify-script-plugin==1.3.1'
]
)
Set path.py to a fixed version
|
########
# Copyright (c) 2016 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
############
from setuptools import setup
setup(
name='clash',
version='0.16',
author='GigaSpaces',
author_email='cosmo-admin@gigaspaces.com',
packages=['clash'],
description='Framework to wrap Cloudify local based blueprints as CLIs',
license='Apache License, Version 2.0',
zip_safe=False,
install_requires=[
'argcomplete',
'ansicolors',
'argh',
'path.py==8.1.2',
'cloudify-plugins-common==3.3.1',
'cloudify-dsl-parser==3.3.1',
'cloudify-script-plugin==1.3.1'
]
)
|
<commit_before>########
# Copyright (c) 2016 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
############
from setuptools import setup
setup(
name='clash',
version='0.16',
author='GigaSpaces',
author_email='cosmo-admin@gigaspaces.com',
packages=['clash'],
description='Framework to wrap Cloudify local based blueprints as CLIs',
license='Apache License, Version 2.0',
zip_safe=False,
install_requires=[
'argcomplete',
'ansicolors',
'argh',
'path.py',
'cloudify-plugins-common==3.3.1',
'cloudify-dsl-parser==3.3.1',
'cloudify-script-plugin==1.3.1'
]
)
<commit_msg>Set path.py to a fixed version<commit_after>
|
########
# Copyright (c) 2016 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
############
from setuptools import setup
setup(
name='clash',
version='0.16',
author='GigaSpaces',
author_email='cosmo-admin@gigaspaces.com',
packages=['clash'],
description='Framework to wrap Cloudify local based blueprints as CLIs',
license='Apache License, Version 2.0',
zip_safe=False,
install_requires=[
'argcomplete',
'ansicolors',
'argh',
'path.py==8.1.2',
'cloudify-plugins-common==3.3.1',
'cloudify-dsl-parser==3.3.1',
'cloudify-script-plugin==1.3.1'
]
)
|
########
# Copyright (c) 2016 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
############
from setuptools import setup
setup(
name='clash',
version='0.16',
author='GigaSpaces',
author_email='cosmo-admin@gigaspaces.com',
packages=['clash'],
description='Framework to wrap Cloudify local based blueprints as CLIs',
license='Apache License, Version 2.0',
zip_safe=False,
install_requires=[
'argcomplete',
'ansicolors',
'argh',
'path.py',
'cloudify-plugins-common==3.3.1',
'cloudify-dsl-parser==3.3.1',
'cloudify-script-plugin==1.3.1'
]
)
Set path.py to a fixed version########
# Copyright (c) 2016 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
############
from setuptools import setup
setup(
name='clash',
version='0.16',
author='GigaSpaces',
author_email='cosmo-admin@gigaspaces.com',
packages=['clash'],
description='Framework to wrap Cloudify local based blueprints as CLIs',
license='Apache License, Version 2.0',
zip_safe=False,
install_requires=[
'argcomplete',
'ansicolors',
'argh',
'path.py==8.1.2',
'cloudify-plugins-common==3.3.1',
'cloudify-dsl-parser==3.3.1',
'cloudify-script-plugin==1.3.1'
]
)
|
<commit_before>########
# Copyright (c) 2016 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
############
from setuptools import setup
setup(
name='clash',
version='0.16',
author='GigaSpaces',
author_email='cosmo-admin@gigaspaces.com',
packages=['clash'],
description='Framework to wrap Cloudify local based blueprints as CLIs',
license='Apache License, Version 2.0',
zip_safe=False,
install_requires=[
'argcomplete',
'ansicolors',
'argh',
'path.py',
'cloudify-plugins-common==3.3.1',
'cloudify-dsl-parser==3.3.1',
'cloudify-script-plugin==1.3.1'
]
)
<commit_msg>Set path.py to a fixed version<commit_after>########
# Copyright (c) 2016 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
############
from setuptools import setup
setup(
name='clash',
version='0.16',
author='GigaSpaces',
author_email='cosmo-admin@gigaspaces.com',
packages=['clash'],
description='Framework to wrap Cloudify local based blueprints as CLIs',
license='Apache License, Version 2.0',
zip_safe=False,
install_requires=[
'argcomplete',
'ansicolors',
'argh',
'path.py==8.1.2',
'cloudify-plugins-common==3.3.1',
'cloudify-dsl-parser==3.3.1',
'cloudify-script-plugin==1.3.1'
]
)
|
a15701a49c1fffedc30f939c231be4936d3ab790
|
setup.py
|
setup.py
|
import setuptools
from valohai_yaml import __version__
dev_dependencies = [
'flake8',
'isort',
'pydocstyle',
'pytest-cov',
]
if __name__ == '__main__':
setuptools.setup(
name='valohai-yaml',
description='Valohai.yaml validation and parsing',
version=__version__,
url='https://github.com/valohai/valohai-yaml',
author='Valohai',
author_email='info@valohai.com',
maintainer='Aarni Koskela',
maintainer_email='akx@iki.fi',
license='MIT',
install_requires=['jsonschema', 'PyYAML', 'six'],
tests_require=dev_dependencies,
extras_require={'dev': dev_dependencies},
packages=setuptools.find_packages('.', exclude=('*tests*',)),
include_package_data=True,
entry_points={
'console_scripts': [
'valohai-yaml = valohai_yaml.__main__:main',
],
},
)
|
import ast
import os
import re
import setuptools
with open(os.path.join(os.path.dirname(__file__), 'valohai_yaml', '__init__.py')) as infp:
version = ast.literal_eval(re.search('__version__ = (.+?)$', infp.read(), re.M).group(1))
dev_dependencies = [
'flake8',
'isort',
'pydocstyle',
'pytest-cov',
]
if __name__ == '__main__':
setuptools.setup(
name='valohai-yaml',
description='Valohai.yaml validation and parsing',
version=version,
url='https://github.com/valohai/valohai-yaml',
author='Valohai',
author_email='info@valohai.com',
maintainer='Aarni Koskela',
maintainer_email='akx@iki.fi',
license='MIT',
install_requires=['jsonschema', 'PyYAML', 'six'],
tests_require=dev_dependencies,
extras_require={'dev': dev_dependencies},
packages=setuptools.find_packages('.', exclude=('*tests*',)),
include_package_data=True,
entry_points={
'console_scripts': [
'valohai-yaml = valohai_yaml.__main__:main',
],
},
)
|
Read version without importing package
|
Read version without importing package
|
Python
|
mit
|
valohai/valohai-yaml
|
import setuptools
from valohai_yaml import __version__
dev_dependencies = [
'flake8',
'isort',
'pydocstyle',
'pytest-cov',
]
if __name__ == '__main__':
setuptools.setup(
name='valohai-yaml',
description='Valohai.yaml validation and parsing',
version=__version__,
url='https://github.com/valohai/valohai-yaml',
author='Valohai',
author_email='info@valohai.com',
maintainer='Aarni Koskela',
maintainer_email='akx@iki.fi',
license='MIT',
install_requires=['jsonschema', 'PyYAML', 'six'],
tests_require=dev_dependencies,
extras_require={'dev': dev_dependencies},
packages=setuptools.find_packages('.', exclude=('*tests*',)),
include_package_data=True,
entry_points={
'console_scripts': [
'valohai-yaml = valohai_yaml.__main__:main',
],
},
)
Read version without importing package
|
import ast
import os
import re
import setuptools
with open(os.path.join(os.path.dirname(__file__), 'valohai_yaml', '__init__.py')) as infp:
version = ast.literal_eval(re.search('__version__ = (.+?)$', infp.read(), re.M).group(1))
dev_dependencies = [
'flake8',
'isort',
'pydocstyle',
'pytest-cov',
]
if __name__ == '__main__':
setuptools.setup(
name='valohai-yaml',
description='Valohai.yaml validation and parsing',
version=version,
url='https://github.com/valohai/valohai-yaml',
author='Valohai',
author_email='info@valohai.com',
maintainer='Aarni Koskela',
maintainer_email='akx@iki.fi',
license='MIT',
install_requires=['jsonschema', 'PyYAML', 'six'],
tests_require=dev_dependencies,
extras_require={'dev': dev_dependencies},
packages=setuptools.find_packages('.', exclude=('*tests*',)),
include_package_data=True,
entry_points={
'console_scripts': [
'valohai-yaml = valohai_yaml.__main__:main',
],
},
)
|
<commit_before>import setuptools
from valohai_yaml import __version__
dev_dependencies = [
'flake8',
'isort',
'pydocstyle',
'pytest-cov',
]
if __name__ == '__main__':
setuptools.setup(
name='valohai-yaml',
description='Valohai.yaml validation and parsing',
version=__version__,
url='https://github.com/valohai/valohai-yaml',
author='Valohai',
author_email='info@valohai.com',
maintainer='Aarni Koskela',
maintainer_email='akx@iki.fi',
license='MIT',
install_requires=['jsonschema', 'PyYAML', 'six'],
tests_require=dev_dependencies,
extras_require={'dev': dev_dependencies},
packages=setuptools.find_packages('.', exclude=('*tests*',)),
include_package_data=True,
entry_points={
'console_scripts': [
'valohai-yaml = valohai_yaml.__main__:main',
],
},
)
<commit_msg>Read version without importing package<commit_after>
|
import ast
import os
import re
import setuptools
with open(os.path.join(os.path.dirname(__file__), 'valohai_yaml', '__init__.py')) as infp:
version = ast.literal_eval(re.search('__version__ = (.+?)$', infp.read(), re.M).group(1))
dev_dependencies = [
'flake8',
'isort',
'pydocstyle',
'pytest-cov',
]
if __name__ == '__main__':
setuptools.setup(
name='valohai-yaml',
description='Valohai.yaml validation and parsing',
version=version,
url='https://github.com/valohai/valohai-yaml',
author='Valohai',
author_email='info@valohai.com',
maintainer='Aarni Koskela',
maintainer_email='akx@iki.fi',
license='MIT',
install_requires=['jsonschema', 'PyYAML', 'six'],
tests_require=dev_dependencies,
extras_require={'dev': dev_dependencies},
packages=setuptools.find_packages('.', exclude=('*tests*',)),
include_package_data=True,
entry_points={
'console_scripts': [
'valohai-yaml = valohai_yaml.__main__:main',
],
},
)
|
import setuptools
from valohai_yaml import __version__
dev_dependencies = [
'flake8',
'isort',
'pydocstyle',
'pytest-cov',
]
if __name__ == '__main__':
setuptools.setup(
name='valohai-yaml',
description='Valohai.yaml validation and parsing',
version=__version__,
url='https://github.com/valohai/valohai-yaml',
author='Valohai',
author_email='info@valohai.com',
maintainer='Aarni Koskela',
maintainer_email='akx@iki.fi',
license='MIT',
install_requires=['jsonschema', 'PyYAML', 'six'],
tests_require=dev_dependencies,
extras_require={'dev': dev_dependencies},
packages=setuptools.find_packages('.', exclude=('*tests*',)),
include_package_data=True,
entry_points={
'console_scripts': [
'valohai-yaml = valohai_yaml.__main__:main',
],
},
)
Read version without importing packageimport ast
import os
import re
import setuptools
with open(os.path.join(os.path.dirname(__file__), 'valohai_yaml', '__init__.py')) as infp:
version = ast.literal_eval(re.search('__version__ = (.+?)$', infp.read(), re.M).group(1))
dev_dependencies = [
'flake8',
'isort',
'pydocstyle',
'pytest-cov',
]
if __name__ == '__main__':
setuptools.setup(
name='valohai-yaml',
description='Valohai.yaml validation and parsing',
version=version,
url='https://github.com/valohai/valohai-yaml',
author='Valohai',
author_email='info@valohai.com',
maintainer='Aarni Koskela',
maintainer_email='akx@iki.fi',
license='MIT',
install_requires=['jsonschema', 'PyYAML', 'six'],
tests_require=dev_dependencies,
extras_require={'dev': dev_dependencies},
packages=setuptools.find_packages('.', exclude=('*tests*',)),
include_package_data=True,
entry_points={
'console_scripts': [
'valohai-yaml = valohai_yaml.__main__:main',
],
},
)
|
<commit_before>import setuptools
from valohai_yaml import __version__
dev_dependencies = [
'flake8',
'isort',
'pydocstyle',
'pytest-cov',
]
if __name__ == '__main__':
setuptools.setup(
name='valohai-yaml',
description='Valohai.yaml validation and parsing',
version=__version__,
url='https://github.com/valohai/valohai-yaml',
author='Valohai',
author_email='info@valohai.com',
maintainer='Aarni Koskela',
maintainer_email='akx@iki.fi',
license='MIT',
install_requires=['jsonschema', 'PyYAML', 'six'],
tests_require=dev_dependencies,
extras_require={'dev': dev_dependencies},
packages=setuptools.find_packages('.', exclude=('*tests*',)),
include_package_data=True,
entry_points={
'console_scripts': [
'valohai-yaml = valohai_yaml.__main__:main',
],
},
)
<commit_msg>Read version without importing package<commit_after>import ast
import os
import re
import setuptools
with open(os.path.join(os.path.dirname(__file__), 'valohai_yaml', '__init__.py')) as infp:
version = ast.literal_eval(re.search('__version__ = (.+?)$', infp.read(), re.M).group(1))
dev_dependencies = [
'flake8',
'isort',
'pydocstyle',
'pytest-cov',
]
if __name__ == '__main__':
setuptools.setup(
name='valohai-yaml',
description='Valohai.yaml validation and parsing',
version=version,
url='https://github.com/valohai/valohai-yaml',
author='Valohai',
author_email='info@valohai.com',
maintainer='Aarni Koskela',
maintainer_email='akx@iki.fi',
license='MIT',
install_requires=['jsonschema', 'PyYAML', 'six'],
tests_require=dev_dependencies,
extras_require={'dev': dev_dependencies},
packages=setuptools.find_packages('.', exclude=('*tests*',)),
include_package_data=True,
entry_points={
'console_scripts': [
'valohai-yaml = valohai_yaml.__main__:main',
],
},
)
|
6a9d8a10d6fdf4f4cfdf8ae5af9b172d9b53e8e9
|
drawer.py
|
drawer.py
|
import matplotlib.pyplot as plt
import numpy as np
def display_result(vectors, clusters):
colors = [np.random.rand(3, 1) for i in range(len(clusters))]
for cluster_index, (centroid, cluster) in enumerate(clusters.items()):
current_cluster = [vectors[i] for i in cluster]
xs = list(map(lambda x: x[0], current_cluster))
ys = list(map(lambda x: x[1], current_cluster))
plt.scatter(xs, ys, c=colors[cluster_index])
plt.scatter(centroid[0], centroid[1], c=colors[cluster_index], marker='x')
plt.show()
|
import matplotlib.pyplot as plt
import numpy as np
def display_result(vectors, clusters):
colors = [np.random.rand(3, 1) for i in range(len(clusters))]
centroids_colors = [[1-x for x in color] for color in colors]
for cluster_index, (centroid, cluster) in enumerate(clusters.items()):
current_cluster = [vectors[i] for i in cluster]
xs = list(map(lambda x: x[0], current_cluster))
ys = list(map(lambda x: x[1], current_cluster))
plt.scatter(xs, ys, c=colors[cluster_index])
plt.scatter(centroid[0], centroid[1], c=centroids_colors[cluster_index], marker='x')
plt.show()
|
Add drawing centroids with inverted colors
|
Add drawing centroids with inverted colors
|
Python
|
mit
|
vanashimko/k-means
|
import matplotlib.pyplot as plt
import numpy as np
def display_result(vectors, clusters):
colors = [np.random.rand(3, 1) for i in range(len(clusters))]
for cluster_index, (centroid, cluster) in enumerate(clusters.items()):
current_cluster = [vectors[i] for i in cluster]
xs = list(map(lambda x: x[0], current_cluster))
ys = list(map(lambda x: x[1], current_cluster))
plt.scatter(xs, ys, c=colors[cluster_index])
plt.scatter(centroid[0], centroid[1], c=colors[cluster_index], marker='x')
plt.show()
Add drawing centroids with inverted colors
|
import matplotlib.pyplot as plt
import numpy as np
def display_result(vectors, clusters):
colors = [np.random.rand(3, 1) for i in range(len(clusters))]
centroids_colors = [[1-x for x in color] for color in colors]
for cluster_index, (centroid, cluster) in enumerate(clusters.items()):
current_cluster = [vectors[i] for i in cluster]
xs = list(map(lambda x: x[0], current_cluster))
ys = list(map(lambda x: x[1], current_cluster))
plt.scatter(xs, ys, c=colors[cluster_index])
plt.scatter(centroid[0], centroid[1], c=centroids_colors[cluster_index], marker='x')
plt.show()
|
<commit_before>import matplotlib.pyplot as plt
import numpy as np
def display_result(vectors, clusters):
colors = [np.random.rand(3, 1) for i in range(len(clusters))]
for cluster_index, (centroid, cluster) in enumerate(clusters.items()):
current_cluster = [vectors[i] for i in cluster]
xs = list(map(lambda x: x[0], current_cluster))
ys = list(map(lambda x: x[1], current_cluster))
plt.scatter(xs, ys, c=colors[cluster_index])
plt.scatter(centroid[0], centroid[1], c=colors[cluster_index], marker='x')
plt.show()
<commit_msg>Add drawing centroids with inverted colors<commit_after>
|
import matplotlib.pyplot as plt
import numpy as np
def display_result(vectors, clusters):
colors = [np.random.rand(3, 1) for i in range(len(clusters))]
centroids_colors = [[1-x for x in color] for color in colors]
for cluster_index, (centroid, cluster) in enumerate(clusters.items()):
current_cluster = [vectors[i] for i in cluster]
xs = list(map(lambda x: x[0], current_cluster))
ys = list(map(lambda x: x[1], current_cluster))
plt.scatter(xs, ys, c=colors[cluster_index])
plt.scatter(centroid[0], centroid[1], c=centroids_colors[cluster_index], marker='x')
plt.show()
|
import matplotlib.pyplot as plt
import numpy as np
def display_result(vectors, clusters):
colors = [np.random.rand(3, 1) for i in range(len(clusters))]
for cluster_index, (centroid, cluster) in enumerate(clusters.items()):
current_cluster = [vectors[i] for i in cluster]
xs = list(map(lambda x: x[0], current_cluster))
ys = list(map(lambda x: x[1], current_cluster))
plt.scatter(xs, ys, c=colors[cluster_index])
plt.scatter(centroid[0], centroid[1], c=colors[cluster_index], marker='x')
plt.show()
Add drawing centroids with inverted colorsimport matplotlib.pyplot as plt
import numpy as np
def display_result(vectors, clusters):
colors = [np.random.rand(3, 1) for i in range(len(clusters))]
centroids_colors = [[1-x for x in color] for color in colors]
for cluster_index, (centroid, cluster) in enumerate(clusters.items()):
current_cluster = [vectors[i] for i in cluster]
xs = list(map(lambda x: x[0], current_cluster))
ys = list(map(lambda x: x[1], current_cluster))
plt.scatter(xs, ys, c=colors[cluster_index])
plt.scatter(centroid[0], centroid[1], c=centroids_colors[cluster_index], marker='x')
plt.show()
|
<commit_before>import matplotlib.pyplot as plt
import numpy as np
def display_result(vectors, clusters):
colors = [np.random.rand(3, 1) for i in range(len(clusters))]
for cluster_index, (centroid, cluster) in enumerate(clusters.items()):
current_cluster = [vectors[i] for i in cluster]
xs = list(map(lambda x: x[0], current_cluster))
ys = list(map(lambda x: x[1], current_cluster))
plt.scatter(xs, ys, c=colors[cluster_index])
plt.scatter(centroid[0], centroid[1], c=colors[cluster_index], marker='x')
plt.show()
<commit_msg>Add drawing centroids with inverted colors<commit_after>import matplotlib.pyplot as plt
import numpy as np
def display_result(vectors, clusters):
colors = [np.random.rand(3, 1) for i in range(len(clusters))]
centroids_colors = [[1-x for x in color] for color in colors]
for cluster_index, (centroid, cluster) in enumerate(clusters.items()):
current_cluster = [vectors[i] for i in cluster]
xs = list(map(lambda x: x[0], current_cluster))
ys = list(map(lambda x: x[1], current_cluster))
plt.scatter(xs, ys, c=colors[cluster_index])
plt.scatter(centroid[0], centroid[1], c=centroids_colors[cluster_index], marker='x')
plt.show()
|
f82861e1698d101dc61ca8891b38e68f57262334
|
chroma-manager/chroma_cli/commands/__init__.py
|
chroma-manager/chroma_cli/commands/__init__.py
|
#
# ========================================================
# Copyright (c) 2012 Whamcloud, Inc. All rights reserved.
# ========================================================
from chroma_cli.commands.dispatcher import CommandDispatcher
CommandDispatcher # stupid pyflakes
|
#
# ========================================================
# Copyright (c) 2012 Whamcloud, Inc. All rights reserved.
# ========================================================
|
Remove some cruft that was accidentally pushed
|
Remove some cruft that was accidentally pushed
Change-Id: If75577316398c9d02230882766463f00aa13efd9
|
Python
|
mit
|
intel-hpdd/intel-manager-for-lustre,intel-hpdd/intel-manager-for-lustre,intel-hpdd/intel-manager-for-lustre
|
#
# ========================================================
# Copyright (c) 2012 Whamcloud, Inc. All rights reserved.
# ========================================================
from chroma_cli.commands.dispatcher import CommandDispatcher
CommandDispatcher # stupid pyflakes
Remove some cruft that was accidentally pushed
Change-Id: If75577316398c9d02230882766463f00aa13efd9
|
#
# ========================================================
# Copyright (c) 2012 Whamcloud, Inc. All rights reserved.
# ========================================================
|
<commit_before>#
# ========================================================
# Copyright (c) 2012 Whamcloud, Inc. All rights reserved.
# ========================================================
from chroma_cli.commands.dispatcher import CommandDispatcher
CommandDispatcher # stupid pyflakes
<commit_msg>Remove some cruft that was accidentally pushed
Change-Id: If75577316398c9d02230882766463f00aa13efd9<commit_after>
|
#
# ========================================================
# Copyright (c) 2012 Whamcloud, Inc. All rights reserved.
# ========================================================
|
#
# ========================================================
# Copyright (c) 2012 Whamcloud, Inc. All rights reserved.
# ========================================================
from chroma_cli.commands.dispatcher import CommandDispatcher
CommandDispatcher # stupid pyflakes
Remove some cruft that was accidentally pushed
Change-Id: If75577316398c9d02230882766463f00aa13efd9#
# ========================================================
# Copyright (c) 2012 Whamcloud, Inc. All rights reserved.
# ========================================================
|
<commit_before>#
# ========================================================
# Copyright (c) 2012 Whamcloud, Inc. All rights reserved.
# ========================================================
from chroma_cli.commands.dispatcher import CommandDispatcher
CommandDispatcher # stupid pyflakes
<commit_msg>Remove some cruft that was accidentally pushed
Change-Id: If75577316398c9d02230882766463f00aa13efd9<commit_after>#
# ========================================================
# Copyright (c) 2012 Whamcloud, Inc. All rights reserved.
# ========================================================
|
b76a7c4a60fbe3ea367a14e5fa19283fee062870
|
pinboard_linkrot.py
|
pinboard_linkrot.py
|
#!/usr/bin/env python
from __future__ import division
import requests
import json
import sys
from requests.exceptions import SSLError, InvalidSchema, ConnectionError
def get_link_status_code(link):
headers = {'User-agent':'Mozilla/5.0'}
try:
r = requests.head(link, headers=headers, allow_redirects=True)
return r.status_code
except (SSLError, InvalidSchema, ConnectionError):
return 409
def is_valid_link(status_code):
if status_code == 200:
return True
else:
return False
def process_links(links):
bad_links = 0
try:
for link in links:
status_code = get_link_status_code(link['href'])
if not is_valid_link(status_code):
print 'Invalid link (%s): %s [%s]' % (status_code, link['description'], link['href'])
bad_links += 1
except KeyboardInterrupt:
pass
linkrot = int(bad_links/len(links)*100)
print '\n%s%% linkrot\n' % linkrot
def process_bookmarks_file(filename):
with open(filename) as f:
bookmarks = json.load(f)
process_links(bookmarks)
if __name__ == '__main__':
if len(sys.argv) != 2:
print 'Usage: pinboard_linkrot.py <bookmarks.json>'
exit(1)
process_bookmarks_file(sys.argv[1])
|
#!/usr/bin/env python
from __future__ import division
import requests
import json
import sys
from requests.exceptions import SSLError, InvalidSchema, ConnectionError
def get_link_status_code(link):
headers = {'User-agent':'Mozilla/5.0'}
try:
r = requests.head(link, headers=headers, allow_redirects=True)
return r.status_code
except (Exception) as e:
return '%s: %s' % (type(e).__name__, str(e))
def is_valid_link(status_code):
if status_code == 200:
return True
else:
return False
def process_links(links):
bad_links = 0
try:
for link in links:
status_code = get_link_status_code(link['href'])
if not is_valid_link(status_code):
print 'Invalid link (%s): %s [%s]' % (status_code, link['description'], link['href'])
bad_links += 1
except KeyboardInterrupt:
pass
linkrot = int(bad_links/len(links)*100)
print '\n%s%% linkrot\n' % linkrot
def process_bookmarks_file(filename):
with open(filename) as f:
bookmarks = json.load(f)
process_links(bookmarks)
if __name__ == '__main__':
if len(sys.argv) != 2:
print 'Usage: pinboard_linkrot.py <bookmarks.json>'
exit(1)
process_bookmarks_file(sys.argv[1])
|
Return exception details when failing to load link
|
Return exception details when failing to load link
|
Python
|
mit
|
edgauthier/pinboard_linkrot
|
#!/usr/bin/env python
from __future__ import division
import requests
import json
import sys
from requests.exceptions import SSLError, InvalidSchema, ConnectionError
def get_link_status_code(link):
headers = {'User-agent':'Mozilla/5.0'}
try:
r = requests.head(link, headers=headers, allow_redirects=True)
return r.status_code
except (SSLError, InvalidSchema, ConnectionError):
return 409
def is_valid_link(status_code):
if status_code == 200:
return True
else:
return False
def process_links(links):
bad_links = 0
try:
for link in links:
status_code = get_link_status_code(link['href'])
if not is_valid_link(status_code):
print 'Invalid link (%s): %s [%s]' % (status_code, link['description'], link['href'])
bad_links += 1
except KeyboardInterrupt:
pass
linkrot = int(bad_links/len(links)*100)
print '\n%s%% linkrot\n' % linkrot
def process_bookmarks_file(filename):
with open(filename) as f:
bookmarks = json.load(f)
process_links(bookmarks)
if __name__ == '__main__':
if len(sys.argv) != 2:
print 'Usage: pinboard_linkrot.py <bookmarks.json>'
exit(1)
process_bookmarks_file(sys.argv[1])
Return exception details when failing to load link
|
#!/usr/bin/env python
from __future__ import division
import requests
import json
import sys
from requests.exceptions import SSLError, InvalidSchema, ConnectionError
def get_link_status_code(link):
headers = {'User-agent':'Mozilla/5.0'}
try:
r = requests.head(link, headers=headers, allow_redirects=True)
return r.status_code
except (Exception) as e:
return '%s: %s' % (type(e).__name__, str(e))
def is_valid_link(status_code):
if status_code == 200:
return True
else:
return False
def process_links(links):
bad_links = 0
try:
for link in links:
status_code = get_link_status_code(link['href'])
if not is_valid_link(status_code):
print 'Invalid link (%s): %s [%s]' % (status_code, link['description'], link['href'])
bad_links += 1
except KeyboardInterrupt:
pass
linkrot = int(bad_links/len(links)*100)
print '\n%s%% linkrot\n' % linkrot
def process_bookmarks_file(filename):
with open(filename) as f:
bookmarks = json.load(f)
process_links(bookmarks)
if __name__ == '__main__':
if len(sys.argv) != 2:
print 'Usage: pinboard_linkrot.py <bookmarks.json>'
exit(1)
process_bookmarks_file(sys.argv[1])
|
<commit_before>#!/usr/bin/env python
from __future__ import division
import requests
import json
import sys
from requests.exceptions import SSLError, InvalidSchema, ConnectionError
def get_link_status_code(link):
headers = {'User-agent':'Mozilla/5.0'}
try:
r = requests.head(link, headers=headers, allow_redirects=True)
return r.status_code
except (SSLError, InvalidSchema, ConnectionError):
return 409
def is_valid_link(status_code):
if status_code == 200:
return True
else:
return False
def process_links(links):
bad_links = 0
try:
for link in links:
status_code = get_link_status_code(link['href'])
if not is_valid_link(status_code):
print 'Invalid link (%s): %s [%s]' % (status_code, link['description'], link['href'])
bad_links += 1
except KeyboardInterrupt:
pass
linkrot = int(bad_links/len(links)*100)
print '\n%s%% linkrot\n' % linkrot
def process_bookmarks_file(filename):
with open(filename) as f:
bookmarks = json.load(f)
process_links(bookmarks)
if __name__ == '__main__':
if len(sys.argv) != 2:
print 'Usage: pinboard_linkrot.py <bookmarks.json>'
exit(1)
process_bookmarks_file(sys.argv[1])
<commit_msg>Return exception details when failing to load link<commit_after>
|
#!/usr/bin/env python
from __future__ import division
import requests
import json
import sys
from requests.exceptions import SSLError, InvalidSchema, ConnectionError
def get_link_status_code(link):
headers = {'User-agent':'Mozilla/5.0'}
try:
r = requests.head(link, headers=headers, allow_redirects=True)
return r.status_code
except (Exception) as e:
return '%s: %s' % (type(e).__name__, str(e))
def is_valid_link(status_code):
if status_code == 200:
return True
else:
return False
def process_links(links):
bad_links = 0
try:
for link in links:
status_code = get_link_status_code(link['href'])
if not is_valid_link(status_code):
print 'Invalid link (%s): %s [%s]' % (status_code, link['description'], link['href'])
bad_links += 1
except KeyboardInterrupt:
pass
linkrot = int(bad_links/len(links)*100)
print '\n%s%% linkrot\n' % linkrot
def process_bookmarks_file(filename):
with open(filename) as f:
bookmarks = json.load(f)
process_links(bookmarks)
if __name__ == '__main__':
if len(sys.argv) != 2:
print 'Usage: pinboard_linkrot.py <bookmarks.json>'
exit(1)
process_bookmarks_file(sys.argv[1])
|
#!/usr/bin/env python
from __future__ import division
import requests
import json
import sys
from requests.exceptions import SSLError, InvalidSchema, ConnectionError
def get_link_status_code(link):
headers = {'User-agent':'Mozilla/5.0'}
try:
r = requests.head(link, headers=headers, allow_redirects=True)
return r.status_code
except (SSLError, InvalidSchema, ConnectionError):
return 409
def is_valid_link(status_code):
if status_code == 200:
return True
else:
return False
def process_links(links):
bad_links = 0
try:
for link in links:
status_code = get_link_status_code(link['href'])
if not is_valid_link(status_code):
print 'Invalid link (%s): %s [%s]' % (status_code, link['description'], link['href'])
bad_links += 1
except KeyboardInterrupt:
pass
linkrot = int(bad_links/len(links)*100)
print '\n%s%% linkrot\n' % linkrot
def process_bookmarks_file(filename):
with open(filename) as f:
bookmarks = json.load(f)
process_links(bookmarks)
if __name__ == '__main__':
if len(sys.argv) != 2:
print 'Usage: pinboard_linkrot.py <bookmarks.json>'
exit(1)
process_bookmarks_file(sys.argv[1])
Return exception details when failing to load link#!/usr/bin/env python
from __future__ import division
import requests
import json
import sys
from requests.exceptions import SSLError, InvalidSchema, ConnectionError
def get_link_status_code(link):
headers = {'User-agent':'Mozilla/5.0'}
try:
r = requests.head(link, headers=headers, allow_redirects=True)
return r.status_code
except (Exception) as e:
return '%s: %s' % (type(e).__name__, str(e))
def is_valid_link(status_code):
if status_code == 200:
return True
else:
return False
def process_links(links):
bad_links = 0
try:
for link in links:
status_code = get_link_status_code(link['href'])
if not is_valid_link(status_code):
print 'Invalid link (%s): %s [%s]' % (status_code, link['description'], link['href'])
bad_links += 1
except KeyboardInterrupt:
pass
linkrot = int(bad_links/len(links)*100)
print '\n%s%% linkrot\n' % linkrot
def process_bookmarks_file(filename):
with open(filename) as f:
bookmarks = json.load(f)
process_links(bookmarks)
if __name__ == '__main__':
if len(sys.argv) != 2:
print 'Usage: pinboard_linkrot.py <bookmarks.json>'
exit(1)
process_bookmarks_file(sys.argv[1])
|
<commit_before>#!/usr/bin/env python
from __future__ import division
import requests
import json
import sys
from requests.exceptions import SSLError, InvalidSchema, ConnectionError
def get_link_status_code(link):
headers = {'User-agent':'Mozilla/5.0'}
try:
r = requests.head(link, headers=headers, allow_redirects=True)
return r.status_code
except (SSLError, InvalidSchema, ConnectionError):
return 409
def is_valid_link(status_code):
if status_code == 200:
return True
else:
return False
def process_links(links):
bad_links = 0
try:
for link in links:
status_code = get_link_status_code(link['href'])
if not is_valid_link(status_code):
print 'Invalid link (%s): %s [%s]' % (status_code, link['description'], link['href'])
bad_links += 1
except KeyboardInterrupt:
pass
linkrot = int(bad_links/len(links)*100)
print '\n%s%% linkrot\n' % linkrot
def process_bookmarks_file(filename):
with open(filename) as f:
bookmarks = json.load(f)
process_links(bookmarks)
if __name__ == '__main__':
if len(sys.argv) != 2:
print 'Usage: pinboard_linkrot.py <bookmarks.json>'
exit(1)
process_bookmarks_file(sys.argv[1])
<commit_msg>Return exception details when failing to load link<commit_after>#!/usr/bin/env python
from __future__ import division
import requests
import json
import sys
from requests.exceptions import SSLError, InvalidSchema, ConnectionError
def get_link_status_code(link):
headers = {'User-agent':'Mozilla/5.0'}
try:
r = requests.head(link, headers=headers, allow_redirects=True)
return r.status_code
except (Exception) as e:
return '%s: %s' % (type(e).__name__, str(e))
def is_valid_link(status_code):
if status_code == 200:
return True
else:
return False
def process_links(links):
bad_links = 0
try:
for link in links:
status_code = get_link_status_code(link['href'])
if not is_valid_link(status_code):
print 'Invalid link (%s): %s [%s]' % (status_code, link['description'], link['href'])
bad_links += 1
except KeyboardInterrupt:
pass
linkrot = int(bad_links/len(links)*100)
print '\n%s%% linkrot\n' % linkrot
def process_bookmarks_file(filename):
with open(filename) as f:
bookmarks = json.load(f)
process_links(bookmarks)
if __name__ == '__main__':
if len(sys.argv) != 2:
print 'Usage: pinboard_linkrot.py <bookmarks.json>'
exit(1)
process_bookmarks_file(sys.argv[1])
|
84396970c866ced0264c4a84b1300df23fede36a
|
bermann/spark_context_test.py
|
bermann/spark_context_test.py
|
import unittest
from bermann.spark_context import SparkContext
class TestSparkContext(unittest.TestCase):
def test_parallelize_with_list_input(self):
sc = SparkContext()
self.assertEqual([1, 2, 3], sc.parallelize([1, 2, 3]).collect())
def test_parallelize_with_generator_input(self):
sc = SparkContext()
def gen_range(i):
for i in range(i):
yield i
self.assertEqual([0, 1, 2, 3], sc.parallelize(gen_range(4)).collect())
|
import unittest
from bermann.spark_context import SparkContext
import bermann.rdd
class TestSparkContext(unittest.TestCase):
def test_parallelize_with_list_input(self):
sc = SparkContext()
self.assertEqual([1, 2, 3], sc.parallelize([1, 2, 3]).collect())
def test_parallelize_with_generator_input(self):
sc = SparkContext()
def gen_range(i):
for i in range(i):
yield i
self.assertEqual([0, 1, 2, 3], sc.parallelize(gen_range(4)).collect())
def test_empty_rdd_returns_empty_rdd(self):
sc = SparkContext()
empty = sc.emptyRDD()
self.assertTrue(isinstance(empty, bermann.rdd.RDD))
self.assertEqual(0, empty.count())
|
Add test case for SparkContext.emptyRDD()
|
Add test case for SparkContext.emptyRDD()
|
Python
|
mit
|
oli-hall/bermann
|
import unittest
from bermann.spark_context import SparkContext
class TestSparkContext(unittest.TestCase):
def test_parallelize_with_list_input(self):
sc = SparkContext()
self.assertEqual([1, 2, 3], sc.parallelize([1, 2, 3]).collect())
def test_parallelize_with_generator_input(self):
sc = SparkContext()
def gen_range(i):
for i in range(i):
yield i
self.assertEqual([0, 1, 2, 3], sc.parallelize(gen_range(4)).collect())
Add test case for SparkContext.emptyRDD()
|
import unittest
from bermann.spark_context import SparkContext
import bermann.rdd
class TestSparkContext(unittest.TestCase):
def test_parallelize_with_list_input(self):
sc = SparkContext()
self.assertEqual([1, 2, 3], sc.parallelize([1, 2, 3]).collect())
def test_parallelize_with_generator_input(self):
sc = SparkContext()
def gen_range(i):
for i in range(i):
yield i
self.assertEqual([0, 1, 2, 3], sc.parallelize(gen_range(4)).collect())
def test_empty_rdd_returns_empty_rdd(self):
sc = SparkContext()
empty = sc.emptyRDD()
self.assertTrue(isinstance(empty, bermann.rdd.RDD))
self.assertEqual(0, empty.count())
|
<commit_before>import unittest
from bermann.spark_context import SparkContext
class TestSparkContext(unittest.TestCase):
def test_parallelize_with_list_input(self):
sc = SparkContext()
self.assertEqual([1, 2, 3], sc.parallelize([1, 2, 3]).collect())
def test_parallelize_with_generator_input(self):
sc = SparkContext()
def gen_range(i):
for i in range(i):
yield i
self.assertEqual([0, 1, 2, 3], sc.parallelize(gen_range(4)).collect())
<commit_msg>Add test case for SparkContext.emptyRDD()<commit_after>
|
import unittest
from bermann.spark_context import SparkContext
import bermann.rdd
class TestSparkContext(unittest.TestCase):
def test_parallelize_with_list_input(self):
sc = SparkContext()
self.assertEqual([1, 2, 3], sc.parallelize([1, 2, 3]).collect())
def test_parallelize_with_generator_input(self):
sc = SparkContext()
def gen_range(i):
for i in range(i):
yield i
self.assertEqual([0, 1, 2, 3], sc.parallelize(gen_range(4)).collect())
def test_empty_rdd_returns_empty_rdd(self):
sc = SparkContext()
empty = sc.emptyRDD()
self.assertTrue(isinstance(empty, bermann.rdd.RDD))
self.assertEqual(0, empty.count())
|
import unittest
from bermann.spark_context import SparkContext
class TestSparkContext(unittest.TestCase):
def test_parallelize_with_list_input(self):
sc = SparkContext()
self.assertEqual([1, 2, 3], sc.parallelize([1, 2, 3]).collect())
def test_parallelize_with_generator_input(self):
sc = SparkContext()
def gen_range(i):
for i in range(i):
yield i
self.assertEqual([0, 1, 2, 3], sc.parallelize(gen_range(4)).collect())
Add test case for SparkContext.emptyRDD()import unittest
from bermann.spark_context import SparkContext
import bermann.rdd
class TestSparkContext(unittest.TestCase):
def test_parallelize_with_list_input(self):
sc = SparkContext()
self.assertEqual([1, 2, 3], sc.parallelize([1, 2, 3]).collect())
def test_parallelize_with_generator_input(self):
sc = SparkContext()
def gen_range(i):
for i in range(i):
yield i
self.assertEqual([0, 1, 2, 3], sc.parallelize(gen_range(4)).collect())
def test_empty_rdd_returns_empty_rdd(self):
sc = SparkContext()
empty = sc.emptyRDD()
self.assertTrue(isinstance(empty, bermann.rdd.RDD))
self.assertEqual(0, empty.count())
|
<commit_before>import unittest
from bermann.spark_context import SparkContext
class TestSparkContext(unittest.TestCase):
def test_parallelize_with_list_input(self):
sc = SparkContext()
self.assertEqual([1, 2, 3], sc.parallelize([1, 2, 3]).collect())
def test_parallelize_with_generator_input(self):
sc = SparkContext()
def gen_range(i):
for i in range(i):
yield i
self.assertEqual([0, 1, 2, 3], sc.parallelize(gen_range(4)).collect())
<commit_msg>Add test case for SparkContext.emptyRDD()<commit_after>import unittest
from bermann.spark_context import SparkContext
import bermann.rdd
class TestSparkContext(unittest.TestCase):
def test_parallelize_with_list_input(self):
sc = SparkContext()
self.assertEqual([1, 2, 3], sc.parallelize([1, 2, 3]).collect())
def test_parallelize_with_generator_input(self):
sc = SparkContext()
def gen_range(i):
for i in range(i):
yield i
self.assertEqual([0, 1, 2, 3], sc.parallelize(gen_range(4)).collect())
def test_empty_rdd_returns_empty_rdd(self):
sc = SparkContext()
empty = sc.emptyRDD()
self.assertTrue(isinstance(empty, bermann.rdd.RDD))
self.assertEqual(0, empty.count())
|
3452603d99d82c76e3119c2da77c2f4a63777611
|
assisstant/keyboard/ui/components.py
|
assisstant/keyboard/ui/components.py
|
from PyQt5.QtWidgets import QOpenGLWidget
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QPainter, QBrush
class FlashingBox(QOpenGLWidget):
def __init__(self, parent, freq, color):
super(FlashingBox, self).__init__(parent)
self.freq = freq
self.brushes = [QBrush(Qt.black), QBrush(color)]
self.index = 0
self.enabled = False
def timerEvent(self, event):
if self.enabled:
self.index = (self.index + 1) % 2
else:
self.index = 0
self.update()
def paintEvent(self, event):
painter = QPainter(self)
painter.fillRect(event.rect(), self.brushes[self.index])
def suspendFlashing(self):
self.killTimer(self._timer)
self.enabled=False
self.index = 0
self.update()
def startFlashing(self):
self.index = 0
self.enabled = True
delay = int(1000/(2 * self.freq)) #in mSec
self._timer = self.startTimer(delay, Qt.PreciseTimer)
|
from PyQt5.QtWidgets import QOpenGLWidget
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QPainter, QBrush
class FlashingBox(QOpenGLWidget):
def __init__(self, parent, freq=1, color=Qt.black):
super(FlashingBox, self).__init__(parent)
self.freq = freq
self.brushes = [QBrush(Qt.black), QBrush(color)]
self.index = 0
self.enabled = False
def setFreq(self, freq):
self.freq = freq
def setColor(self, color):
self.brushes[1] = QBrush(color)
def timerEvent(self, event):
if self.enabled:
self.index = (self.index + 1) % 2
else:
self.index = 0
self.update()
def paintEvent(self, event):
painter = QPainter(self)
painter.fillRect(event.rect(), self.brushes[self.index])
def startFlashing(self):
self.index = 0
self.enabled = True
delay = int(1000/(2 * self.freq)) #in mSec
self._timer = self.startTimer(delay, Qt.PreciseTimer)
def stopFlashing(self):
self.killTimer(self._timer)
self.enabled=False
self.index = 0
self.update()
|
Add setFreq/setColor methods for FlashingBox
|
Add setFreq/setColor methods for FlashingBox
|
Python
|
apache-2.0
|
brainbots/assistant
|
from PyQt5.QtWidgets import QOpenGLWidget
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QPainter, QBrush
class FlashingBox(QOpenGLWidget):
def __init__(self, parent, freq, color):
super(FlashingBox, self).__init__(parent)
self.freq = freq
self.brushes = [QBrush(Qt.black), QBrush(color)]
self.index = 0
self.enabled = False
def timerEvent(self, event):
if self.enabled:
self.index = (self.index + 1) % 2
else:
self.index = 0
self.update()
def paintEvent(self, event):
painter = QPainter(self)
painter.fillRect(event.rect(), self.brushes[self.index])
def suspendFlashing(self):
self.killTimer(self._timer)
self.enabled=False
self.index = 0
self.update()
def startFlashing(self):
self.index = 0
self.enabled = True
delay = int(1000/(2 * self.freq)) #in mSec
self._timer = self.startTimer(delay, Qt.PreciseTimer)
Add setFreq/setColor methods for FlashingBox
|
from PyQt5.QtWidgets import QOpenGLWidget
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QPainter, QBrush
class FlashingBox(QOpenGLWidget):
def __init__(self, parent, freq=1, color=Qt.black):
super(FlashingBox, self).__init__(parent)
self.freq = freq
self.brushes = [QBrush(Qt.black), QBrush(color)]
self.index = 0
self.enabled = False
def setFreq(self, freq):
self.freq = freq
def setColor(self, color):
self.brushes[1] = QBrush(color)
def timerEvent(self, event):
if self.enabled:
self.index = (self.index + 1) % 2
else:
self.index = 0
self.update()
def paintEvent(self, event):
painter = QPainter(self)
painter.fillRect(event.rect(), self.brushes[self.index])
def startFlashing(self):
self.index = 0
self.enabled = True
delay = int(1000/(2 * self.freq)) #in mSec
self._timer = self.startTimer(delay, Qt.PreciseTimer)
def stopFlashing(self):
self.killTimer(self._timer)
self.enabled=False
self.index = 0
self.update()
|
<commit_before>from PyQt5.QtWidgets import QOpenGLWidget
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QPainter, QBrush
class FlashingBox(QOpenGLWidget):
def __init__(self, parent, freq, color):
super(FlashingBox, self).__init__(parent)
self.freq = freq
self.brushes = [QBrush(Qt.black), QBrush(color)]
self.index = 0
self.enabled = False
def timerEvent(self, event):
if self.enabled:
self.index = (self.index + 1) % 2
else:
self.index = 0
self.update()
def paintEvent(self, event):
painter = QPainter(self)
painter.fillRect(event.rect(), self.brushes[self.index])
def suspendFlashing(self):
self.killTimer(self._timer)
self.enabled=False
self.index = 0
self.update()
def startFlashing(self):
self.index = 0
self.enabled = True
delay = int(1000/(2 * self.freq)) #in mSec
self._timer = self.startTimer(delay, Qt.PreciseTimer)
<commit_msg>Add setFreq/setColor methods for FlashingBox<commit_after>
|
from PyQt5.QtWidgets import QOpenGLWidget
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QPainter, QBrush
class FlashingBox(QOpenGLWidget):
def __init__(self, parent, freq=1, color=Qt.black):
super(FlashingBox, self).__init__(parent)
self.freq = freq
self.brushes = [QBrush(Qt.black), QBrush(color)]
self.index = 0
self.enabled = False
def setFreq(self, freq):
self.freq = freq
def setColor(self, color):
self.brushes[1] = QBrush(color)
def timerEvent(self, event):
if self.enabled:
self.index = (self.index + 1) % 2
else:
self.index = 0
self.update()
def paintEvent(self, event):
painter = QPainter(self)
painter.fillRect(event.rect(), self.brushes[self.index])
def startFlashing(self):
self.index = 0
self.enabled = True
delay = int(1000/(2 * self.freq)) #in mSec
self._timer = self.startTimer(delay, Qt.PreciseTimer)
def stopFlashing(self):
self.killTimer(self._timer)
self.enabled=False
self.index = 0
self.update()
|
from PyQt5.QtWidgets import QOpenGLWidget
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QPainter, QBrush
class FlashingBox(QOpenGLWidget):
def __init__(self, parent, freq, color):
super(FlashingBox, self).__init__(parent)
self.freq = freq
self.brushes = [QBrush(Qt.black), QBrush(color)]
self.index = 0
self.enabled = False
def timerEvent(self, event):
if self.enabled:
self.index = (self.index + 1) % 2
else:
self.index = 0
self.update()
def paintEvent(self, event):
painter = QPainter(self)
painter.fillRect(event.rect(), self.brushes[self.index])
def suspendFlashing(self):
self.killTimer(self._timer)
self.enabled=False
self.index = 0
self.update()
def startFlashing(self):
self.index = 0
self.enabled = True
delay = int(1000/(2 * self.freq)) #in mSec
self._timer = self.startTimer(delay, Qt.PreciseTimer)
Add setFreq/setColor methods for FlashingBoxfrom PyQt5.QtWidgets import QOpenGLWidget
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QPainter, QBrush
class FlashingBox(QOpenGLWidget):
def __init__(self, parent, freq=1, color=Qt.black):
super(FlashingBox, self).__init__(parent)
self.freq = freq
self.brushes = [QBrush(Qt.black), QBrush(color)]
self.index = 0
self.enabled = False
def setFreq(self, freq):
self.freq = freq
def setColor(self, color):
self.brushes[1] = QBrush(color)
def timerEvent(self, event):
if self.enabled:
self.index = (self.index + 1) % 2
else:
self.index = 0
self.update()
def paintEvent(self, event):
painter = QPainter(self)
painter.fillRect(event.rect(), self.brushes[self.index])
def startFlashing(self):
self.index = 0
self.enabled = True
delay = int(1000/(2 * self.freq)) #in mSec
self._timer = self.startTimer(delay, Qt.PreciseTimer)
def stopFlashing(self):
self.killTimer(self._timer)
self.enabled=False
self.index = 0
self.update()
|
<commit_before>from PyQt5.QtWidgets import QOpenGLWidget
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QPainter, QBrush
class FlashingBox(QOpenGLWidget):
def __init__(self, parent, freq, color):
super(FlashingBox, self).__init__(parent)
self.freq = freq
self.brushes = [QBrush(Qt.black), QBrush(color)]
self.index = 0
self.enabled = False
def timerEvent(self, event):
if self.enabled:
self.index = (self.index + 1) % 2
else:
self.index = 0
self.update()
def paintEvent(self, event):
painter = QPainter(self)
painter.fillRect(event.rect(), self.brushes[self.index])
def suspendFlashing(self):
self.killTimer(self._timer)
self.enabled=False
self.index = 0
self.update()
def startFlashing(self):
self.index = 0
self.enabled = True
delay = int(1000/(2 * self.freq)) #in mSec
self._timer = self.startTimer(delay, Qt.PreciseTimer)
<commit_msg>Add setFreq/setColor methods for FlashingBox<commit_after>from PyQt5.QtWidgets import QOpenGLWidget
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QPainter, QBrush
class FlashingBox(QOpenGLWidget):
def __init__(self, parent, freq=1, color=Qt.black):
super(FlashingBox, self).__init__(parent)
self.freq = freq
self.brushes = [QBrush(Qt.black), QBrush(color)]
self.index = 0
self.enabled = False
def setFreq(self, freq):
self.freq = freq
def setColor(self, color):
self.brushes[1] = QBrush(color)
def timerEvent(self, event):
if self.enabled:
self.index = (self.index + 1) % 2
else:
self.index = 0
self.update()
def paintEvent(self, event):
painter = QPainter(self)
painter.fillRect(event.rect(), self.brushes[self.index])
def startFlashing(self):
self.index = 0
self.enabled = True
delay = int(1000/(2 * self.freq)) #in mSec
self._timer = self.startTimer(delay, Qt.PreciseTimer)
def stopFlashing(self):
self.killTimer(self._timer)
self.enabled=False
self.index = 0
self.update()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.