| commit (stringlengths 40-40) | old_file (stringlengths 4-118) | new_file (stringlengths 4-118) | old_contents (stringlengths 0-2.94k) | new_contents (stringlengths 1-4.43k) | subject (stringlengths 15-444) | message (stringlengths 16-3.45k) | lang (stringclasses 1 value) | license (stringclasses 13 values) | repos (stringlengths 5-43.2k) | prompt (stringlengths 17-4.58k) | response (stringlengths 1-4.43k) | prompt_tagged (stringlengths 58-4.62k) | response_tagged (stringlengths 1-4.43k) | text (stringlengths 132-7.29k) | text_tagged (stringlengths 173-7.33k) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
b65b513b4079a0d4a7ed7e59962d1758e64d854c
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='ros_get',
package_dir={'': 'src'}, # tell distutils packages are under src
packages=find_packages('src'), # include all packages under src
install_requires=[
'argcomplete',
'catkin_pkg',
'catkin_tools',
'colorlog',
'future',
'mock',
'rosdep',
'rosdistro',
'rosinstall_generator',
'trollius', # remove when catkin>0.4.4 is released
'vcstools',
'xdg==1.0.7',
],
entry_points={'console_scripts': ['ros-get=ros_get.__main__:main']}, )
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='ros_get',
package_dir={'': 'src'}, # tell distutils packages are under src
packages=find_packages('src'), # include all packages under src
install_requires=[
'argcomplete',
'catkin_pkg',
'catkin_tools',
'colorlog',
'future',
'mock',
'rosdep',
'rosdistro >= 0.6.8',
'rosinstall_generator',
'trollius', # remove when catkin>0.4.4 is released
'vcstools',
'xdg==1.0.7',
],
entry_points={'console_scripts': ['ros-get=ros_get.__main__:main']}, )
|
Fix rosdistro requirements that were broken
|
Fix rosdistro requirements that were broken
Pip doesn't do transitive dependencies so we must specify it here. This
commit was needed due to ros-infrastructure/rosinstall_generator#44
|
Python
|
mit
|
Rayman/ros-get,Rayman/ros-get
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='ros_get',
package_dir={'': 'src'}, # tell distutils packages are under src
packages=find_packages('src'), # include all packages under src
install_requires=[
'argcomplete',
'catkin_pkg',
'catkin_tools',
'colorlog',
'future',
'mock',
'rosdep',
'rosdistro',
'rosinstall_generator',
'trollius', # remove when catkin>0.4.4 is released
'vcstools',
'xdg==1.0.7',
],
entry_points={'console_scripts': ['ros-get=ros_get.__main__:main']}, )
Fix rosdistro requirements that were broken
Pip doesn't do transitive dependencies so we must specify it here. This
commit was needed due to ros-infrastructure/rosinstall_generator#44
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='ros_get',
package_dir={'': 'src'}, # tell distutils packages are under src
packages=find_packages('src'), # include all packages under src
install_requires=[
'argcomplete',
'catkin_pkg',
'catkin_tools',
'colorlog',
'future',
'mock',
'rosdep',
'rosdistro >= 0.6.8',
'rosinstall_generator',
'trollius', # remove when catkin>0.4.4 is released
'vcstools',
'xdg==1.0.7',
],
entry_points={'console_scripts': ['ros-get=ros_get.__main__:main']}, )
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='ros_get',
package_dir={'': 'src'}, # tell distutils packages are under src
packages=find_packages('src'), # include all packages under src
install_requires=[
'argcomplete',
'catkin_pkg',
'catkin_tools',
'colorlog',
'future',
'mock',
'rosdep',
'rosdistro',
'rosinstall_generator',
'trollius', # remove when catkin>0.4.4 is released
'vcstools',
'xdg==1.0.7',
],
entry_points={'console_scripts': ['ros-get=ros_get.__main__:main']}, )
<commit_msg>Fix rosdistro requirements that were broken
Pip doesn't do transitive dependencies so we must specify it here. This
commit was needed due to ros-infrastructure/rosinstall_generator#44<commit_after>
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='ros_get',
package_dir={'': 'src'}, # tell distutils packages are under src
packages=find_packages('src'), # include all packages under src
install_requires=[
'argcomplete',
'catkin_pkg',
'catkin_tools',
'colorlog',
'future',
'mock',
'rosdep',
'rosdistro >= 0.6.8',
'rosinstall_generator',
'trollius', # remove when catkin>0.4.4 is released
'vcstools',
'xdg==1.0.7',
],
entry_points={'console_scripts': ['ros-get=ros_get.__main__:main']}, )
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='ros_get',
package_dir={'': 'src'}, # tell distutils packages are under src
packages=find_packages('src'), # include all packages under src
install_requires=[
'argcomplete',
'catkin_pkg',
'catkin_tools',
'colorlog',
'future',
'mock',
'rosdep',
'rosdistro',
'rosinstall_generator',
'trollius', # remove when catkin>0.4.4 is released
'vcstools',
'xdg==1.0.7',
],
entry_points={'console_scripts': ['ros-get=ros_get.__main__:main']}, )
Fix rosdistro requirements that were broken
Pip doesn't do transitive dependencies so we must specify it here. This
commit was needed due to ros-infrastructure/rosinstall_generator#44#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='ros_get',
package_dir={'': 'src'}, # tell distutils packages are under src
packages=find_packages('src'), # include all packages under src
install_requires=[
'argcomplete',
'catkin_pkg',
'catkin_tools',
'colorlog',
'future',
'mock',
'rosdep',
'rosdistro >= 0.6.8',
'rosinstall_generator',
'trollius', # remove when catkin>0.4.4 is released
'vcstools',
'xdg==1.0.7',
],
entry_points={'console_scripts': ['ros-get=ros_get.__main__:main']}, )
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='ros_get',
package_dir={'': 'src'}, # tell distutils packages are under src
packages=find_packages('src'), # include all packages under src
install_requires=[
'argcomplete',
'catkin_pkg',
'catkin_tools',
'colorlog',
'future',
'mock',
'rosdep',
'rosdistro',
'rosinstall_generator',
'trollius', # remove when catkin>0.4.4 is released
'vcstools',
'xdg==1.0.7',
],
entry_points={'console_scripts': ['ros-get=ros_get.__main__:main']}, )
<commit_msg>Fix rosdistro requirements that were broken
Pip doesn't do transitive dependencies so we must specify it here. This
commit was needed due to ros-infrastructure/rosinstall_generator#44<commit_after>#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='ros_get',
package_dir={'': 'src'}, # tell distutils packages are under src
packages=find_packages('src'), # include all packages under src
install_requires=[
'argcomplete',
'catkin_pkg',
'catkin_tools',
'colorlog',
'future',
'mock',
'rosdep',
'rosdistro >= 0.6.8',
'rosinstall_generator',
'trollius', # remove when catkin>0.4.4 is released
'vcstools',
'xdg==1.0.7',
],
entry_points={'console_scripts': ['ros-get=ros_get.__main__:main']}, )
|
d9b0128bf8058a12a08d7dc54fb30d2f588a16c1
|
setup.py
|
setup.py
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from setuptools import setup
setup(
name="atsy",
version="0.0.1",
description="AreTheySlimYet",
long_description="A set of tools for measuring cross-browser, cross-platform memory usage.",
url="https://github.com/EricRahm/atsy",
author="Eric Rahm",
author_email="erahm@mozilla.com",
license="MPL 2.0",
classifiers=[
"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)"
],
packages=["atsy"],
install_requires=[
"selenium",
"marionette-client",
"psutil==3.5.0",
],
dependency_links=[
# We need to use a fork of psutil until USS calculations get integrated.
"git+ssh://git@github.com/ericrahm/psutil@release-3.5.0#egg=psutil-3.5.0"
],
)
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from setuptools import setup
setup(
name="atsy",
version="0.0.1",
description="AreTheySlimYet",
long_description="A set of tools for measuring cross-browser, cross-platform memory usage.",
url="https://github.com/EricRahm/atsy",
author="Eric Rahm",
author_email="erahm@mozilla.com",
license="MPL 2.0",
classifiers=[
"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)"
],
packages=["atsy"],
install_requires=[
"selenium",
"marionette-client",
"psutil==3.5.0",
],
dependency_links=[
# We need to use a fork of psutil until USS calculations get integrated.
"git+https://github.com/ericrahm/psutil@release-3.5.0#egg=psutil-3.5.0"
],
)
|
Use https for git dependency
|
Use https for git dependency
|
Python
|
mpl-2.0
|
EricRahm/atsy,EricRahm/atsy,EricRahm/atsy
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from setuptools import setup
setup(
name="atsy",
version="0.0.1",
description="AreTheySlimYet",
long_description="A set of tools for measuring cross-browser, cross-platform memory usage.",
url="https://github.com/EricRahm/atsy",
author="Eric Rahm",
author_email="erahm@mozilla.com",
license="MPL 2.0",
classifiers=[
"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)"
],
packages=["atsy"],
install_requires=[
"selenium",
"marionette-client",
"psutil==3.5.0",
],
dependency_links=[
# We need to use a fork of psutil until USS calculations get integrated.
"git+ssh://git@github.com/ericrahm/psutil@release-3.5.0#egg=psutil-3.5.0"
],
)
Use https for git dependency
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from setuptools import setup
setup(
name="atsy",
version="0.0.1",
description="AreTheySlimYet",
long_description="A set of tools for measuring cross-browser, cross-platform memory usage.",
url="https://github.com/EricRahm/atsy",
author="Eric Rahm",
author_email="erahm@mozilla.com",
license="MPL 2.0",
classifiers=[
"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)"
],
packages=["atsy"],
install_requires=[
"selenium",
"marionette-client",
"psutil==3.5.0",
],
dependency_links=[
# We need to use a fork of psutil until USS calculations get integrated.
"git+https://github.com/ericrahm/psutil@release-3.5.0#egg=psutil-3.5.0"
],
)
|
<commit_before># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from setuptools import setup
setup(
name="atsy",
version="0.0.1",
description="AreTheySlimYet",
long_description="A set of tools for measuring cross-browser, cross-platform memory usage.",
url="https://github.com/EricRahm/atsy",
author="Eric Rahm",
author_email="erahm@mozilla.com",
license="MPL 2.0",
classifiers=[
"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)"
],
packages=["atsy"],
install_requires=[
"selenium",
"marionette-client",
"psutil==3.5.0",
],
dependency_links=[
# We need to use a fork of psutil until USS calculations get integrated.
"git+ssh://git@github.com/ericrahm/psutil@release-3.5.0#egg=psutil-3.5.0"
],
)
<commit_msg>Use https for git dependency<commit_after>
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from setuptools import setup
setup(
name="atsy",
version="0.0.1",
description="AreTheySlimYet",
long_description="A set of tools for measuring cross-browser, cross-platform memory usage.",
url="https://github.com/EricRahm/atsy",
author="Eric Rahm",
author_email="erahm@mozilla.com",
license="MPL 2.0",
classifiers=[
"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)"
],
packages=["atsy"],
install_requires=[
"selenium",
"marionette-client",
"psutil==3.5.0",
],
dependency_links=[
# We need to use a fork of psutil until USS calculations get integrated.
"git+https://github.com/ericrahm/psutil@release-3.5.0#egg=psutil-3.5.0"
],
)
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from setuptools import setup
setup(
name="atsy",
version="0.0.1",
description="AreTheySlimYet",
long_description="A set of tools for measuring cross-browser, cross-platform memory usage.",
url="https://github.com/EricRahm/atsy",
author="Eric Rahm",
author_email="erahm@mozilla.com",
license="MPL 2.0",
classifiers=[
"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)"
],
packages=["atsy"],
install_requires=[
"selenium",
"marionette-client",
"psutil==3.5.0",
],
dependency_links=[
# We need to use a fork of psutil until USS calculations get integrated.
"git+ssh://git@github.com/ericrahm/psutil@release-3.5.0#egg=psutil-3.5.0"
],
)
Use https for git dependency# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from setuptools import setup
setup(
name="atsy",
version="0.0.1",
description="AreTheySlimYet",
long_description="A set of tools for measuring cross-browser, cross-platform memory usage.",
url="https://github.com/EricRahm/atsy",
author="Eric Rahm",
author_email="erahm@mozilla.com",
license="MPL 2.0",
classifiers=[
"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)"
],
packages=["atsy"],
install_requires=[
"selenium",
"marionette-client",
"psutil==3.5.0",
],
dependency_links=[
# We need to use a fork of psutil until USS calculations get integrated.
"git+https://github.com/ericrahm/psutil@release-3.5.0#egg=psutil-3.5.0"
],
)
|
<commit_before># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from setuptools import setup
setup(
name="atsy",
version="0.0.1",
description="AreTheySlimYet",
long_description="A set of tools for measuring cross-browser, cross-platform memory usage.",
url="https://github.com/EricRahm/atsy",
author="Eric Rahm",
author_email="erahm@mozilla.com",
license="MPL 2.0",
classifiers=[
"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)"
],
packages=["atsy"],
install_requires=[
"selenium",
"marionette-client",
"psutil==3.5.0",
],
dependency_links=[
# We need to use a fork of psutil until USS calculations get integrated.
"git+ssh://git@github.com/ericrahm/psutil@release-3.5.0#egg=psutil-3.5.0"
],
)
<commit_msg>Use https for git dependency<commit_after># This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from setuptools import setup
setup(
name="atsy",
version="0.0.1",
description="AreTheySlimYet",
long_description="A set of tools for measuring cross-browser, cross-platform memory usage.",
url="https://github.com/EricRahm/atsy",
author="Eric Rahm",
author_email="erahm@mozilla.com",
license="MPL 2.0",
classifiers=[
"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)"
],
packages=["atsy"],
install_requires=[
"selenium",
"marionette-client",
"psutil==3.5.0",
],
dependency_links=[
# We need to use a fork of psutil until USS calculations get integrated.
"git+https://github.com/ericrahm/psutil@release-3.5.0#egg=psutil-3.5.0"
],
)
|
a4168c405016751ca78196fc9f6abaa2bafd833a
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
from setuptools import setup, find_packages
with open('json2parquet/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
if not version:
raise RuntimeError('Cannot find version information')
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('CHANGELOG.rst') as changelog_file:
changelog = changelog_file.read()
setup(
name='json2parquet',
version=version,
description='A simple Parquet converter for JSON/python data',
long_description=readme + changelog,
author='Andrew Gross',
author_email='andrew.w.gross@gmail.com',
url='https://github.com/andrewgross/json2parquet',
install_requires=[
'pyarrow==0.6.0'
],
packages=[n for n in find_packages() if not n.startswith('tests')],
include_package_data=True,
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
from setuptools import setup, find_packages
with open('json2parquet/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
if not version:
raise RuntimeError('Cannot find version information')
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('CHANGELOG.rst') as changelog_file:
changelog = changelog_file.read()
setup(
name='json2parquet',
version=version,
description='A simple Parquet converter for JSON/python data',
long_description=readme + '\n\n' + changelog,
author='Andrew Gross',
author_email='andrew.w.gross@gmail.com',
url='https://github.com/andrewgross/json2parquet',
install_requires=[
'pyarrow==0.6.0'
],
packages=[n for n in find_packages() if not n.startswith('tests')],
include_package_data=True,
)
|
Divide changelog and readme when uploading
|
Divide changelog and readme when uploading
|
Python
|
mit
|
andrewgross/json2parquet
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
from setuptools import setup, find_packages
with open('json2parquet/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
if not version:
raise RuntimeError('Cannot find version information')
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('CHANGELOG.rst') as changelog_file:
changelog = changelog_file.read()
setup(
name='json2parquet',
version=version,
description='A simple Parquet converter for JSON/python data',
long_description=readme + changelog,
author='Andrew Gross',
author_email='andrew.w.gross@gmail.com',
url='https://github.com/andrewgross/json2parquet',
install_requires=[
'pyarrow==0.6.0'
],
packages=[n for n in find_packages() if not n.startswith('tests')],
include_package_data=True,
)
Divide changelog and readme when uploading
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
from setuptools import setup, find_packages
with open('json2parquet/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
if not version:
raise RuntimeError('Cannot find version information')
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('CHANGELOG.rst') as changelog_file:
changelog = changelog_file.read()
setup(
name='json2parquet',
version=version,
description='A simple Parquet converter for JSON/python data',
long_description=readme + '\n\n' + changelog,
author='Andrew Gross',
author_email='andrew.w.gross@gmail.com',
url='https://github.com/andrewgross/json2parquet',
install_requires=[
'pyarrow==0.6.0'
],
packages=[n for n in find_packages() if not n.startswith('tests')],
include_package_data=True,
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
from setuptools import setup, find_packages
with open('json2parquet/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
if not version:
raise RuntimeError('Cannot find version information')
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('CHANGELOG.rst') as changelog_file:
changelog = changelog_file.read()
setup(
name='json2parquet',
version=version,
description='A simple Parquet converter for JSON/python data',
long_description=readme + changelog,
author='Andrew Gross',
author_email='andrew.w.gross@gmail.com',
url='https://github.com/andrewgross/json2parquet',
install_requires=[
'pyarrow==0.6.0'
],
packages=[n for n in find_packages() if not n.startswith('tests')],
include_package_data=True,
)
<commit_msg>Divide changelog and readme when uploading<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
from setuptools import setup, find_packages
with open('json2parquet/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
if not version:
raise RuntimeError('Cannot find version information')
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('CHANGELOG.rst') as changelog_file:
changelog = changelog_file.read()
setup(
name='json2parquet',
version=version,
description='A simple Parquet converter for JSON/python data',
long_description=readme + '\n\n' + changelog,
author='Andrew Gross',
author_email='andrew.w.gross@gmail.com',
url='https://github.com/andrewgross/json2parquet',
install_requires=[
'pyarrow==0.6.0'
],
packages=[n for n in find_packages() if not n.startswith('tests')],
include_package_data=True,
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
from setuptools import setup, find_packages
with open('json2parquet/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
if not version:
raise RuntimeError('Cannot find version information')
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('CHANGELOG.rst') as changelog_file:
changelog = changelog_file.read()
setup(
name='json2parquet',
version=version,
description='A simple Parquet converter for JSON/python data',
long_description=readme + changelog,
author='Andrew Gross',
author_email='andrew.w.gross@gmail.com',
url='https://github.com/andrewgross/json2parquet',
install_requires=[
'pyarrow==0.6.0'
],
packages=[n for n in find_packages() if not n.startswith('tests')],
include_package_data=True,
)
Divide changelog and readme when uploading#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
from setuptools import setup, find_packages
with open('json2parquet/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
if not version:
raise RuntimeError('Cannot find version information')
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('CHANGELOG.rst') as changelog_file:
changelog = changelog_file.read()
setup(
name='json2parquet',
version=version,
description='A simple Parquet converter for JSON/python data',
long_description=readme + '\n\n' + changelog,
author='Andrew Gross',
author_email='andrew.w.gross@gmail.com',
url='https://github.com/andrewgross/json2parquet',
install_requires=[
'pyarrow==0.6.0'
],
packages=[n for n in find_packages() if not n.startswith('tests')],
include_package_data=True,
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
from setuptools import setup, find_packages
with open('json2parquet/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
if not version:
raise RuntimeError('Cannot find version information')
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('CHANGELOG.rst') as changelog_file:
changelog = changelog_file.read()
setup(
name='json2parquet',
version=version,
description='A simple Parquet converter for JSON/python data',
long_description=readme + changelog,
author='Andrew Gross',
author_email='andrew.w.gross@gmail.com',
url='https://github.com/andrewgross/json2parquet',
install_requires=[
'pyarrow==0.6.0'
],
packages=[n for n in find_packages() if not n.startswith('tests')],
include_package_data=True,
)
<commit_msg>Divide changelog and readme when uploading<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import re
from setuptools import setup, find_packages
with open('json2parquet/__init__.py', 'r') as fd:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fd.read(), re.MULTILINE).group(1)
if not version:
raise RuntimeError('Cannot find version information')
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('CHANGELOG.rst') as changelog_file:
changelog = changelog_file.read()
setup(
name='json2parquet',
version=version,
description='A simple Parquet converter for JSON/python data',
long_description=readme + '\n\n' + changelog,
author='Andrew Gross',
author_email='andrew.w.gross@gmail.com',
url='https://github.com/andrewgross/json2parquet',
install_requires=[
'pyarrow==0.6.0'
],
packages=[n for n in find_packages() if not n.startswith('tests')],
include_package_data=True,
)
|
01f165a826cde563d831b5a26598c4ebeb504820
|
setup.py
|
setup.py
|
# Copyright (c) 2016, Imperial College London
# Copyright (c) 2016, Ghislain Antony Vaillant
# All rights reserved.
#
# Distributed under the terms of the new BSD license.
# See the accompanying LICENSE file or read the terms at
# https://opensource.org/licenses/BSD-3-Clause.
from setuptools import find_packages, setup
setup(
packages=find_packages(exclude=['builders', 'docs', 'tests']),
setup_requires=['cffi>=1.0.0', 'pkgconfig'],
install_requires=['cffi>=1.0.0', 'numpy'],
test_requires=['nose'],
ext_package='nfft',
cffi_modules=['builders/build_bindings.py:ffi'],
)
|
# Copyright (c) 2016, Imperial College London
# Copyright (c) 2016, Ghislain Antony Vaillant
# All rights reserved.
#
# Distributed under the terms of the new BSD license.
# See the accompanying LICENSE file or read the terms at
# https://opensource.org/licenses/BSD-3-Clause.
from setuptools import find_packages, setup
def get_install_requires():
from distutils.version import StrictVersion
from sys import version_info
install_requires = ['cffi>=1.0.0', 'numpy']
py_version = StrictVersion('.'.join(str(n) for n in version_info[:3]))
if py_version < StrictVersion('3.4'):
install_requires.append('enum34')
setup(
packages=find_packages(exclude=['builders', 'docs', 'tests']),
setup_requires=['cffi>=1.0.0', 'pkgconfig'],
install_requires=get_install_requires(),
test_requires=['nose'],
ext_package='nfft',
cffi_modules=['builders/build_bindings.py:ffi'],
)
|
Add enum34 install dependency for python < 3.4
|
Add enum34 install dependency for python < 3.4
|
Python
|
bsd-3-clause
|
ghisvail/nfft-cffi
|
# Copyright (c) 2016, Imperial College London
# Copyright (c) 2016, Ghislain Antony Vaillant
# All rights reserved.
#
# Distributed under the terms of the new BSD license.
# See the accompanying LICENSE file or read the terms at
# https://opensource.org/licenses/BSD-3-Clause.
from setuptools import find_packages, setup
setup(
packages=find_packages(exclude=['builders', 'docs', 'tests']),
setup_requires=['cffi>=1.0.0', 'pkgconfig'],
install_requires=['cffi>=1.0.0', 'numpy'],
test_requires=['nose'],
ext_package='nfft',
cffi_modules=['builders/build_bindings.py:ffi'],
)
Add enum34 install dependency for python < 3.4
|
# Copyright (c) 2016, Imperial College London
# Copyright (c) 2016, Ghislain Antony Vaillant
# All rights reserved.
#
# Distributed under the terms of the new BSD license.
# See the accompanying LICENSE file or read the terms at
# https://opensource.org/licenses/BSD-3-Clause.
from setuptools import find_packages, setup
def get_install_requires():
from distutils.version import StrictVersion
from sys import version_info
install_requires = ['cffi>=1.0.0', 'numpy']
py_version = StrictVersion('.'.join(str(n) for n in version_info[:3]))
if py_version < StrictVersion('3.4'):
install_requires.append('enum34')
setup(
packages=find_packages(exclude=['builders', 'docs', 'tests']),
setup_requires=['cffi>=1.0.0', 'pkgconfig'],
install_requires=get_install_requires(),
test_requires=['nose'],
ext_package='nfft',
cffi_modules=['builders/build_bindings.py:ffi'],
)
|
<commit_before># Copyright (c) 2016, Imperial College London
# Copyright (c) 2016, Ghislain Antony Vaillant
# All rights reserved.
#
# Distributed under the terms of the new BSD license.
# See the accompanying LICENSE file or read the terms at
# https://opensource.org/licenses/BSD-3-Clause.
from setuptools import find_packages, setup
setup(
packages=find_packages(exclude=['builders', 'docs', 'tests']),
setup_requires=['cffi>=1.0.0', 'pkgconfig'],
install_requires=['cffi>=1.0.0', 'numpy'],
test_requires=['nose'],
ext_package='nfft',
cffi_modules=['builders/build_bindings.py:ffi'],
)
<commit_msg>Add enum34 install dependency for python < 3.4<commit_after>
|
# Copyright (c) 2016, Imperial College London
# Copyright (c) 2016, Ghislain Antony Vaillant
# All rights reserved.
#
# Distributed under the terms of the new BSD license.
# See the accompanying LICENSE file or read the terms at
# https://opensource.org/licenses/BSD-3-Clause.
from setuptools import find_packages, setup
def get_install_requires():
from distutils.version import StrictVersion
from sys import version_info
install_requires = ['cffi>=1.0.0', 'numpy']
py_version = StrictVersion('.'.join(str(n) for n in version_info[:3]))
if py_version < StrictVersion('3.4'):
install_requires.append('enum34')
setup(
packages=find_packages(exclude=['builders', 'docs', 'tests']),
setup_requires=['cffi>=1.0.0', 'pkgconfig'],
install_requires=get_install_requires(),
test_requires=['nose'],
ext_package='nfft',
cffi_modules=['builders/build_bindings.py:ffi'],
)
|
# Copyright (c) 2016, Imperial College London
# Copyright (c) 2016, Ghislain Antony Vaillant
# All rights reserved.
#
# Distributed under the terms of the new BSD license.
# See the accompanying LICENSE file or read the terms at
# https://opensource.org/licenses/BSD-3-Clause.
from setuptools import find_packages, setup
setup(
packages=find_packages(exclude=['builders', 'docs', 'tests']),
setup_requires=['cffi>=1.0.0', 'pkgconfig'],
install_requires=['cffi>=1.0.0', 'numpy'],
test_requires=['nose'],
ext_package='nfft',
cffi_modules=['builders/build_bindings.py:ffi'],
)
Add enum34 install dependency for python < 3.4# Copyright (c) 2016, Imperial College London
# Copyright (c) 2016, Ghislain Antony Vaillant
# All rights reserved.
#
# Distributed under the terms of the new BSD license.
# See the accompanying LICENSE file or read the terms at
# https://opensource.org/licenses/BSD-3-Clause.
from setuptools import find_packages, setup
def get_install_requires():
from distutils.version import StrictVersion
from sys import version_info
install_requires = ['cffi>=1.0.0', 'numpy']
py_version = StrictVersion('.'.join(str(n) for n in version_info[:3]))
if py_version < StrictVersion('3.4'):
install_requires.append('enum34')
setup(
packages=find_packages(exclude=['builders', 'docs', 'tests']),
setup_requires=['cffi>=1.0.0', 'pkgconfig'],
install_requires=get_install_requires(),
test_requires=['nose'],
ext_package='nfft',
cffi_modules=['builders/build_bindings.py:ffi'],
)
|
<commit_before># Copyright (c) 2016, Imperial College London
# Copyright (c) 2016, Ghislain Antony Vaillant
# All rights reserved.
#
# Distributed under the terms of the new BSD license.
# See the accompanying LICENSE file or read the terms at
# https://opensource.org/licenses/BSD-3-Clause.
from setuptools import find_packages, setup
setup(
packages=find_packages(exclude=['builders', 'docs', 'tests']),
setup_requires=['cffi>=1.0.0', 'pkgconfig'],
install_requires=['cffi>=1.0.0', 'numpy'],
test_requires=['nose'],
ext_package='nfft',
cffi_modules=['builders/build_bindings.py:ffi'],
)
<commit_msg>Add enum34 install dependency for python < 3.4<commit_after># Copyright (c) 2016, Imperial College London
# Copyright (c) 2016, Ghislain Antony Vaillant
# All rights reserved.
#
# Distributed under the terms of the new BSD license.
# See the accompanying LICENSE file or read the terms at
# https://opensource.org/licenses/BSD-3-Clause.
from setuptools import find_packages, setup
def get_install_requires():
from distutils.version import StrictVersion
from sys import version_info
install_requires = ['cffi>=1.0.0', 'numpy']
py_version = StrictVersion('.'.join(str(n) for n in version_info[:3]))
if py_version < StrictVersion('3.4'):
install_requires.append('enum34')
setup(
packages=find_packages(exclude=['builders', 'docs', 'tests']),
setup_requires=['cffi>=1.0.0', 'pkgconfig'],
install_requires=get_install_requires(),
test_requires=['nose'],
ext_package='nfft',
cffi_modules=['builders/build_bindings.py:ffi'],
)
|
259555775c098153b1715f85561309b42e29ee7d
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from distutils.core import setup
from avena import avena
_classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Multimedia :: Graphics',
]
with open('README.rst', 'r') as rst_file:
_long_description = rst_file.read()
_setup_args = {
'author': avena.__author__,
'author_email': avena.__email__,
'classifiers': _classifiers,
'description': avena.__doc__,
'license': avena.__license__,
'long_description': _long_description,
'name': 'Avena',
'url': 'https://bitbucket.org/eliteraspberries/avena',
'version': avena.__version__,
}
if __name__ == '__main__':
setup(packages=['avena'],
**_setup_args)
|
#!/usr/bin/env python
from distutils.core import setup
from avena import avena
_classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Multimedia :: Graphics',
]
with open('README.rst', 'r') as rst_file:
_long_description = rst_file.read()
_setup_args = {
'author': avena.__author__,
'author_email': avena.__email__,
'classifiers': _classifiers,
'description': avena.__doc__,
'license': avena.__license__,
'long_description': _long_description,
'name': 'Avena',
'url': 'https://bitbucket.org/eliteraspberries/avena',
'version': avena.__version__,
}
if __name__ == '__main__':
setup(packages=['avena'], scripts=['scripts/avena'],
**_setup_args)
|
Install the script with the library.
|
Install the script with the library.
|
Python
|
isc
|
eliteraspberries/avena
|
#!/usr/bin/env python
from distutils.core import setup
from avena import avena
_classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Multimedia :: Graphics',
]
with open('README.rst', 'r') as rst_file:
_long_description = rst_file.read()
_setup_args = {
'author': avena.__author__,
'author_email': avena.__email__,
'classifiers': _classifiers,
'description': avena.__doc__,
'license': avena.__license__,
'long_description': _long_description,
'name': 'Avena',
'url': 'https://bitbucket.org/eliteraspberries/avena',
'version': avena.__version__,
}
if __name__ == '__main__':
setup(packages=['avena'],
**_setup_args)
Install the script with the library.
|
#!/usr/bin/env python
from distutils.core import setup
from avena import avena
_classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Multimedia :: Graphics',
]
with open('README.rst', 'r') as rst_file:
_long_description = rst_file.read()
_setup_args = {
'author': avena.__author__,
'author_email': avena.__email__,
'classifiers': _classifiers,
'description': avena.__doc__,
'license': avena.__license__,
'long_description': _long_description,
'name': 'Avena',
'url': 'https://bitbucket.org/eliteraspberries/avena',
'version': avena.__version__,
}
if __name__ == '__main__':
setup(packages=['avena'], scripts=['scripts/avena'],
**_setup_args)
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup
from avena import avena
_classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Multimedia :: Graphics',
]
with open('README.rst', 'r') as rst_file:
_long_description = rst_file.read()
_setup_args = {
'author': avena.__author__,
'author_email': avena.__email__,
'classifiers': _classifiers,
'description': avena.__doc__,
'license': avena.__license__,
'long_description': _long_description,
'name': 'Avena',
'url': 'https://bitbucket.org/eliteraspberries/avena',
'version': avena.__version__,
}
if __name__ == '__main__':
setup(packages=['avena'],
**_setup_args)
<commit_msg>Install the script with the library.<commit_after>
|
#!/usr/bin/env python
from distutils.core import setup
from avena import avena
_classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Multimedia :: Graphics',
]
with open('README.rst', 'r') as rst_file:
_long_description = rst_file.read()
_setup_args = {
'author': avena.__author__,
'author_email': avena.__email__,
'classifiers': _classifiers,
'description': avena.__doc__,
'license': avena.__license__,
'long_description': _long_description,
'name': 'Avena',
'url': 'https://bitbucket.org/eliteraspberries/avena',
'version': avena.__version__,
}
if __name__ == '__main__':
setup(packages=['avena'], scripts=['scripts/avena'],
**_setup_args)
|
#!/usr/bin/env python
from distutils.core import setup
from avena import avena
_classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Multimedia :: Graphics',
]
with open('README.rst', 'r') as rst_file:
_long_description = rst_file.read()
_setup_args = {
'author': avena.__author__,
'author_email': avena.__email__,
'classifiers': _classifiers,
'description': avena.__doc__,
'license': avena.__license__,
'long_description': _long_description,
'name': 'Avena',
'url': 'https://bitbucket.org/eliteraspberries/avena',
'version': avena.__version__,
}
if __name__ == '__main__':
setup(packages=['avena'],
**_setup_args)
Install the script with the library.#!/usr/bin/env python
from distutils.core import setup
from avena import avena
_classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Multimedia :: Graphics',
]
with open('README.rst', 'r') as rst_file:
_long_description = rst_file.read()
_setup_args = {
'author': avena.__author__,
'author_email': avena.__email__,
'classifiers': _classifiers,
'description': avena.__doc__,
'license': avena.__license__,
'long_description': _long_description,
'name': 'Avena',
'url': 'https://bitbucket.org/eliteraspberries/avena',
'version': avena.__version__,
}
if __name__ == '__main__':
setup(packages=['avena'], scripts=['scripts/avena'],
**_setup_args)
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup
from avena import avena
_classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Multimedia :: Graphics',
]
with open('README.rst', 'r') as rst_file:
_long_description = rst_file.read()
_setup_args = {
'author': avena.__author__,
'author_email': avena.__email__,
'classifiers': _classifiers,
'description': avena.__doc__,
'license': avena.__license__,
'long_description': _long_description,
'name': 'Avena',
'url': 'https://bitbucket.org/eliteraspberries/avena',
'version': avena.__version__,
}
if __name__ == '__main__':
setup(packages=['avena'],
**_setup_args)
<commit_msg>Install the script with the library.<commit_after>#!/usr/bin/env python
from distutils.core import setup
from avena import avena
_classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: ISC License (ISCL)',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Multimedia :: Graphics',
]
with open('README.rst', 'r') as rst_file:
_long_description = rst_file.read()
_setup_args = {
'author': avena.__author__,
'author_email': avena.__email__,
'classifiers': _classifiers,
'description': avena.__doc__,
'license': avena.__license__,
'long_description': _long_description,
'name': 'Avena',
'url': 'https://bitbucket.org/eliteraspberries/avena',
'version': avena.__version__,
}
if __name__ == '__main__':
setup(packages=['avena'], scripts=['scripts/avena'],
**_setup_args)
|
ff5bc4ed9f7fda3fa1ed2385ace7f11f7a590104
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name="django-deployer",
version="0.1.0",
description="Django deployment utility for popular PaaS providers",
long_description=open('README.md').read(),
author="Nate Aune",
author_email="nate@appsembler.com",
url="https://github.com/natea/django-deployer",
packages=find_packages(),
install_requires=[
'fabric==1.6.0', # formerly 1.4.3
'jinja2==2.6',
'heroku',
'dotcloud',
'gondor',
'pyyaml',
'sphinx==1.1.3',
],
classifiers=(
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"License :: OSI Approved :: GPL License",
"Programming Language :: Python",
),
entry_points={
'console_scripts' : [
'deployer-init = django_deployer.main:add_fabfile',
]
},
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name="django-deployer",
version="0.1.0",
description="Django deployment utility for popular PaaS providers",
long_description=open('README.rst').read(),
author="Nate Aune",
author_email="nate@appsembler.com",
url="https://github.com/natea/django-deployer",
packages=find_packages(),
install_requires=[
'fabric==1.6.0', # formerly 1.4.3
'jinja2==2.6',
'heroku',
'dotcloud',
'gondor',
'pyyaml',
'sphinx==1.1.3',
],
classifiers=(
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"License :: OSI Approved :: GPL License",
"Programming Language :: Python",
),
entry_points={
'console_scripts' : [
'deployer-init = django_deployer.main:add_fabfile',
]
},
)
|
Read README.rst into long_description instead
|
Read README.rst into long_description instead
|
Python
|
mit
|
mrdakoki/ballin-avenger,natea/django-deployer,mrdakoki/ballin-avenger,natea/django-deployer
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name="django-deployer",
version="0.1.0",
description="Django deployment utility for popular PaaS providers",
long_description=open('README.md').read(),
author="Nate Aune",
author_email="nate@appsembler.com",
url="https://github.com/natea/django-deployer",
packages=find_packages(),
install_requires=[
'fabric==1.6.0', # formerly 1.4.3
'jinja2==2.6',
'heroku',
'dotcloud',
'gondor',
'pyyaml',
'sphinx==1.1.3',
],
classifiers=(
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"License :: OSI Approved :: GPL License",
"Programming Language :: Python",
),
entry_points={
'console_scripts' : [
'deployer-init = django_deployer.main:add_fabfile',
]
},
)
Read README.rst into long_description instead
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name="django-deployer",
version="0.1.0",
description="Django deployment utility for popular PaaS providers",
long_description=open('README.rst').read(),
author="Nate Aune",
author_email="nate@appsembler.com",
url="https://github.com/natea/django-deployer",
packages=find_packages(),
install_requires=[
'fabric==1.6.0', # formerly 1.4.3
'jinja2==2.6',
'heroku',
'dotcloud',
'gondor',
'pyyaml',
'sphinx==1.1.3',
],
classifiers=(
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"License :: OSI Approved :: GPL License",
"Programming Language :: Python",
),
entry_points={
'console_scripts' : [
'deployer-init = django_deployer.main:add_fabfile',
]
},
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name="django-deployer",
version="0.1.0",
description="Django deployment utility for popular PaaS providers",
long_description=open('README.md').read(),
author="Nate Aune",
author_email="nate@appsembler.com",
url="https://github.com/natea/django-deployer",
packages=find_packages(),
install_requires=[
'fabric==1.6.0', # formerly 1.4.3
'jinja2==2.6',
'heroku',
'dotcloud',
'gondor',
'pyyaml',
'sphinx==1.1.3',
],
classifiers=(
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"License :: OSI Approved :: GPL License",
"Programming Language :: Python",
),
entry_points={
'console_scripts' : [
'deployer-init = django_deployer.main:add_fabfile',
]
},
)
<commit_msg>Read README.rst into long_description instead<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name="django-deployer",
version="0.1.0",
description="Django deployment utility for popular PaaS providers",
long_description=open('README.rst').read(),
author="Nate Aune",
author_email="nate@appsembler.com",
url="https://github.com/natea/django-deployer",
packages=find_packages(),
install_requires=[
'fabric==1.6.0', # formerly 1.4.3
'jinja2==2.6',
'heroku',
'dotcloud',
'gondor',
'pyyaml',
'sphinx==1.1.3',
],
classifiers=(
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"License :: OSI Approved :: GPL License",
"Programming Language :: Python",
),
entry_points={
'console_scripts' : [
'deployer-init = django_deployer.main:add_fabfile',
]
},
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name="django-deployer",
version="0.1.0",
description="Django deployment utility for popular PaaS providers",
long_description=open('README.md').read(),
author="Nate Aune",
author_email="nate@appsembler.com",
url="https://github.com/natea/django-deployer",
packages=find_packages(),
install_requires=[
'fabric==1.6.0', # formerly 1.4.3
'jinja2==2.6',
'heroku',
'dotcloud',
'gondor',
'pyyaml',
'sphinx==1.1.3',
],
classifiers=(
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"License :: OSI Approved :: GPL License",
"Programming Language :: Python",
),
entry_points={
'console_scripts' : [
'deployer-init = django_deployer.main:add_fabfile',
]
},
)
Read README.rst into long_description instead#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name="django-deployer",
version="0.1.0",
description="Django deployment utility for popular PaaS providers",
long_description=open('README.rst').read(),
author="Nate Aune",
author_email="nate@appsembler.com",
url="https://github.com/natea/django-deployer",
packages=find_packages(),
install_requires=[
'fabric==1.6.0', # formerly 1.4.3
'jinja2==2.6',
'heroku',
'dotcloud',
'gondor',
'pyyaml',
'sphinx==1.1.3',
],
classifiers=(
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"License :: OSI Approved :: GPL License",
"Programming Language :: Python",
),
entry_points={
'console_scripts' : [
'deployer-init = django_deployer.main:add_fabfile',
]
},
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name="django-deployer",
version="0.1.0",
description="Django deployment utility for popular PaaS providers",
long_description=open('README.md').read(),
author="Nate Aune",
author_email="nate@appsembler.com",
url="https://github.com/natea/django-deployer",
packages=find_packages(),
install_requires=[
'fabric==1.6.0', # formerly 1.4.3
'jinja2==2.6',
'heroku',
'dotcloud',
'gondor',
'pyyaml',
'sphinx==1.1.3',
],
classifiers=(
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"License :: OSI Approved :: GPL License",
"Programming Language :: Python",
),
entry_points={
'console_scripts' : [
'deployer-init = django_deployer.main:add_fabfile',
]
},
)
<commit_msg>Read README.rst into long_description instead<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(
name="django-deployer",
version="0.1.0",
description="Django deployment utility for popular PaaS providers",
long_description=open('README.rst').read(),
author="Nate Aune",
author_email="nate@appsembler.com",
url="https://github.com/natea/django-deployer",
packages=find_packages(),
install_requires=[
'fabric==1.6.0', # formerly 1.4.3
'jinja2==2.6',
'heroku',
'dotcloud',
'gondor',
'pyyaml',
'sphinx==1.1.3',
],
classifiers=(
"Development Status :: 3 - Alpha",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"License :: OSI Approved :: GPL License",
"Programming Language :: Python",
),
entry_points={
'console_scripts' : [
'deployer-init = django_deployer.main:add_fabfile',
]
},
)
|
efe86c67f287b33701dcdb2198f2fef587ea0be1
|
setup.py
|
setup.py
|
import setuptools
from server.version import VERSION
setuptools.setup(
name='electrumx',
version=VERSION.split()[-1],
scripts=['electrumx_server.py', 'electrumx_rpc.py'],
python_requires='>=3.5.3',
# "irc" package is only required if IRC connectivity is enabled
# via environment variables, in which case I've tested with 15.0.4
# "x11_hash" package (1.4) is required to sync DASH network.
install_requires=['plyvel', 'pylru', 'irc', 'aiohttp >= 1'],
packages=setuptools.find_packages(),
description='ElectrumX Server',
author='Neil Booth',
author_email='kyuupichan@gmail.com',
license='MIT Licence',
url='https://github.com/kyuupichan/electrumx/',
long_description='Server implementation for the Electrum wallet',
classifiers=[
'Development Status :: 3 - Alpha',
'Topic :: Internet',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
],
)
|
import setuptools
from server.version import VERSION
setuptools.setup(
name='electrumx',
version=VERSION.split()[-1],
scripts=['electrumx_server.py', 'electrumx_rpc.py'],
python_requires='>=3.5.3',
# "irc" package is only required if IRC connectivity is enabled
# via environment variables, in which case I've tested with 15.0.4
# "x11_hash" package (1.4) is required to sync DASH network.
install_requires=['plyvel', 'pylru', 'irc', 'aiohttp >= 1'],
packages=setuptools.find_packages(exclude=['tests']),
description='ElectrumX Server',
author='Neil Booth',
author_email='kyuupichan@gmail.com',
license='MIT Licence',
url='https://github.com/kyuupichan/electrumx/',
long_description='Server implementation for the Electrum wallet',
classifiers=[
'Development Status :: 3 - Alpha',
'Topic :: Internet',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
],
)
|
Exclude tests directory from installation
|
Exclude tests directory from installation
Fixes #223
|
Python
|
mit
|
thelazier/electrumx,thelazier/electrumx,Groestlcoin/electrumx-grs,Crowndev/electrumx,erasmospunk/electrumx,erasmospunk/electrumx,shsmith/electrumx,Crowndev/electrumx,Groestlcoin/electrumx-grs,shsmith/electrumx
|
import setuptools
from server.version import VERSION
setuptools.setup(
name='electrumx',
version=VERSION.split()[-1],
scripts=['electrumx_server.py', 'electrumx_rpc.py'],
python_requires='>=3.5.3',
# "irc" package is only required if IRC connectivity is enabled
# via environment variables, in which case I've tested with 15.0.4
# "x11_hash" package (1.4) is required to sync DASH network.
install_requires=['plyvel', 'pylru', 'irc', 'aiohttp >= 1'],
packages=setuptools.find_packages(),
description='ElectrumX Server',
author='Neil Booth',
author_email='kyuupichan@gmail.com',
license='MIT Licence',
url='https://github.com/kyuupichan/electrumx/',
long_description='Server implementation for the Electrum wallet',
classifiers=[
'Development Status :: 3 - Alpha',
'Topic :: Internet',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
],
)
Exclude tests directory from installation
Fixes #223
|
import setuptools
from server.version import VERSION
setuptools.setup(
name='electrumx',
version=VERSION.split()[-1],
scripts=['electrumx_server.py', 'electrumx_rpc.py'],
python_requires='>=3.5.3',
# "irc" package is only required if IRC connectivity is enabled
# via environment variables, in which case I've tested with 15.0.4
# "x11_hash" package (1.4) is required to sync DASH network.
install_requires=['plyvel', 'pylru', 'irc', 'aiohttp >= 1'],
packages=setuptools.find_packages(exclude=['tests']),
description='ElectrumX Server',
author='Neil Booth',
author_email='kyuupichan@gmail.com',
license='MIT Licence',
url='https://github.com/kyuupichan/electrumx/',
long_description='Server implementation for the Electrum wallet',
classifiers=[
'Development Status :: 3 - Alpha',
'Topic :: Internet',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
],
)
|
<commit_before>import setuptools
from server.version import VERSION
setuptools.setup(
name='electrumx',
version=VERSION.split()[-1],
scripts=['electrumx_server.py', 'electrumx_rpc.py'],
python_requires='>=3.5.3',
# "irc" package is only required if IRC connectivity is enabled
# via environment variables, in which case I've tested with 15.0.4
# "x11_hash" package (1.4) is required to sync DASH network.
install_requires=['plyvel', 'pylru', 'irc', 'aiohttp >= 1'],
packages=setuptools.find_packages(),
description='ElectrumX Server',
author='Neil Booth',
author_email='kyuupichan@gmail.com',
license='MIT Licence',
url='https://github.com/kyuupichan/electrumx/',
long_description='Server implementation for the Electrum wallet',
classifiers=[
'Development Status :: 3 - Alpha',
'Topic :: Internet',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
],
)
<commit_msg>Exclude tests directory from installation
Fixes #223<commit_after>
|
import setuptools
from server.version import VERSION
setuptools.setup(
name='electrumx',
version=VERSION.split()[-1],
scripts=['electrumx_server.py', 'electrumx_rpc.py'],
python_requires='>=3.5.3',
# "irc" package is only required if IRC connectivity is enabled
# via environment variables, in which case I've tested with 15.0.4
# "x11_hash" package (1.4) is required to sync DASH network.
install_requires=['plyvel', 'pylru', 'irc', 'aiohttp >= 1'],
packages=setuptools.find_packages(exclude=['tests']),
description='ElectrumX Server',
author='Neil Booth',
author_email='kyuupichan@gmail.com',
license='MIT Licence',
url='https://github.com/kyuupichan/electrumx/',
long_description='Server implementation for the Electrum wallet',
classifiers=[
'Development Status :: 3 - Alpha',
'Topic :: Internet',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
],
)
|
import setuptools
from server.version import VERSION
setuptools.setup(
name='electrumx',
version=VERSION.split()[-1],
scripts=['electrumx_server.py', 'electrumx_rpc.py'],
python_requires='>=3.5.3',
# "irc" package is only required if IRC connectivity is enabled
# via environment variables, in which case I've tested with 15.0.4
# "x11_hash" package (1.4) is required to sync DASH network.
install_requires=['plyvel', 'pylru', 'irc', 'aiohttp >= 1'],
packages=setuptools.find_packages(),
description='ElectrumX Server',
author='Neil Booth',
author_email='kyuupichan@gmail.com',
license='MIT Licence',
url='https://github.com/kyuupichan/electrumx/',
long_description='Server implementation for the Electrum wallet',
classifiers=[
'Development Status :: 3 - Alpha',
'Topic :: Internet',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
],
)
Exclude tests directory from installation
Fixes #223
import setuptools
from server.version import VERSION
setuptools.setup(
name='electrumx',
version=VERSION.split()[-1],
scripts=['electrumx_server.py', 'electrumx_rpc.py'],
python_requires='>=3.5.3',
# "irc" package is only required if IRC connectivity is enabled
# via environment variables, in which case I've tested with 15.0.4
# "x11_hash" package (1.4) is required to sync DASH network.
install_requires=['plyvel', 'pylru', 'irc', 'aiohttp >= 1'],
packages=setuptools.find_packages(exclude=['tests']),
description='ElectrumX Server',
author='Neil Booth',
author_email='kyuupichan@gmail.com',
license='MIT Licence',
url='https://github.com/kyuupichan/electrumx/',
long_description='Server implementation for the Electrum wallet',
classifiers=[
'Development Status :: 3 - Alpha',
'Topic :: Internet',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
],
)
|
<commit_before>import setuptools
from server.version import VERSION
setuptools.setup(
name='electrumx',
version=VERSION.split()[-1],
scripts=['electrumx_server.py', 'electrumx_rpc.py'],
python_requires='>=3.5.3',
# "irc" package is only required if IRC connectivity is enabled
# via environment variables, in which case I've tested with 15.0.4
# "x11_hash" package (1.4) is required to sync DASH network.
install_requires=['plyvel', 'pylru', 'irc', 'aiohttp >= 1'],
packages=setuptools.find_packages(),
description='ElectrumX Server',
author='Neil Booth',
author_email='kyuupichan@gmail.com',
license='MIT Licence',
url='https://github.com/kyuupichan/electrumx/',
long_description='Server implementation for the Electrum wallet',
classifiers=[
'Development Status :: 3 - Alpha',
'Topic :: Internet',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
],
)
<commit_msg>Exclude tests directory from installation
Fixes #223<commit_after>import setuptools
from server.version import VERSION
setuptools.setup(
name='electrumx',
version=VERSION.split()[-1],
scripts=['electrumx_server.py', 'electrumx_rpc.py'],
python_requires='>=3.5.3',
# "irc" package is only required if IRC connectivity is enabled
# via environment variables, in which case I've tested with 15.0.4
# "x11_hash" package (1.4) is required to sync DASH network.
install_requires=['plyvel', 'pylru', 'irc', 'aiohttp >= 1'],
packages=setuptools.find_packages(exclude=['tests']),
description='ElectrumX Server',
author='Neil Booth',
author_email='kyuupichan@gmail.com',
license='MIT Licence',
url='https://github.com/kyuupichan/electrumx/',
long_description='Server implementation for the Electrum wallet',
classifiers=[
'Development Status :: 3 - Alpha',
'Topic :: Internet',
'License :: OSI Approved :: MIT License',
'Operating System :: Unix',
],
)
|
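One caveat about the exclusion used in this commit: find_packages(exclude=['tests']) omits only the top-level tests package, not nested packages such as tests.unit. A minimal sketch of the broader pattern, assuming such sub-packages exist (they are not shown in the original commit):

from setuptools import find_packages

# Omit the top-level tests package and every sub-package beneath it,
# e.g. tests.unit or tests.integration.
packages = find_packages(exclude=['tests', 'tests.*'])
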
7683926de34fac26df920ecede3735cd347d493b
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='pylife',
version='0.1.0',
description='Conway\'s Game of Life with kivy',
author='yukirin',
author_email='standupdown@gmail.com',
url='https://github.com/yukirin/LifeGame-kivy',
license='MIT',
keywords=['python', 'game', 'life game', 'kivy'],
zip_safe=False,
platforms=['Linux'],
packages=find_packages(),
package_data={'pylife': ['lifegame*.*']},
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Development Status :: 4 - Beta',
'Operating System :: POSIX :: Linux',
'Natural Language :: Japanese',
'License :: OSI Approved :: MIT License',
'Topic :: Games/Entertainment'
],
install_requires = ['kivy'],
entry_points={
'gui_scripts': ['pylife = pylife.main:run']
},
)
|
from setuptools import setup, find_packages
setup(
name='pylife',
version='0.1.0',
description='Conway\'s Game of Life with kivy',
author='yukirin',
author_email='standupdown@gmail.com',
url='https://github.com/yukirin/LifeGame-kivy',
license='MIT',
keywords=['python', 'game', 'life game', 'kivy'],
zip_safe=False,
platforms=['Linux'],
packages=find_packages(),
package_data={'pylife': ['lifegame*.*']},
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3.4',
'Development Status :: 4 - Beta',
'Operating System :: POSIX :: Linux',
'Natural Language :: Japanese',
'License :: OSI Approved :: MIT License',
'Topic :: Games/Entertainment'
],
install_requires = ['kivy'],
entry_points={
'gui_scripts': ['pylife = pylife.main:run']
},
)
|
Change python version 3 -> 3.4
|
Change python version 3 -> 3.4
|
Python
|
mit
|
yukirin/LifeGame-kivy
|
from setuptools import setup, find_packages
setup(
name='pylife',
version='0.1.0',
description='Conway\'s Game of Life with kivy',
author='yukirin',
author_email='standupdown@gmail.com',
url='https://github.com/yukirin/LifeGame-kivy',
license='MIT',
keywords=['python', 'game', 'life game', 'kivy'],
zip_safe=False,
platforms=['Linux'],
packages=find_packages(),
package_data={'pylife': ['lifegame*.*']},
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Development Status :: 4 - Beta',
'Operating System :: POSIX :: Linux',
'Natural Language :: Japanese',
'License :: OSI Approved :: MIT License',
'Topic :: Games/Entertainment'
],
install_requires = ['kivy'],
entry_points={
'gui_scripts': ['pylife = pylife.main:run']
},
)
Change python version 3 -> 3.4
|
from setuptools import setup, find_packages
setup(
name='pylife',
version='0.1.0',
description='Conway\'s Game of Life with kivy',
author='yukirin',
author_email='standupdown@gmail.com',
url='https://github.com/yukirin/LifeGame-kivy',
license='MIT',
keywords=['python', 'game', 'life game', 'kivy'],
zip_safe=False,
platforms=['Linux'],
packages=find_packages(),
package_data={'pylife': ['lifegame*.*']},
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3.4',
'Development Status :: 4 - Beta',
'Operating System :: POSIX :: Linux',
'Natural Language :: Japanese',
'License :: OSI Approved :: MIT License',
'Topic :: Games/Entertainment'
],
install_requires = ['kivy'],
entry_points={
'gui_scripts': ['pylife = pylife.main:run']
},
)
|
<commit_before>
from setuptools import setup, find_packages
setup(
name='pylife',
version='0.1.0',
description='Conway\'s Game of Life with kivy',
author='yukirin',
author_email='standupdown@gmail.com',
url='https://github.com/yukirin/LifeGame-kivy',
license='MIT',
keywords=['python', 'game', 'life game', 'kivy'],
zip_safe=False,
platforms=['Linux'],
packages=find_packages(),
package_data={'pylife': ['lifegame*.*']},
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Development Status :: 4 - Beta',
'Operating System :: POSIX :: Linux',
'Natural Language :: Japanese',
'License :: OSI Approved :: MIT License',
'Topic :: Games/Entertainment'
],
install_requires = ['kivy'],
entry_points={
'gui_scripts': ['pylife = pylife.main:run']
},
)
<commit_msg>Change python version 3 -> 3.4<commit_after>
|
from setuptools import setup, find_packages
setup(
name='pylife',
version='0.1.0',
description='Conway\'s Game of Life with kivy',
author='yukirin',
author_email='standupdown@gmail.com',
url='https://github.com/yukirin/LifeGame-kivy',
license='MIT',
keywords=['python', 'game', 'life game', 'kivy'],
zip_safe=False,
platforms=['Linux'],
packages=find_packages(),
package_data={'pylife': ['lifegame*.*']},
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3.4',
'Development Status :: 4 - Beta',
'Operating System :: POSIX :: Linux',
'Natural Language :: Japanese',
'License :: OSI Approved :: MIT License',
'Topic :: Games/Entertainment'
],
install_requires = ['kivy'],
entry_points={
'gui_scripts': ['pylife = pylife.main:run']
},
)
|
from setuptools import setup, find_packages
setup(
name='pylife',
version='0.1.0',
description='Conway\'s Game of Life with kivy',
author='yukirin',
author_email='standupdown@gmail.com',
url='https://github.com/yukirin/LifeGame-kivy',
license='MIT',
keywords=['python', 'game', 'life game', 'kivy'],
zip_safe=False,
platforms=['Linux'],
packages=find_packages(),
package_data={'pylife': ['lifegame*.*']},
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Development Status :: 4 - Beta',
'Operating System :: POSIX :: Linux',
'Natural Language :: Japanese',
'License :: OSI Approved :: MIT License',
'Topic :: Games/Entertainment'
],
install_requires = ['kivy'],
entry_points={
'gui_scripts': ['pylife = pylife.main:run']
},
)
Change python version 3 -> 3.4
from setuptools import setup, find_packages
setup(
name='pylife',
version='0.1.0',
description='Conway\'s Game of Life with kivy',
author='yukirin',
author_email='standupdown@gmail.com',
url='https://github.com/yukirin/LifeGame-kivy',
license='MIT',
keywords=['python', 'game', 'life game', 'kivy'],
zip_safe=False,
platforms=['Linux'],
packages=find_packages(),
package_data={'pylife': ['lifegame*.*']},
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3.4',
'Development Status :: 4 - Beta',
'Operating System :: POSIX :: Linux',
'Natural Language :: Japanese',
'License :: OSI Approved :: MIT License',
'Topic :: Games/Entertainment'
],
install_requires = ['kivy'],
entry_points={
'gui_scripts': ['pylife = pylife.main:run']
},
)
|
<commit_before>
from setuptools import setup, find_packages
setup(
name='pylife',
version='0.1.0',
description='Conway\'s Game of Life with kivy',
author='yukirin',
author_email='standupdown@gmail.com',
url='https://github.com/yukirin/LifeGame-kivy',
license='MIT',
keywords=['python', 'game', 'life game', 'kivy'],
zip_safe=False,
platforms=['Linux'],
packages=find_packages(),
package_data={'pylife': ['lifegame*.*']},
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Development Status :: 4 - Beta',
'Operating System :: POSIX :: Linux',
'Natural Language :: Japanese',
'License :: OSI Approved :: MIT License',
'Topic :: Games/Entertainment'
],
install_requires = ['kivy'],
entry_points={
'gui_scripts': ['pylife = pylife.main:run']
},
)
<commit_msg>Change python version 3 -> 3.4<commit_after>
from setuptools import setup, find_packages
setup(
name='pylife',
version='0.1.0',
description='Conway\'s Game of Life with kivy',
author='yukirin',
author_email='standupdown@gmail.com',
url='https://github.com/yukirin/LifeGame-kivy',
license='MIT',
keywords=['python', 'game', 'life game', 'kivy'],
zip_safe=False,
platforms=['Linux'],
packages=find_packages(),
package_data={'pylife': ['lifegame*.*']},
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3.4',
'Development Status :: 4 - Beta',
'Operating System :: POSIX :: Linux',
'Natural Language :: Japanese',
'License :: OSI Approved :: MIT License',
'Topic :: Games/Entertainment'
],
install_requires = ['kivy'],
entry_points={
'gui_scripts': ['pylife = pylife.main:run']
},
)
|
3f20852f2e09f0eed6f5f7c227a10d87763f5686
|
setup.py
|
setup.py
|
from setuptools import find_packages, setup
setup(
name='pyserializer',
version='0.0.2',
description='Simple python serialization library.',
author='LocalMed',
author_email='ecordell@localmed.com, pete@localmed.com, joel.james@localmed.com',
url='',
license='MIT',
packages=find_packages(exclude=['tests', 'tests.*']),
include_package_data=True,
install_requires=[
'six==1.8.0'
],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
],
)
|
from setuptools import find_packages, setup
setup(
name='pyserializer',
version='0.0.2',
description='Simple python serialization library.',
author='LocalMed',
author_email='ecordell@localmed.com, pete@localmed.com, joel.james@localmed.com',
url='',
license='MIT',
packages=find_packages(exclude=['tests', 'tests.*']),
include_package_data=True,
install_requires=[
'six==1.8.0'
],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
],
)
|
Remove support for python below 2.7 version
|
Remove support for python below 2.7 version
|
Python
|
mit
|
localmed/pyserializer,localmed/pyserializer
|
from setuptools import find_packages, setup
setup(
name='pyserializer',
version='0.0.2',
description='Simple python serialization library.',
author='LocalMed',
author_email='ecordell@localmed.com, pete@localmed.com, joel.james@localmed.com',
url='',
license='MIT',
packages=find_packages(exclude=['tests', 'tests.*']),
include_package_data=True,
install_requires=[
'six==1.8.0'
],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
],
)
Remove support for python below 2.7 version
|
from setuptools import find_packages, setup
setup(
name='pyserializer',
version='0.0.2',
description='Simple python serialization library.',
author='LocalMed',
author_email='ecordell@localmed.com, pete@localmed.com, joel.james@localmed.com',
url='',
license='MIT',
packages=find_packages(exclude=['tests', 'tests.*']),
include_package_data=True,
install_requires=[
'six==1.8.0'
],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
],
)
|
<commit_before>from setuptools import find_packages, setup
setup(
name='pyserializer',
version='0.0.2',
description='Simple python serialization library.',
author='LocalMed',
author_email='ecordell@localmed.com, pete@localmed.com, joel.james@localmed.com',
url='',
license='MIT',
packages=find_packages(exclude=['tests', 'tests.*']),
include_package_data=True,
install_requires=[
'six==1.8.0'
],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
],
)
<commit_msg>Remove support for python below 2.7 version<commit_after>
|
from setuptools import find_packages, setup
setup(
name='pyserializer',
version='0.0.2',
description='Simple python serialization library.',
author='LocalMed',
author_email='ecordell@localmed.com, pete@localmed.com, joel.james@localmed.com',
url='',
license='MIT',
packages=find_packages(exclude=['tests', 'tests.*']),
include_package_data=True,
install_requires=[
'six==1.8.0'
],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
],
)
|
from setuptools import find_packages, setup
setup(
name='pyserializer',
version='0.0.2',
description='Simple python serialization library.',
author='LocalMed',
author_email='ecordell@localmed.com, pete@localmed.com, joel.james@localmed.com',
url='',
license='MIT',
packages=find_packages(exclude=['tests', 'tests.*']),
include_package_data=True,
install_requires=[
'six==1.8.0'
],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
],
)
Remove support for python below 2.7 version
from setuptools import find_packages, setup
setup(
name='pyserializer',
version='0.0.2',
description='Simple python serialization library.',
author='LocalMed',
author_email='ecordell@localmed.com, pete@localmed.com, joel.james@localmed.com',
url='',
license='MIT',
packages=find_packages(exclude=['tests', 'tests.*']),
include_package_data=True,
install_requires=[
'six==1.8.0'
],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
],
)
|
<commit_before>from setuptools import find_packages, setup
setup(
name='pyserializer',
version='0.0.2',
description='Simple python serialization library.',
author='LocalMed',
author_email='ecordell@localmed.com, pete@localmed.com, joel.james@localmed.com',
url='',
license='MIT',
packages=find_packages(exclude=['tests', 'tests.*']),
include_package_data=True,
install_requires=[
'six==1.8.0'
],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
],
)
<commit_msg>Remove support for python below 2.7 version<commit_after>from setuptools import find_packages, setup
setup(
name='pyserializer',
version='0.0.2',
description='Simple python serialization library.',
author='LocalMed',
author_email='ecordell@localmed.com, pete@localmed.com, joel.james@localmed.com',
url='',
license='MIT',
packages=find_packages(exclude=['tests', 'tests.*']),
include_package_data=True,
install_requires=[
'six==1.8.0'
],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
],
)
|
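The change in this record only edits trove classifiers, which are informational metadata on PyPI; on their own they do not stop pip from installing the package on an older interpreter. A minimal sketch of the enforcement mechanism, assuming the project actually wants to block installs below 2.7 (python_requires is not part of the original commit):

from setuptools import setup

setup(
    name='pyserializer',
    # Refuse installation on interpreters older than 2.7
    # (honoured by modern pip and setuptools).
    python_requires='>=2.7',
)
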
c2b11e603de32d65f5f5ddf500c4e04d3bcce4fd
|
setup.py
|
setup.py
|
from __future__ import absolute_import, division, print_function, unicode_literals
from setuptools import setup
setup( name='dependency_injection'
, author='Gittip, LLC'
, description="dependency_injection helpers"
, url='https://dependency-injection-py.readthedocs.org'
, version='0.0.0-dev'
, py_modules=['dependency_injection']
, classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Database :: Front-Ends',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
from __future__ import absolute_import, division, print_function, unicode_literals
from setuptools import setup
setup( name='dependency_injection'
, author='Gittip, LLC'
, author_email='support@gittip.com'
, description="dependency_injection helpers"
, url='https://dependency-injection-py.readthedocs.org'
, version='0.0.0-dev'
, py_modules=['dependency_injection']
, classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Database :: Front-Ends',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
Add missing metadata to suppress warning
|
Add missing metadata to suppress warning
Doesn't fix the "503 Backend is unhealthy" error I'm getting from
`python setup.py register`, however.
|
Python
|
mit
|
gratipay/dependency_injection.py,gratipay/dependency_injection.py
|
from __future__ import absolute_import, division, print_function, unicode_literals
from setuptools import setup
setup( name='dependency_injection'
, author='Gittip, LLC'
, description="dependency_injection helpers"
, url='https://dependency-injection-py.readthedocs.org'
, version='0.0.0-dev'
, py_modules=['dependency_injection']
, classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Database :: Front-Ends',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
Add missing metadata to suppress warning
Doesn't fix the "503 Backend is unhealthy" error I'm getting from
`python setup.py register`, however.
|
from __future__ import absolute_import, division, print_function, unicode_literals
from setuptools import setup
setup( name='dependency_injection'
, author='Gittip, LLC'
, author_email='support@gittip.com'
, description="dependency_injection helpers"
, url='https://dependency-injection-py.readthedocs.org'
, version='0.0.0-dev'
, py_modules=['dependency_injection']
, classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Database :: Front-Ends',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
<commit_before>from __future__ import absolute_import, division, print_function, unicode_literals
from setuptools import setup
setup( name='dependency_injection'
, author='Gittip, LLC'
, description="dependency_injection helpers"
, url='https://dependency-injection-py.readthedocs.org'
, version='0.0.0-dev'
, py_modules=['dependency_injection']
, classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Database :: Front-Ends',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
<commit_msg>Add missing metadata to suppress warning
Doesn't fix the "503 Backend is unhealthy" error I'm getting from
`python setup.py register`, however.<commit_after>
|
from __future__ import absolute_import, division, print_function, unicode_literals
from setuptools import setup
setup( name='dependency_injection'
, author='Gittip, LLC'
, author_email='support@gittip.com'
, description="dependency_injection helpers"
, url='https://dependency-injection-py.readthedocs.org'
, version='0.0.0-dev'
, py_modules=['dependency_injection']
, classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Database :: Front-Ends',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
from __future__ import absolute_import, division, print_function, unicode_literals
from setuptools import setup
setup( name='dependency_injection'
, author='Gittip, LLC'
, description="dependency_injection helpers"
, url='https://dependency-injection-py.readthedocs.org'
, version='0.0.0-dev'
, py_modules=['dependency_injection']
, classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Database :: Front-Ends',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
Add missing metadata to suppress warning
Doesn't fix the "503 Backend is unhealthy" error I'm getting from
`python setup.py register`, however.
from __future__ import absolute_import, division, print_function, unicode_literals
from setuptools import setup
setup( name='dependency_injection'
, author='Gittip, LLC'
, author_email='support@gittip.com'
, description="dependency_injection helpers"
, url='https://dependency-injection-py.readthedocs.org'
, version='0.0.0-dev'
, py_modules=['dependency_injection']
, classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Database :: Front-Ends',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
<commit_before>from __future__ import absolute_import, division, print_function, unicode_literals
from setuptools import setup
setup( name='dependency_injection'
, author='Gittip, LLC'
, description="dependency_injection helpers"
, url='https://dependency-injection-py.readthedocs.org'
, version='0.0.0-dev'
, py_modules=['dependency_injection']
, classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Database :: Front-Ends',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
<commit_msg>Add missing metadata to suppress warning
Doesn't fix the "503 Backend is unhealthy" error I'm getting from
`python setup.py register`, however.<commit_after>from __future__ import absolute_import, division, print_function, unicode_literals
from setuptools import setup
setup( name='dependency_injection'
, author='Gittip, LLC'
, author_email='support@gittip.com'
, description="dependency_injection helpers"
, url='https://dependency-injection-py.readthedocs.org'
, version='0.0.0-dev'
, py_modules=['dependency_injection']
, classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Topic :: Database :: Front-Ends',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
2bca6bcf364c2c132a54bb3b1cc27f38601dbad2
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
try:
with open("requirements.txt", "r") as f:
install_requires = [x.strip() for x in f.readlines()]
except IOError:
install_requires = []
setup(name='bcbio_monitor',
# For versioning: http://semver.org/
version='1.0.4',
description="bcbio-monitor is an extension of bcbio-nextgen to visualize its progress",
author='Guillermo Carrasco',
author_email='guille.ch.88@gmail.com',
url='https://github.com/guillermo-carrasco/bcbio-nextgen-monitor',
packages=find_packages(),
include_package_data=True,
keywords=['bcbio', 'bcbio-nextgen', 'bioinformatics', 'genomics'],
zip_safe=True,
entry_points={
'console_scripts': [
'bcbio_monitor = bcbio_monitor.cli:cli',
],
},
install_requires=install_requires
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
try:
with open("requirements.txt", "r") as f:
install_requires = [x.strip() for x in f.readlines()]
except IOError:
install_requires = []
setup(name='bcbio_monitor',
# For versioning: http://semver.org/
version='1.0.5',
description="bcbio-monitor is an extension of bcbio-nextgen to visualize its progress",
author='Guillermo Carrasco',
author_email='guille.ch.88@gmail.com',
url='https://github.com/guillermo-carrasco/bcbio-nextgen-monitor',
packages=find_packages(),
include_package_data=True,
keywords=['bcbio', 'bcbio-nextgen', 'bioinformatics', 'genomics'],
zip_safe=True,
license='MIT',
entry_points={
'console_scripts': [
'bcbio_monitor = bcbio_monitor.cli:cli',
],
},
install_requires=install_requires
)
|
Add license, update minor version
|
Add license, update minor version
|
Python
|
mit
|
guillermo-carrasco/bcbio-nextgen-monitor,guillermo-carrasco/bcbio-nextgen-monitor,guillermo-carrasco/bcbio-nextgen-monitor,guillermo-carrasco/bcbio-nextgen-monitor
|
#!/usr/bin/env python
from setuptools import setup, find_packages
try:
with open("requirements.txt", "r") as f:
install_requires = [x.strip() for x in f.readlines()]
except IOError:
install_requires = []
setup(name='bcbio_monitor',
# For versioning: http://semver.org/
version='1.0.4',
description="bcbio-monitor is an extension of bcbio-nextgen to visualize its progress",
author='Guillermo Carrasco',
author_email='guille.ch.88@gmail.com',
url='https://github.com/guillermo-carrasco/bcbio-nextgen-monitor',
packages=find_packages(),
include_package_data=True,
keywords=['bcbio', 'bcbio-nextgen', 'bioinformatics', 'genomics'],
zip_safe=True,
entry_points={
'console_scripts': [
'bcbio_monitor = bcbio_monitor.cli:cli',
],
},
install_requires=install_requires
)
Add license, update minor version
|
#!/usr/bin/env python
from setuptools import setup, find_packages
try:
with open("requirements.txt", "r") as f:
install_requires = [x.strip() for x in f.readlines()]
except IOError:
install_requires = []
setup(name='bcbio_monitor',
# For versioning: http://semver.org/
version='1.0.5',
description="bcbio-monitor is an extension of bcbio-nextgen to visualize its progress",
author='Guillermo Carrasco',
author_email='guille.ch.88@gmail.com',
url='https://github.com/guillermo-carrasco/bcbio-nextgen-monitor',
packages=find_packages(),
include_package_data=True,
keywords=['bcbio', 'bcbio-nextgen', 'bioinformatics', 'genomics'],
zip_safe=True,
license='MIT',
entry_points={
'console_scripts': [
'bcbio_monitor = bcbio_monitor.cli:cli',
],
},
install_requires=install_requires
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
try:
with open("requirements.txt", "r") as f:
install_requires = [x.strip() for x in f.readlines()]
except IOError:
install_requires = []
setup(name='bcbio_monitor',
# For versioning: http://semver.org/
version='1.0.4',
description="bcbio-monitor is an extension of bcbio-nextgen to visualize its progress",
author='Guillermo Carrasco',
author_email='guille.ch.88@gmail.com',
url='https://github.com/guillermo-carrasco/bcbio-nextgen-monitor',
packages=find_packages(),
include_package_data=True,
keywords=['bcbio', 'bcbio-nextgen', 'bioinformatics', 'genomics'],
zip_safe=True,
entry_points={
'console_scripts': [
'bcbio_monitor = bcbio_monitor.cli:cli',
],
},
install_requires=install_requires
)
<commit_msg>Add license, update minor version<commit_after>
|
#!/usr/bin/env python
from setuptools import setup, find_packages
try:
with open("requirements.txt", "r") as f:
install_requires = [x.strip() for x in f.readlines()]
except IOError:
install_requires = []
setup(name='bcbio_monitor',
# For versioning: http://semver.org/
version='1.0.5',
description="bcbio-monitor is an extension of bcbio-nextgen to visualize its progress",
author='Guillermo Carrasco',
author_email='guille.ch.88@gmail.com',
url='https://github.com/guillermo-carrasco/bcbio-nextgen-monitor',
packages=find_packages(),
include_package_data=True,
keywords=['bcbio', 'bcbio-nextgen', 'bioinformatics', 'genomics'],
zip_safe=True,
license='MIT',
entry_points={
'console_scripts': [
'bcbio_monitor = bcbio_monitor.cli:cli',
],
},
install_requires=install_requires
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
try:
with open("requirements.txt", "r") as f:
install_requires = [x.strip() for x in f.readlines()]
except IOError:
install_requires = []
setup(name='bcbio_monitor',
# For versioning: http://semver.org/
version='1.0.4',
description="bcbio-monitor is an extension of bcbio-nextgen to visualize its progress",
author='Guillermo Carrasco',
author_email='guille.ch.88@gmail.com',
url='https://github.com/guillermo-carrasco/bcbio-nextgen-monitor',
packages=find_packages(),
include_package_data=True,
keywords=['bcbio', 'bcbio-nextgen', 'bioinformatics', 'genomics'],
zip_safe=True,
entry_points={
'console_scripts': [
'bcbio_monitor = bcbio_monitor.cli:cli',
],
},
install_requires=install_requires
)
Add license, update minor version
#!/usr/bin/env python
from setuptools import setup, find_packages
try:
with open("requirements.txt", "r") as f:
install_requires = [x.strip() for x in f.readlines()]
except IOError:
install_requires = []
setup(name='bcbio_monitor',
# For versioning: http://semver.org/
version='1.0.5',
description="bcbio-monitor is an extension of bcbio-nextgen to visualize its progress",
author='Guillermo Carrasco',
author_email='guille.ch.88@gmail.com',
url='https://github.com/guillermo-carrasco/bcbio-nextgen-monitor',
packages=find_packages(),
include_package_data=True,
keywords=['bcbio', 'bcbio-nextgen', 'bioinformatics', 'genomics'],
zip_safe=True,
license='MIT',
entry_points={
'console_scripts': [
'bcbio_monitor = bcbio_monitor.cli:cli',
],
},
install_requires=install_requires
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
try:
with open("requirements.txt", "r") as f:
install_requires = [x.strip() for x in f.readlines()]
except IOError:
install_requires = []
setup(name='bcbio_monitor',
# For versioning: http://semver.org/
version='1.0.4',
description="bcbio-monitor is an extension of bcbio-nextgen to visualize its progress",
author='Guillermo Carrasco',
author_email='guille.ch.88@gmail.com',
url='https://github.com/guillermo-carrasco/bcbio-nextgen-monitor',
packages=find_packages(),
include_package_data=True,
keywords=['bcbio', 'bcbio-nextgen', 'bioinformatics', 'genomics'],
zip_safe=True,
entry_points={
'console_scripts': [
'bcbio_monitor = bcbio_monitor.cli:cli',
],
},
install_requires=install_requires
)
<commit_msg>Add license, update minor version<commit_after>#!/usr/bin/env python
from setuptools import setup, find_packages
try:
with open("requirements.txt", "r") as f:
install_requires = [x.strip() for x in f.readlines()]
except IOError:
install_requires = []
setup(name='bcbio_monitor',
# For versioning: http://semver.org/
version='1.0.5',
description="bcbio-monitor is an extension of bcbio-nextgen to visualize its progress",
author='Guillermo Carrasco',
author_email='guille.ch.88@gmail.com',
url='https://github.com/guillermo-carrasco/bcbio-nextgen-monitor',
packages=find_packages(),
include_package_data=True,
keywords=['bcbio', 'bcbio-nextgen', 'bioinformatics', 'genomics'],
zip_safe=True,
license='MIT',
entry_points={
'console_scripts': [
'bcbio_monitor = bcbio_monitor.cli:cli',
],
},
install_requires=install_requires
)
|
8fb48cd9133ac26323ca080b6c7c820f7b729e05
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
def listify(filename):
return [line for line in open(filename, 'r').read().split('\n') if line]
setup(
name="django-dirtyfields",
version="1.4",
url='http://github.com/romgar/django-dirtyfields',
license='BSD',
description=("Tracking dirty fields on a Django model instance "
"(actively maintained)"),
long_description=open('README.rst', 'r').read(),
author='Romain Garrigues',
packages=find_packages('src'),
package_dir={'': 'src'},
install_requires=listify('requirements.txt'),
classifiers=listify('CLASSIFIERS.txt')
)
|
from setuptools import setup, find_packages
def listify(filename):
with open(filename, "r") as f:
return list(filter(None, f.read().splitlines()))
setup(
name="django-dirtyfields",
version="1.4",
url='http://github.com/romgar/django-dirtyfields',
license='BSD',
description=("Tracking dirty fields on a Django model instance "
"(actively maintained)"),
long_description=open('README.rst', 'r').read(),
author='Romain Garrigues',
packages=find_packages('src'),
package_dir={'': 'src'},
install_requires=listify('requirements.txt'),
classifiers=listify('CLASSIFIERS.txt')
)
|
Fix listify to close file object
|
Fix listify to close file object
|
Python
|
bsd-3-clause
|
romgar/django-dirtyfields,smn/django-dirtyfields
|
from setuptools import setup, find_packages
def listify(filename):
return [line for line in open(filename, 'r').read().split('\n') if line]
setup(
name="django-dirtyfields",
version="1.4",
url='http://github.com/romgar/django-dirtyfields',
license='BSD',
description=("Tracking dirty fields on a Django model instance "
"(actively maintained)"),
long_description=open('README.rst', 'r').read(),
author='Romain Garrigues',
packages=find_packages('src'),
package_dir={'': 'src'},
install_requires=listify('requirements.txt'),
classifiers=listify('CLASSIFIERS.txt')
)
Fix listify to close file object
|
from setuptools import setup, find_packages
def listify(filename):
with open(filename, "r") as f:
return list(filter(None, f.read().splitlines()))
setup(
name="django-dirtyfields",
version="1.4",
url='http://github.com/romgar/django-dirtyfields',
license='BSD',
description=("Tracking dirty fields on a Django model instance "
"(actively maintained)"),
long_description=open('README.rst', 'r').read(),
author='Romain Garrigues',
packages=find_packages('src'),
package_dir={'': 'src'},
install_requires=listify('requirements.txt'),
classifiers=listify('CLASSIFIERS.txt')
)
|
<commit_before>from setuptools import setup, find_packages
def listify(filename):
return [line for line in open(filename, 'r').read().split('\n') if line]
setup(
name="django-dirtyfields",
version="1.4",
url='http://github.com/romgar/django-dirtyfields',
license='BSD',
description=("Tracking dirty fields on a Django model instance "
"(actively maintained)"),
long_description=open('README.rst', 'r').read(),
author='Romain Garrigues',
packages=find_packages('src'),
package_dir={'': 'src'},
install_requires=listify('requirements.txt'),
classifiers=listify('CLASSIFIERS.txt')
)
<commit_msg>Fix listify to close file object<commit_after>
|
from setuptools import setup, find_packages
def listify(filename):
with open(filename, "r") as f:
return list(filter(None, f.read().splitlines()))
setup(
name="django-dirtyfields",
version="1.4",
url='http://github.com/romgar/django-dirtyfields',
license='BSD',
description=("Tracking dirty fields on a Django model instance "
"(actively maintained)"),
long_description=open('README.rst', 'r').read(),
author='Romain Garrigues',
packages=find_packages('src'),
package_dir={'': 'src'},
install_requires=listify('requirements.txt'),
classifiers=listify('CLASSIFIERS.txt')
)
|
from setuptools import setup, find_packages
def listify(filename):
return [line for line in open(filename, 'r').read().split('\n') if line]
setup(
name="django-dirtyfields",
version="1.4",
url='http://github.com/romgar/django-dirtyfields',
license='BSD',
description=("Tracking dirty fields on a Django model instance "
"(actively maintained)"),
long_description=open('README.rst', 'r').read(),
author='Romain Garrigues',
packages=find_packages('src'),
package_dir={'': 'src'},
install_requires=listify('requirements.txt'),
classifiers=listify('CLASSIFIERS.txt')
)
Fix listify to close file object
from setuptools import setup, find_packages
def listify(filename):
with open(filename, "r") as f:
return list(filter(None, f.read().splitlines()))
setup(
name="django-dirtyfields",
version="1.4",
url='http://github.com/romgar/django-dirtyfields',
license='BSD',
description=("Tracking dirty fields on a Django model instance "
"(actively maintained)"),
long_description=open('README.rst', 'r').read(),
author='Romain Garrigues',
packages=find_packages('src'),
package_dir={'': 'src'},
install_requires=listify('requirements.txt'),
classifiers=listify('CLASSIFIERS.txt')
)
|
<commit_before>from setuptools import setup, find_packages
def listify(filename):
return [line for line in open(filename, 'r').read().split('\n') if line]
setup(
name="django-dirtyfields",
version="1.4",
url='http://github.com/romgar/django-dirtyfields',
license='BSD',
description=("Tracking dirty fields on a Django model instance "
"(actively maintained)"),
long_description=open('README.rst', 'r').read(),
author='Romain Garrigues',
packages=find_packages('src'),
package_dir={'': 'src'},
install_requires=listify('requirements.txt'),
classifiers=listify('CLASSIFIERS.txt')
)
<commit_msg>Fix listify to close file object<commit_after>from setuptools import setup, find_packages
def listify(filename):
with open(filename, "r") as f:
return list(filter(None, f.read().splitlines()))
setup(
name="django-dirtyfields",
version="1.4",
url='http://github.com/romgar/django-dirtyfields',
license='BSD',
description=("Tracking dirty fields on a Django model instance "
"(actively maintained)"),
long_description=open('README.rst', 'r').read(),
author='Romain Garrigues',
packages=find_packages('src'),
package_dir={'': 'src'},
install_requires=listify('requirements.txt'),
classifiers=listify('CLASSIFIERS.txt')
)
|
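The fix in this record closes the handle opened by listify, but the long_description=open('README.rst', 'r').read() call in the same setup() still leaves its file to the garbage collector. A minimal sketch of extending the same context-manager pattern to that call (read_file is a hypothetical helper, not part of the commit):

def read_file(filename):
    # The with-block guarantees the handle is closed, even if read() raises.
    with open(filename, "r") as f:
        return f.read()

# Usage inside setup(): long_description=read_file('README.rst')
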
ead3b9d90132e95bdd72b40e6c58112de81ded8d
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name="eodatasets",
version="0.1b",
packages=find_packages(),
install_requires=[
'click',
'python-dateutil',
'gdal',
'numpy',
'pathlib',
'pyyaml',
],
entry_points='''
[console_scripts]
eod-package=eodatasets.scripts.package:cli
eod-generate-browse=eodatasets.scripts.generatebrowse:cli
''',
)
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
version = "0.1b"
# Append TeamCity build number if it gives us one.
if 'BUILD_NUMBER' in os.environ and version.endswith('b'):
version += '' + os.environ['BUILD_NUMBER']
setup(
name="eodatasets",
version=version,
packages=find_packages(),
install_requires=[
'click',
'python-dateutil',
'gdal',
'numpy',
'pathlib',
'pyyaml',
],
entry_points='''
[console_scripts]
eod-package=eodatasets.scripts.package:cli
eod-generate-browse=eodatasets.scripts.generatebrowse:cli
''',
)
|
Append build number to version if available.
|
Append build number to version if available.
|
Python
|
apache-2.0
|
GeoscienceAustralia/eo-datasets,GeoscienceAustralia/eo-datasets,jeremyh/eo-datasets,jeremyh/eo-datasets
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name="eodatasets",
version="0.1b",
packages=find_packages(),
install_requires=[
'click',
'python-dateutil',
'gdal',
'numpy',
'pathlib',
'pyyaml',
],
entry_points='''
[console_scripts]
eod-package=eodatasets.scripts.package:cli
eod-generate-browse=eodatasets.scripts.generatebrowse:cli
''',
)
Append build number to version if available.
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
version = "0.1b"
# Append TeamCity build number if it gives us one.
if 'BUILD_NUMBER' in os.environ and version.endswith('b'):
version += '' + os.environ['BUILD_NUMBER']
setup(
name="eodatasets",
version=version,
packages=find_packages(),
install_requires=[
'click',
'python-dateutil',
'gdal',
'numpy',
'pathlib',
'pyyaml',
],
entry_points='''
[console_scripts]
eod-package=eodatasets.scripts.package:cli
eod-generate-browse=eodatasets.scripts.generatebrowse:cli
''',
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name="eodatasets",
version="0.1b",
packages=find_packages(),
install_requires=[
'click',
'python-dateutil',
'gdal',
'numpy',
'pathlib',
'pyyaml',
],
entry_points='''
[console_scripts]
eod-package=eodatasets.scripts.package:cli
eod-generate-browse=eodatasets.scripts.generatebrowse:cli
''',
)
<commit_msg>Append build number to version if available.<commit_after>
|
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
version = "0.1b"
# Append TeamCity build number if it gives us one.
if 'BUILD_NUMBER' in os.environ and version.endswith('b'):
version += '' + os.environ['BUILD_NUMBER']
setup(
name="eodatasets",
version=version,
packages=find_packages(),
install_requires=[
'click',
'python-dateutil',
'gdal',
'numpy',
'pathlib',
'pyyaml',
],
entry_points='''
[console_scripts]
eod-package=eodatasets.scripts.package:cli
eod-generate-browse=eodatasets.scripts.generatebrowse:cli
''',
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name="eodatasets",
version="0.1b",
packages=find_packages(),
install_requires=[
'click',
'python-dateutil',
'gdal',
'numpy',
'pathlib',
'pyyaml',
],
entry_points='''
[console_scripts]
eod-package=eodatasets.scripts.package:cli
eod-generate-browse=eodatasets.scripts.generatebrowse:cli
''',
)
Append build number to version if available.
#!/usr/bin/env python
import os
from setuptools import setup, find_packages
version = "0.1b"
# Append TeamCity build number if it gives us one.
if 'BUILD_NUMBER' in os.environ and version.endswith('b'):
version += '' + os.environ['BUILD_NUMBER']
setup(
name="eodatasets",
version=version,
packages=find_packages(),
install_requires=[
'click',
'python-dateutil',
'gdal',
'numpy',
'pathlib',
'pyyaml',
],
entry_points='''
[console_scripts]
eod-package=eodatasets.scripts.package:cli
eod-generate-browse=eodatasets.scripts.generatebrowse:cli
''',
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name="eodatasets",
version="0.1b",
packages=find_packages(),
install_requires=[
'click',
'python-dateutil',
'gdal',
'numpy',
'pathlib',
'pyyaml',
],
entry_points='''
[console_scripts]
eod-package=eodatasets.scripts.package:cli
eod-generate-browse=eodatasets.scripts.generatebrowse:cli
''',
)
<commit_msg>Append build number to version if available.<commit_after>#!/usr/bin/env python
import os
from setuptools import setup, find_packages
version = "0.1b"
# Append TeamCity build number if it gives us one.
if 'BUILD_NUMBER' in os.environ and version.endswith('b'):
version += '' + os.environ['BUILD_NUMBER']
setup(
name="eodatasets",
version=version,
packages=find_packages(),
install_requires=[
'click',
'python-dateutil',
'gdal',
'numpy',
'pathlib',
'pyyaml',
],
entry_points='''
[console_scripts]
eod-package=eodatasets.scripts.package:cli
eod-generate-browse=eodatasets.scripts.generatebrowse:cli
''',
)
|
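For context on the versioning logic in this record: the suffix is appended only when the base version ends in 'b', so a CI build number becomes the beta serial of the release (PEP 440 reads '0.1b42' as beta 42 of version 0.1). A small sketch of the behaviour, with 42 as an illustrative build number:

import os

version = "0.1b"
os.environ['BUILD_NUMBER'] = '42'   # simulate the TeamCity environment
if 'BUILD_NUMBER' in os.environ and version.endswith('b'):
    version += '' + os.environ['BUILD_NUMBER']
print(version)  # -> 0.1b42
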
af955bf1189d4af612fbdb2bab94d6c39bb489b3
|
setup.py
|
setup.py
|
import setuptools
setuptools.setup(
name="sc2reader",
version="0.2.0",
license="MIT",
author="Graylin Kim",
author_email="graylin.kim@gmail.com",
url="https://github.com/GraylinKim/sc2reader",
description="Utility for parsing Starcraft II replay files",
long_description=''.join(open("README.txt").readlines()),
keywords=["starcraft 2","sc2","parser","replay"],
classifiers=[
"Environment :: Console",
"Development Status :: 4 - Beta",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Environment :: Other Environment",
"Topic :: Utilities",
"Topic :: Software Development :: Libraries",
"Topic :: Games/Entertainment :: Real Time Strategy",
],
entry_points={
'console_scripts': [
'sc2autosave = sc2reader.scripts.sc2autosave:main',
'sc2printer = sc2reader.scripts.sc2printer:main',
'sc2store = sc2reader.scripts.sc2store:main',
]
},
requires=['mpyq'],
install_requires=['mpyq==0.1.5'],
packages=['sc2reader', 'sc2reader.scripts'],
)
|
import setuptools, sys
setuptools.setup(
name="sc2reader",
version="0.2.0",
license="MIT",
author="Graylin Kim",
author_email="graylin.kim@gmail.com",
url="https://github.com/GraylinKim/sc2reader",
description="Utility for parsing Starcraft II replay files",
long_description=''.join(open("README.txt").readlines()),
keywords=["starcraft 2","sc2","parser","replay"],
classifiers=[
"Environment :: Console",
"Development Status :: 4 - Beta",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Environment :: Other Environment",
"Topic :: Utilities",
"Topic :: Software Development :: Libraries",
"Topic :: Games/Entertainment :: Real Time Strategy",
],
entry_points={
'console_scripts': [
'sc2autosave = sc2reader.scripts.sc2autosave:main',
'sc2printer = sc2reader.scripts.sc2printer:main',
'sc2store = sc2reader.scripts.sc2store:main',
]
},
install_requires=['mpyq','argparse'] if float(sys.version[:3]) < 2.7 else ['mpyq'],
packages=['sc2reader', 'sc2reader.scripts'],
)
|
Add argparse as a dependency for python versions < 2.7
|
Add argparse as a dependency for python versions < 2.7
|
Python
|
mit
|
StoicLoofah/sc2reader,ggtracker/sc2reader,vlaufer/sc2reader,GraylinKim/sc2reader,GraylinKim/sc2reader,ggtracker/sc2reader,dsjoerg/sc2reader,dsjoerg/sc2reader,StoicLoofah/sc2reader,vlaufer/sc2reader
|
import setuptools
setuptools.setup(
name="sc2reader",
version="0.2.0",
license="MIT",
author="Graylin Kim",
author_email="graylin.kim@gmail.com",
url="https://github.com/GraylinKim/sc2reader",
description="Utility for parsing Starcraft II replay files",
long_description=''.join(open("README.txt").readlines()),
keywords=["starcraft 2","sc2","parser","replay"],
classifiers=[
"Environment :: Console",
"Development Status :: 4 - Beta",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Environment :: Other Environment",
"Topic :: Utilities",
"Topic :: Software Development :: Libraries",
"Topic :: Games/Entertainment :: Real Time Strategy",
],
entry_points={
'console_scripts': [
'sc2autosave = sc2reader.scripts.sc2autosave:main',
'sc2printer = sc2reader.scripts.sc2printer:main',
'sc2store = sc2reader.scripts.sc2store:main',
]
},
requires=['mpyq'],
install_requires=['mpyq==0.1.5'],
packages=['sc2reader', 'sc2reader.scripts'],
)
Add argparse as a dependency for python versions < 2.7
|
import setuptools, sys
setuptools.setup(
name="sc2reader",
version="0.2.0",
license="MIT",
author="Graylin Kim",
author_email="graylin.kim@gmail.com",
url="https://github.com/GraylinKim/sc2reader",
description="Utility for parsing Starcraft II replay files",
long_description=''.join(open("README.txt").readlines()),
keywords=["starcraft 2","sc2","parser","replay"],
classifiers=[
"Environment :: Console",
"Development Status :: 4 - Beta",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Environment :: Other Environment",
"Topic :: Utilities",
"Topic :: Software Development :: Libraries",
"Topic :: Games/Entertainment :: Real Time Strategy",
],
entry_points={
'console_scripts': [
'sc2autosave = sc2reader.scripts.sc2autosave:main',
'sc2printer = sc2reader.scripts.sc2printer:main',
'sc2store = sc2reader.scripts.sc2store:main',
]
},
install_requires=['mpyq','argparse'] if float(sys.version[:3]) < 2.7 else ['mpyq'],
packages=['sc2reader', 'sc2reader.scripts'],
)
|
<commit_before>import setuptools
setuptools.setup(
name="sc2reader",
version="0.2.0",
license="MIT",
author="Graylin Kim",
author_email="graylin.kim@gmail.com",
url="https://github.com/GraylinKim/sc2reader",
description="Utility for parsing Starcraft II replay files",
long_description=''.join(open("README.txt").readlines()),
keywords=["starcraft 2","sc2","parser","replay"],
classifiers=[
"Environment :: Console",
"Development Status :: 4 - Beta",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Environment :: Other Environment",
"Topic :: Utilities",
"Topic :: Software Development :: Libraries",
"Topic :: Games/Entertainment :: Real Time Strategy",
],
entry_points={
'console_scripts': [
'sc2autosave = sc2reader.scripts.sc2autosave:main',
'sc2printer = sc2reader.scripts.sc2printer:main',
'sc2store = sc2reader.scripts.sc2store:main',
]
},
requires=['mpyq'],
install_requires=['mpyq==0.1.5'],
packages=['sc2reader', 'sc2reader.scripts'],
)
<commit_msg>Add argparse as a dependency for python versions < 2.7<commit_after>
|
import setuptools, sys
setuptools.setup(
name="sc2reader",
version="0.2.0",
license="MIT",
author="Graylin Kim",
author_email="graylin.kim@gmail.com",
url="https://github.com/GraylinKim/sc2reader",
description="Utility for parsing Starcraft II replay files",
long_description=''.join(open("README.txt").readlines()),
keywords=["starcraft 2","sc2","parser","replay"],
classifiers=[
"Environment :: Console",
"Development Status :: 4 - Beta",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Environment :: Other Environment",
"Topic :: Utilities",
"Topic :: Software Development :: Libraries",
"Topic :: Games/Entertainment :: Real Time Strategy",
],
entry_points={
'console_scripts': [
'sc2autosave = sc2reader.scripts.sc2autosave:main',
'sc2printer = sc2reader.scripts.sc2printer:main',
'sc2store = sc2reader.scripts.sc2store:main',
]
},
install_requires=['mpyq','argparse'] if float(sys.version[:3]) < 2.7 else ['mpyq'],
packages=['sc2reader', 'sc2reader.scripts'],
)
|
import setuptools
setuptools.setup(
name="sc2reader",
version="0.2.0",
license="MIT",
author="Graylin Kim",
author_email="graylin.kim@gmail.com",
url="https://github.com/GraylinKim/sc2reader",
description="Utility for parsing Starcraft II replay files",
long_description=''.join(open("README.txt").readlines()),
keywords=["starcraft 2","sc2","parser","replay"],
classifiers=[
"Environment :: Console",
"Development Status :: 4 - Beta",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Environment :: Other Environment",
"Topic :: Utilities",
"Topic :: Software Development :: Libraries",
"Topic :: Games/Entertainment :: Real Time Strategy",
],
entry_points={
'console_scripts': [
'sc2autosave = sc2reader.scripts.sc2autosave:main',
'sc2printer = sc2reader.scripts.sc2printer:main',
'sc2store = sc2reader.scripts.sc2store:main',
]
},
requires=['mpyq'],
install_requires=['mpyq==0.1.5'],
packages=['sc2reader', 'sc2reader.scripts'],
)
Add argparse as a dependency for python versions < 2.7
import setuptools, sys
setuptools.setup(
name="sc2reader",
version="0.2.0",
license="MIT",
author="Graylin Kim",
author_email="graylin.kim@gmail.com",
url="https://github.com/GraylinKim/sc2reader",
description="Utility for parsing Starcraft II replay files",
long_description=''.join(open("README.txt").readlines()),
keywords=["starcraft 2","sc2","parser","replay"],
classifiers=[
"Environment :: Console",
"Development Status :: 4 - Beta",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Environment :: Other Environment",
"Topic :: Utilities",
"Topic :: Software Development :: Libraries",
"Topic :: Games/Entertainment :: Real Time Strategy",
],
entry_points={
'console_scripts': [
'sc2autosave = sc2reader.scripts.sc2autosave:main',
'sc2printer = sc2reader.scripts.sc2printer:main',
'sc2store = sc2reader.scripts.sc2store:main',
]
},
install_requires=['mpyq','argparse'] if float(sys.version[:3]) < 2.7 else ['mpyq'],
packages=['sc2reader', 'sc2reader.scripts'],
)
|
<commit_before>import setuptools
setuptools.setup(
name="sc2reader",
version="0.2.0",
license="MIT",
author="Graylin Kim",
author_email="graylin.kim@gmail.com",
url="https://github.com/GraylinKim/sc2reader",
description="Utility for parsing Starcraft II replay files",
long_description=''.join(open("README.txt").readlines()),
keywords=["starcraft 2","sc2","parser","replay"],
classifiers=[
"Environment :: Console",
"Development Status :: 4 - Beta",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Environment :: Other Environment",
"Topic :: Utilities",
"Topic :: Software Development :: Libraries",
"Topic :: Games/Entertainment :: Real Time Strategy",
],
entry_points={
'console_scripts': [
'sc2autosave = sc2reader.scripts.sc2autosave:main',
'sc2printer = sc2reader.scripts.sc2printer:main',
'sc2store = sc2reader.scripts.sc2store:main',
]
},
requires=['mpyq'],
install_requires=['mpyq==0.1.5'],
packages=['sc2reader', 'sc2reader.scripts'],
)
<commit_msg>Add argparse as a dependency for python versions < 2.7<commit_after>import setuptools, sys
setuptools.setup(
name="sc2reader",
version="0.2.0",
license="MIT",
author="Graylin Kim",
author_email="graylin.kim@gmail.com",
url="https://github.com/GraylinKim/sc2reader",
description="Utility for parsing Starcraft II replay files",
long_description=''.join(open("README.txt").readlines()),
keywords=["starcraft 2","sc2","parser","replay"],
classifiers=[
"Environment :: Console",
"Development Status :: 4 - Beta",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Environment :: Other Environment",
"Topic :: Utilities",
"Topic :: Software Development :: Libraries",
"Topic :: Games/Entertainment :: Real Time Strategy",
],
entry_points={
'console_scripts': [
'sc2autosave = sc2reader.scripts.sc2autosave:main',
'sc2printer = sc2reader.scripts.sc2printer:main',
'sc2store = sc2reader.scripts.sc2store:main',
]
},
install_requires=['mpyq','argparse'] if float(sys.version[:3]) < 2.7 else ['mpyq'],
packages=['sc2reader', 'sc2reader.scripts'],
)
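The conditional install_requires in the record above works, but parsing sys.version as a float is fragile (sys.version_info < (2, 7) is the sturdier runtime check), and newer packaging tools let pip decide at install time through a PEP 508 environment marker. The sketch below is illustrative only, not taken from the sc2reader repository; the package name is hypothetical and the marker syntax assumes a reasonably recent setuptools/pip.
# a minimal sketch: the same conditional dependency written as a PEP 508
# environment marker, evaluated by the installer rather than by setup.py
from setuptools import setup

setup(
    name="example-package",   # hypothetical name, not sc2reader
    version="0.0.1",
    install_requires=[
        "mpyq",
        'argparse; python_version < "2.7"',  # only installed on old interpreters
    ],
)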
|
bf73317955f372246c30fc82d977a247dbc839f6
|
setup.py
|
setup.py
|
# coding=utf-8
from setuptools import setup, find_packages
from codecs import open
import os
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
return open(path, encoding='utf-8').read()
setup(
name="Kool",
version="0.0.1",
packages=find_packages(),
# development metadata
zip_safe=False,
# metadata for upload to PyPI
author="Antony Orenge",
author_email="orenge@ut.ee",
description="Kool is an open source platform for online classroom management. ",
license="MIT",
keywords="education learning database nosql",
url="https://github.com/edasi/kool",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Education",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: Implementation :: PyPy",
"Operating System :: OS Independent"
],
long_description=read('README.md'),
)
|
# coding=utf-8
from setuptools import setup, find_packages
from codecs import open
import os
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
return open(path, encoding='utf-8').read()
setup(
name="Kool",
version="0.0.1",
packages=find_packages(),
# development metadata
zip_safe=False,
# metadata for upload to PyPI
author="Antony Orenge",
author_email="orenge@ut.ee",
description="Kool is an open source platform for online classroom management. ",
license="MIT",
keywords="education learning database nosql",
url="https://github.com/edasi/kool",
python_requires='>=3',
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Education",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: Implementation :: PyPy",
"Operating System :: OS Independent"
],
long_description=read('README.md'),
)
|
Add config to support python >= 3
|
Add config to support python >= 3
|
Python
|
mit
|
edasi/kool
|
# coding=utf-8
from setuptools import setup, find_packages
from codecs import open
import os
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
return open(path, encoding='utf-8').read()
setup(
name="Kool",
version="0.0.1",
packages=find_packages(),
# development metadata
zip_safe=False,
# metadata for upload to PyPI
author="Antony Orenge",
author_email="orenge@ut.ee",
description="Kool is an open source platform for online classroom management. ",
license="MIT",
keywords="education learning database nosql",
url="https://github.com/edasi/kool",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Education",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: Implementation :: PyPy",
"Operating System :: OS Independent"
],
long_description=read('README.md'),
)
Add config to support python >= 3
|
# coding=utf-8
from setuptools import setup, find_packages
from codecs import open
import os
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
return open(path, encoding='utf-8').read()
setup(
name="Kool",
version="0.0.1",
packages=find_packages(),
# development metadata
zip_safe=False,
# metadata for upload to PyPI
author="Antony Orenge",
author_email="orenge@ut.ee",
description="Kool is an open source platform for online classroom management. ",
license="MIT",
keywords="education learning database nosql",
url="https://github.com/edasi/kool",
python_requires='>=3',
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Education",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: Implementation :: PyPy",
"Operating System :: OS Independent"
],
long_description=read('README.md'),
)
|
<commit_before># coding=utf-8
from setuptools import setup, find_packages
from codecs import open
import os
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
return open(path, encoding='utf-8').read()
setup(
name="Kool",
version="0.0.1",
packages=find_packages(),
# development metadata
zip_safe=False,
# metadata for upload to PyPI
author="Antony Orenge",
author_email="orenge@ut.ee",
description="Kool is an open source platform for online classroom management. ",
license="MIT",
keywords="education learning database nosql",
url="https://github.com/edasi/kool",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Education",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: Implementation :: PyPy",
"Operating System :: OS Independent"
],
long_description=read('README.md'),
)
<commit_msg>Add config to support python >= 3<commit_after>
|
# coding=utf-8
from setuptools import setup, find_packages
from codecs import open
import os
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
return open(path, encoding='utf-8').read()
setup(
name="Kool",
version="0.0.1",
packages=find_packages(),
# development metadata
zip_safe=False,
# metadata for upload to PyPI
author="Antony Orenge",
author_email="orenge@ut.ee",
description="Kool is an open source platform for online classroom management. ",
license="MIT",
keywords="education learning database nosql",
url="https://github.com/edasi/kool",
python_requires='>=3',
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Education",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: Implementation :: PyPy",
"Operating System :: OS Independent"
],
long_description=read('README.md'),
)
|
# coding=utf-8
from setuptools import setup, find_packages
from codecs import open
import os
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
return open(path, encoding='utf-8').read()
setup(
name="Kool",
version="0.0.1",
packages=find_packages(),
# development metadata
zip_safe=False,
# metadata for upload to PyPI
author="Antony Orenge",
author_email="orenge@ut.ee",
description="Kool is an open source platform for online classroom management. ",
license="MIT",
keywords="education learning database nosql",
url="https://github.com/edasi/kool",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Education",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: Implementation :: PyPy",
"Operating System :: OS Independent"
],
long_description=read('README.md'),
)
Add config to support python >= 3
# coding=utf-8
from setuptools import setup, find_packages
from codecs import open
import os
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
return open(path, encoding='utf-8').read()
setup(
name="Kool",
version="0.0.1",
packages=find_packages(),
# development metadata
zip_safe=False,
# metadata for upload to PyPI
author="Antony Orenge",
author_email="orenge@ut.ee",
description="Kool is an open source platform for online classroom management. ",
license="MIT",
keywords="education learning database nosql",
url="https://github.com/edasi/kool",
python_requires='>=3',
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Education",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: Implementation :: PyPy",
"Operating System :: OS Independent"
],
long_description=read('README.md'),
)
|
<commit_before># coding=utf-8
from setuptools import setup, find_packages
from codecs import open
import os
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
return open(path, encoding='utf-8').read()
setup(
name="Kool",
version="0.0.1",
packages=find_packages(),
# development metadata
zip_safe=False,
# metadata for upload to PyPI
author="Antony Orenge",
author_email="orenge@ut.ee",
description="Kool is an open source platform for online classroom management. ",
license="MIT",
keywords="education learning database nosql",
url="https://github.com/edasi/kool",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Education",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: Implementation :: PyPy",
"Operating System :: OS Independent"
],
long_description=read('README.md'),
)
<commit_msg>Add config to support python >= 3<commit_after># coding=utf-8
from setuptools import setup, find_packages
from codecs import open
import os
def read(fname):
path = os.path.join(os.path.dirname(__file__), fname)
return open(path, encoding='utf-8').read()
setup(
name="Kool",
version="0.0.1",
packages=find_packages(),
# development metadata
zip_safe=False,
# metadata for upload to PyPI
author="Antony Orenge",
author_email="orenge@ut.ee",
description="Kool is an open source platform for online classroom management. ",
license="MIT",
keywords="education learning database nosql",
url="https://github.com/edasi/kool",
python_requires='>=3',
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Topic :: Education",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: Implementation :: PyPy",
"Operating System :: OS Independent"
],
long_description=read('README.md'),
)
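python_requires='>=3' in the diff above still admits Python 3.0 through 3.2, while the classifiers only advertise 3.3 and later. A tighter specifier keeps pip from offering the package to interpreters the project does not claim to support. The snippet below is a sketch under that assumption, not the Kool project's actual configuration.
# illustrative only: python_requires aligned with the lowest classifier version
from setuptools import setup, find_packages

setup(
    name="example-package",    # hypothetical name
    version="0.0.1",
    packages=find_packages(),
    python_requires=">=3.3",   # matches a classifier list that starts at 3.3
)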
|
6db55e993cb4a93aeede2cd9aff244e2c517fa06
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='firebase-token-generator',
version='1.2',
author='Greg Soltis',
author_email='greg@firebase.com',
py_modules=['firebase_token_generator'],
license='LICENSE',
url='https://github.com/firebase/PyFirebaseTokenGenerator',
description='A utility to generate signed Firebase Authentication Tokens',
long_description=open('README.md').read()
)
|
from distutils.core import setup
setup(
name='firebase-token-generator',
version='1.2',
author='Greg Soltis',
author_email='greg@firebase.com',
py_modules=['firebase_token_generator'],
license='LICENSE',
url='https://github.com/firebase/firebase-token-generator-python',
description='A utility to generate signed Firebase Authentication Tokens',
long_description=open('README.md').read()
)
|
Fix repo URL after rename
|
Fix repo URL after rename
|
Python
|
mit
|
googlearchive/firebase-token-generator-python
|
from distutils.core import setup
setup(
name='firebase-token-generator',
version='1.2',
author='Greg Soltis',
author_email='greg@firebase.com',
py_modules=['firebase_token_generator'],
license='LICENSE',
url='https://github.com/firebase/PyFirebaseTokenGenerator',
description='A utility to generate signed Firebase Authentication Tokens',
long_description=open('README.md').read()
)
Fix repo URL after rename
|
from distutils.core import setup
setup(
name='firebase-token-generator',
version='1.2',
author='Greg Soltis',
author_email='greg@firebase.com',
py_modules=['firebase_token_generator'],
license='LICENSE',
url='https://github.com/firebase/firebase-token-generator-python',
description='A utility to generate signed Firebase Authentication Tokens',
long_description=open('README.md').read()
)
|
<commit_before>from distutils.core import setup
setup(
name='firebase-token-generator',
version='1.2',
author='Greg Soltis',
author_email='greg@firebase.com',
py_modules=['firebase_token_generator'],
license='LICENSE',
url='https://github.com/firebase/PyFirebaseTokenGenerator',
description='A utility to generate signed Firebase Authentication Tokens',
long_description=open('README.md').read()
)<commit_msg>Fix repo URL after rename<commit_after>
|
from distutils.core import setup
setup(
name='firebase-token-generator',
version='1.2',
author='Greg Soltis',
author_email='greg@firebase.com',
py_modules=['firebase_token_generator'],
license='LICENSE',
url='https://github.com/firebase/firebase-token-generator-python',
description='A utility to generate signed Firebase Authentication Tokens',
long_description=open('README.md').read()
)
|
from distutils.core import setup
setup(
name='firebase-token-generator',
version='1.2',
author='Greg Soltis',
author_email='greg@firebase.com',
py_modules=['firebase_token_generator'],
license='LICENSE',
url='https://github.com/firebase/PyFirebaseTokenGenerator',
description='A utility to generate signed Firebase Authentication Tokens',
long_description=open('README.md').read()
)
Fix repo URL after rename
from distutils.core import setup
setup(
name='firebase-token-generator',
version='1.2',
author='Greg Soltis',
author_email='greg@firebase.com',
py_modules=['firebase_token_generator'],
license='LICENSE',
url='https://github.com/firebase/firebase-token-generator-python',
description='A utility to generate signed Firebase Authentication Tokens',
long_description=open('README.md').read()
)
|
<commit_before>from distutils.core import setup
setup(
name='firebase-token-generator',
version='1.2',
author='Greg Soltis',
author_email='greg@firebase.com',
py_modules=['firebase_token_generator'],
license='LICENSE',
url='https://github.com/firebase/PyFirebaseTokenGenerator',
description='A utility to generate signed Firebase Authentication Tokens',
long_description=open('README.md').read()
)<commit_msg>Fix repo URL after rename<commit_after>from distutils.core import setup
setup(
name='firebase-token-generator',
version='1.2',
author='Greg Soltis',
author_email='greg@firebase.com',
py_modules=['firebase_token_generator'],
license='LICENSE',
url='https://github.com/firebase/firebase-token-generator-python',
description='A utility to generate signed Firebase Authentication Tokens',
long_description=open('README.md').read()
)
|
e48e8e28da5b6f4e7ccaad7a4fd1b4d0e9eff863
|
setup.py
|
setup.py
|
from setuptools import find_packages, setup
setup(
name="redshift_etl",
version="1.0.1",
author="Harry's Data Engineering and Contributors",
description="ETL code to ferry data from PostgreSQL databases (or S3 files) to Redshift cluster",
license="MIT",
keywords="redshift postgresql ETL ELT extract transform load",
url="https://github.com/harrystech/arthur-redshift-etl",
package_dir={"": "python"},
packages=find_packages("python"),
package_data={
"etl": [
"assets/*",
"config/*",
"render_template/templates/*"
]
},
scripts=[
"python/scripts/launch_ec2_instance.sh",
"python/scripts/launch_emr_cluster.sh",
"python/scripts/re_run_partial_pipeline.py",
"python/scripts/submit_arthur.sh"
],
entry_points={
"console_scripts": [
# NB The script must end in ".py" so that spark submit accepts it as a Python script.
"arthur.py = etl.commands:run_arg_as_command",
"run_tests.py = etl.selftest:run_tests"
]
}
)
|
from setuptools import find_packages, setup
setup(
name="redshift_etl",
version="1.1.0",
author="Harry's Data Engineering and Contributors",
description="ETL code to ferry data from PostgreSQL databases or S3 files to Redshift clusters",
license="MIT",
keywords="redshift postgresql ETL ELT extract transform load",
url="https://github.com/harrystech/arthur-redshift-etl",
package_dir={"": "python"},
packages=find_packages("python"),
package_data={
"etl": [
"assets/*",
"config/*",
"render_template/templates/*"
]
},
scripts=[
"python/scripts/launch_ec2_instance.sh",
"python/scripts/launch_emr_cluster.sh",
"python/scripts/re_run_partial_pipeline.py",
"python/scripts/submit_arthur.sh"
],
entry_points={
"console_scripts": [
# NB The script must end in ".py" so that spark submit accepts it as a Python script.
"arthur.py = etl.commands:run_arg_as_command",
"run_tests.py = etl.selftest:run_tests"
]
}
)
|
Bump version for v1.1.0 release
|
Bump version for v1.1.0 release
|
Python
|
mit
|
harrystech/arthur-redshift-etl,harrystech/arthur-redshift-etl,harrystech/arthur-redshift-etl,harrystech/arthur-redshift-etl,harrystech/arthur-redshift-etl
|
from setuptools import find_packages, setup
setup(
name="redshift_etl",
version="1.0.1",
author="Harry's Data Engineering and Contributors",
description="ETL code to ferry data from PostgreSQL databases (or S3 files) to Redshift cluster",
license="MIT",
keywords="redshift postgresql ETL ELT extract transform load",
url="https://github.com/harrystech/arthur-redshift-etl",
package_dir={"": "python"},
packages=find_packages("python"),
package_data={
"etl": [
"assets/*",
"config/*",
"render_template/templates/*"
]
},
scripts=[
"python/scripts/launch_ec2_instance.sh",
"python/scripts/launch_emr_cluster.sh",
"python/scripts/re_run_partial_pipeline.py",
"python/scripts/submit_arthur.sh"
],
entry_points={
"console_scripts": [
# NB The script must end in ".py" so that spark submit accepts it as a Python script.
"arthur.py = etl.commands:run_arg_as_command",
"run_tests.py = etl.selftest:run_tests"
]
}
)
Bump version for v1.1.0 release
|
from setuptools import find_packages, setup
setup(
name="redshift_etl",
version="1.1.0",
author="Harry's Data Engineering and Contributors",
description="ETL code to ferry data from PostgreSQL databases or S3 files to Redshift clusters",
license="MIT",
keywords="redshift postgresql ETL ELT extract transform load",
url="https://github.com/harrystech/arthur-redshift-etl",
package_dir={"": "python"},
packages=find_packages("python"),
package_data={
"etl": [
"assets/*",
"config/*",
"render_template/templates/*"
]
},
scripts=[
"python/scripts/launch_ec2_instance.sh",
"python/scripts/launch_emr_cluster.sh",
"python/scripts/re_run_partial_pipeline.py",
"python/scripts/submit_arthur.sh"
],
entry_points={
"console_scripts": [
# NB The script must end in ".py" so that spark submit accepts it as a Python script.
"arthur.py = etl.commands:run_arg_as_command",
"run_tests.py = etl.selftest:run_tests"
]
}
)
|
<commit_before>from setuptools import find_packages, setup
setup(
name="redshift_etl",
version="1.0.1",
author="Harry's Data Engineering and Contributors",
description="ETL code to ferry data from PostgreSQL databases (or S3 files) to Redshift cluster",
license="MIT",
keywords="redshift postgresql ETL ELT extract transform load",
url="https://github.com/harrystech/arthur-redshift-etl",
package_dir={"": "python"},
packages=find_packages("python"),
package_data={
"etl": [
"assets/*",
"config/*",
"render_template/templates/*"
]
},
scripts=[
"python/scripts/launch_ec2_instance.sh",
"python/scripts/launch_emr_cluster.sh",
"python/scripts/re_run_partial_pipeline.py",
"python/scripts/submit_arthur.sh"
],
entry_points={
"console_scripts": [
# NB The script must end in ".py" so that spark submit accepts it as a Python script.
"arthur.py = etl.commands:run_arg_as_command",
"run_tests.py = etl.selftest:run_tests"
]
}
)
<commit_msg>Bump version for v1.1.0 release<commit_after>
|
from setuptools import find_packages, setup
setup(
name="redshift_etl",
version="1.1.0",
author="Harry's Data Engineering and Contributors",
description="ETL code to ferry data from PostgreSQL databases or S3 files to Redshift clusters",
license="MIT",
keywords="redshift postgresql ETL ELT extract transform load",
url="https://github.com/harrystech/arthur-redshift-etl",
package_dir={"": "python"},
packages=find_packages("python"),
package_data={
"etl": [
"assets/*",
"config/*",
"render_template/templates/*"
]
},
scripts=[
"python/scripts/launch_ec2_instance.sh",
"python/scripts/launch_emr_cluster.sh",
"python/scripts/re_run_partial_pipeline.py",
"python/scripts/submit_arthur.sh"
],
entry_points={
"console_scripts": [
# NB The script must end in ".py" so that spark submit accepts it as a Python script.
"arthur.py = etl.commands:run_arg_as_command",
"run_tests.py = etl.selftest:run_tests"
]
}
)
|
from setuptools import find_packages, setup
setup(
name="redshift_etl",
version="1.0.1",
author="Harry's Data Engineering and Contributors",
description="ETL code to ferry data from PostgreSQL databases (or S3 files) to Redshift cluster",
license="MIT",
keywords="redshift postgresql ETL ELT extract transform load",
url="https://github.com/harrystech/arthur-redshift-etl",
package_dir={"": "python"},
packages=find_packages("python"),
package_data={
"etl": [
"assets/*",
"config/*",
"render_template/templates/*"
]
},
scripts=[
"python/scripts/launch_ec2_instance.sh",
"python/scripts/launch_emr_cluster.sh",
"python/scripts/re_run_partial_pipeline.py",
"python/scripts/submit_arthur.sh"
],
entry_points={
"console_scripts": [
# NB The script must end in ".py" so that spark submit accepts it as a Python script.
"arthur.py = etl.commands:run_arg_as_command",
"run_tests.py = etl.selftest:run_tests"
]
}
)
Bump version for v1.1.0 release
from setuptools import find_packages, setup
setup(
name="redshift_etl",
version="1.1.0",
author="Harry's Data Engineering and Contributors",
description="ETL code to ferry data from PostgreSQL databases or S3 files to Redshift clusters",
license="MIT",
keywords="redshift postgresql ETL ELT extract transform load",
url="https://github.com/harrystech/arthur-redshift-etl",
package_dir={"": "python"},
packages=find_packages("python"),
package_data={
"etl": [
"assets/*",
"config/*",
"render_template/templates/*"
]
},
scripts=[
"python/scripts/launch_ec2_instance.sh",
"python/scripts/launch_emr_cluster.sh",
"python/scripts/re_run_partial_pipeline.py",
"python/scripts/submit_arthur.sh"
],
entry_points={
"console_scripts": [
# NB The script must end in ".py" so that spark submit accepts it as a Python script.
"arthur.py = etl.commands:run_arg_as_command",
"run_tests.py = etl.selftest:run_tests"
]
}
)
|
<commit_before>from setuptools import find_packages, setup
setup(
name="redshift_etl",
version="1.0.1",
author="Harry's Data Engineering and Contributors",
description="ETL code to ferry data from PostgreSQL databases (or S3 files) to Redshift cluster",
license="MIT",
keywords="redshift postgresql ETL ELT extract transform load",
url="https://github.com/harrystech/arthur-redshift-etl",
package_dir={"": "python"},
packages=find_packages("python"),
package_data={
"etl": [
"assets/*",
"config/*",
"render_template/templates/*"
]
},
scripts=[
"python/scripts/launch_ec2_instance.sh",
"python/scripts/launch_emr_cluster.sh",
"python/scripts/re_run_partial_pipeline.py",
"python/scripts/submit_arthur.sh"
],
entry_points={
"console_scripts": [
# NB The script must end in ".py" so that spark submit accepts it as a Python script.
"arthur.py = etl.commands:run_arg_as_command",
"run_tests.py = etl.selftest:run_tests"
]
}
)
<commit_msg>Bump version for v1.1.0 release<commit_after>from setuptools import find_packages, setup
setup(
name="redshift_etl",
version="1.1.0",
author="Harry's Data Engineering and Contributors",
description="ETL code to ferry data from PostgreSQL databases or S3 files to Redshift clusters",
license="MIT",
keywords="redshift postgresql ETL ELT extract transform load",
url="https://github.com/harrystech/arthur-redshift-etl",
package_dir={"": "python"},
packages=find_packages("python"),
package_data={
"etl": [
"assets/*",
"config/*",
"render_template/templates/*"
]
},
scripts=[
"python/scripts/launch_ec2_instance.sh",
"python/scripts/launch_emr_cluster.sh",
"python/scripts/re_run_partial_pipeline.py",
"python/scripts/submit_arthur.sh"
],
entry_points={
"console_scripts": [
# NB The script must end in ".py" so that spark submit accepts it as a Python script.
"arthur.py = etl.commands:run_arg_as_command",
"run_tests.py = etl.selftest:run_tests"
]
}
)
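Version bumps like the one above edit setup.py by hand; a common alternative is to single-source the version from the package itself so a release touches one file. The sketch below is hypothetical: the module path and the __version__ pattern are assumptions, not taken from arthur-redshift-etl.
# a sketch of single-sourcing the version; the path below is an assumption
import re
from setuptools import setup, find_packages

with open("python/etl/__init__.py") as f:  # hypothetical path
    version = re.search(r'__version__\s*=\s*"(.+?)"', f.read()).group(1)

setup(
    name="example-etl",                    # hypothetical name
    version=version,
    package_dir={"": "python"},
    packages=find_packages("python"),
)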
|
185a0bad5381f256bc2968b0d225eb45d30a40cb
|
setup.py
|
setup.py
|
__author__ = 'karec'
import os
from setuptools import setup
from octbrowser import __version__
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
with open('README.rst') as f:
long_description = f.read()
setup(
name='octbrowser',
version=__version__,
author='Emmanuel Valette',
author_email='manu.valette@gmail.com',
packages=['octbrowser'],
description="A web scrapper based on lxml library.",
long_description=long_description,
url='https://github.com/karec/oct-browser',
download_url='https://github.com/karec/oct-browser/archive/master.zip',
keywords=['testing', 'mechanize', 'webscrapper', 'browser', 'web', 'lxml', 'html'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4'
],
install_requires=[
'argparse',
'requests',
'lxml',
'cssselect',
'tinycss',
'six'
]
)
|
__author__ = 'karec'
import os
from setuptools import setup
from octbrowser import __version__
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
with open('README.rst') as f:
long_description = f.read()
setup(
name='octbrowser',
version=__version__,
author='Emmanuel Valette',
author_email='manu.valette@gmail.com',
packages=['octbrowser', 'octbrowser.history'],
description="A web scrapper based on lxml library.",
long_description=long_description,
url='https://github.com/karec/oct-browser',
download_url='https://github.com/karec/oct-browser/archive/master.zip',
keywords=['testing', 'mechanize', 'webscrapper', 'browser', 'web', 'lxml', 'html'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4'
],
install_requires=[
'argparse',
'requests',
'lxml',
'cssselect',
'tinycss',
'six'
]
)
|
Add history subdirectory to the package
|
Add history subdirectory to the package
|
Python
|
mit
|
karec/oct-browser,karec/oct-browser
|
__author__ = 'karec'
import os
from setuptools import setup
from octbrowser import __version__
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
with open('README.rst') as f:
long_description = f.read()
setup(
name='octbrowser',
version=__version__,
author='Emmanuel Valette',
author_email='manu.valette@gmail.com',
packages=['octbrowser'],
description="A web scrapper based on lxml library.",
long_description=long_description,
url='https://github.com/karec/oct-browser',
download_url='https://github.com/karec/oct-browser/archive/master.zip',
keywords=['testing', 'mechanize', 'webscrapper', 'browser', 'web', 'lxml', 'html'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4'
],
install_requires=[
'argparse',
'requests',
'lxml',
'cssselect',
'tinycss',
'six'
]
)
Add history subdirectory to the package
|
__author__ = 'karec'
import os
from setuptools import setup
from octbrowser import __version__
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
with open('README.rst') as f:
long_description = f.read()
setup(
name='octbrowser',
version=__version__,
author='Emmanuel Valette',
author_email='manu.valette@gmail.com',
packages=['octbrowser', 'octbrowser.history'],
description="A web scrapper based on lxml library.",
long_description=long_description,
url='https://github.com/karec/oct-browser',
download_url='https://github.com/karec/oct-browser/archive/master.zip',
keywords=['testing', 'mechanize', 'webscrapper', 'browser', 'web', 'lxml', 'html'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4'
],
install_requires=[
'argparse',
'requests',
'lxml',
'cssselect',
'tinycss',
'six'
]
)
|
<commit_before>__author__ = 'karec'
import os
from setuptools import setup
from octbrowser import __version__
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
with open('README.rst') as f:
long_description = f.read()
setup(
name='octbrowser',
version=__version__,
author='Emmanuel Valette',
author_email='manu.valette@gmail.com',
packages=['octbrowser'],
description="A web scrapper based on lxml library.",
long_description=long_description,
url='https://github.com/karec/oct-browser',
download_url='https://github.com/karec/oct-browser/archive/master.zip',
keywords=['testing', 'mechanize', 'webscrapper', 'browser', 'web', 'lxml', 'html'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4'
],
install_requires=[
'argparse',
'requests',
'lxml',
'cssselect',
'tinycss',
'six'
]
)
<commit_msg>Add history subdirectory to the package<commit_after>
|
__author__ = 'karec'
import os
from setuptools import setup
from octbrowser import __version__
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
with open('README.rst') as f:
long_description = f.read()
setup(
name='octbrowser',
version=__version__,
author='Emmanuel Valette',
author_email='manu.valette@gmail.com',
packages=['octbrowser', 'octbrowser.history'],
description="A web scrapper based on lxml library.",
long_description=long_description,
url='https://github.com/karec/oct-browser',
download_url='https://github.com/karec/oct-browser/archive/master.zip',
keywords=['testing', 'mechanize', 'webscrapper', 'browser', 'web', 'lxml', 'html'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4'
],
install_requires=[
'argparse',
'requests',
'lxml',
'cssselect',
'tinycss',
'six'
]
)
|
__author__ = 'karec'
import os
from setuptools import setup
from octbrowser import __version__
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
with open('README.rst') as f:
long_description = f.read()
setup(
name='octbrowser',
version=__version__,
author='Emmanuel Valette',
author_email='manu.valette@gmail.com',
packages=['octbrowser'],
description="A web scrapper based on lxml library.",
long_description=long_description,
url='https://github.com/karec/oct-browser',
download_url='https://github.com/karec/oct-browser/archive/master.zip',
keywords=['testing', 'mechanize', 'webscrapper', 'browser', 'web', 'lxml', 'html'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4'
],
install_requires=[
'argparse',
'requests',
'lxml',
'cssselect',
'tinycss',
'six'
]
)
Add history subdirectory to the package
__author__ = 'karec'
import os
from setuptools import setup
from octbrowser import __version__
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
with open('README.rst') as f:
long_description = f.read()
setup(
name='octbrowser',
version=__version__,
author='Emmanuel Valette',
author_email='manu.valette@gmail.com',
packages=['octbrowser', 'octbrowser.history'],
description="A web scrapper based on lxml library.",
long_description=long_description,
url='https://github.com/karec/oct-browser',
download_url='https://github.com/karec/oct-browser/archive/master.zip',
keywords=['testing', 'mechanize', 'webscrapper', 'browser', 'web', 'lxml', 'html'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4'
],
install_requires=[
'argparse',
'requests',
'lxml',
'cssselect',
'tinycss',
'six'
]
)
|
<commit_before>__author__ = 'karec'
import os
from setuptools import setup
from octbrowser import __version__
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
with open('README.rst') as f:
long_description = f.read()
setup(
name='octbrowser',
version=__version__,
author='Emmanuel Valette',
author_email='manu.valette@gmail.com',
packages=['octbrowser'],
description="A web scrapper based on lxml library.",
long_description=long_description,
url='https://github.com/karec/oct-browser',
download_url='https://github.com/karec/oct-browser/archive/master.zip',
keywords=['testing', 'mechanize', 'webscrapper', 'browser', 'web', 'lxml', 'html'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4'
],
install_requires=[
'argparse',
'requests',
'lxml',
'cssselect',
'tinycss',
'six'
]
)
<commit_msg>Add history subdirectory to the package<commit_after>__author__ = 'karec'
import os
from setuptools import setup
from octbrowser import __version__
BASE_DIR = os.path.abspath(os.path.dirname(__file__))
with open('README.rst') as f:
long_description = f.read()
setup(
name='octbrowser',
version=__version__,
author='Emmanuel Valette',
author_email='manu.valette@gmail.com',
packages=['octbrowser', 'octbrowser.history'],
description="A web scrapper based on lxml library.",
long_description=long_description,
url='https://github.com/karec/oct-browser',
download_url='https://github.com/karec/oct-browser/archive/master.zip',
keywords=['testing', 'mechanize', 'webscrapper', 'browser', 'web', 'lxml', 'html'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4'
],
install_requires=[
'argparse',
'requests',
'lxml',
'cssselect',
'tinycss',
'six'
]
)
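The fix above adds 'octbrowser.history' to packages by hand, which is easy to forget the next time a subpackage appears. find_packages() discovers nested packages automatically; the snippet below is a generic sketch, not the oct-browser project's code.
# illustrative sketch: automatic package discovery instead of a manual list
from setuptools import setup, find_packages

setup(
    name="example-package",                                 # hypothetical name
    version="0.0.1",
    packages=find_packages(exclude=["tests", "tests.*"]),   # picks up subpackages
)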
|
506b686435b80fbb782403ebab5348c41689203b
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# encoding: utf-8
import os
from setuptools import setup, find_packages
setup(
name="bsAbstimmungen",
version="0.1.0",
packages=['bsAbstimmungen'],
author="Raphael Zimmermann",
author_email="dev@raphael.li",
url="https://github.com/raphiz/bsAbstimmungen",
description="",
long_description=open('./README.md').read(),
license="MIT",
platforms=["Linux", "BSD", "MacOS"],
include_package_data=True,
zip_safe=False,
install_requires=open('./requirements.txt').read(),
tests_require=open('./requirements-dev.txt').read(),
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
"Programming Language :: Python :: Implementation :: CPython",
'Development Status :: 4 - Beta',
],
)
|
#!/usr/bin/env python
# encoding: utf-8
import os
from setuptools import setup, find_packages
setup(
name="bsAbstimmungen",
version="0.1.0",
packages=['bsAbstimmungen'],
author="Raphael Zimmermann",
author_email="dev@raphael.li",
url="https://github.com/raphiz/bsAbstimmungen",
description="",
long_description=open('./README.md').read(),
license="MIT",
platforms=["Linux", "BSD", "MacOS"],
include_package_data=True,
zip_safe=False,
install_requires=open('./requirements.txt').read(),
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
"Programming Language :: Python :: Implementation :: CPython",
'Development Status :: 4 - Beta',
],
)
|
Remove installation of dev-requirements;they don't exist anymore
|
Remove installation of dev-requirements;they don't exist anymore
|
Python
|
mit
|
raphiz/bsAbstimmungen,raphiz/bsAbstimmungen
|
#!/usr/bin/env python
# encoding: utf-8
import os
from setuptools import setup, find_packages
setup(
name="bsAbstimmungen",
version="0.1.0",
packages=['bsAbstimmungen'],
author="Raphael Zimmermann",
author_email="dev@raphael.li",
url="https://github.com/raphiz/bsAbstimmungen",
description="",
long_description=open('./README.md').read(),
license="MIT",
platforms=["Linux", "BSD", "MacOS"],
include_package_data=True,
zip_safe=False,
install_requires=open('./requirements.txt').read(),
tests_require=open('./requirements-dev.txt').read(),
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
"Programming Language :: Python :: Implementation :: CPython",
'Development Status :: 4 - Beta',
],
)
Remove installation of dev-requirements;they don't exist anymore
|
#!/usr/bin/env python
# encoding: utf-8
import os
from setuptools import setup, find_packages
setup(
name="bsAbstimmungen",
version="0.1.0",
packages=['bsAbstimmungen'],
author="Raphael Zimmermann",
author_email="dev@raphael.li",
url="https://github.com/raphiz/bsAbstimmungen",
description="",
long_description=open('./README.md').read(),
license="MIT",
platforms=["Linux", "BSD", "MacOS"],
include_package_data=True,
zip_safe=False,
install_requires=open('./requirements.txt').read(),
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
"Programming Language :: Python :: Implementation :: CPython",
'Development Status :: 4 - Beta',
],
)
|
<commit_before>#!/usr/bin/env python
# encoding: utf-8
import os
from setuptools import setup, find_packages
setup(
name="bsAbstimmungen",
version="0.1.0",
packages=['bsAbstimmungen'],
author="Raphael Zimmermann",
author_email="dev@raphael.li",
url="https://github.com/raphiz/bsAbstimmungen",
description="",
long_description=open('./README.md').read(),
license="MIT",
platforms=["Linux", "BSD", "MacOS"],
include_package_data=True,
zip_safe=False,
install_requires=open('./requirements.txt').read(),
tests_require=open('./requirements-dev.txt').read(),
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
"Programming Language :: Python :: Implementation :: CPython",
'Development Status :: 4 - Beta',
],
)
<commit_msg>Remove installation of dev-requirements;they don't exist anymore<commit_after>
|
#!/usr/bin/env python
# encoding: utf-8
import os
from setuptools import setup, find_packages
setup(
name="bsAbstimmungen",
version="0.1.0",
packages=['bsAbstimmungen'],
author="Raphael Zimmermann",
author_email="dev@raphael.li",
url="https://github.com/raphiz/bsAbstimmungen",
description="",
long_description=open('./README.md').read(),
license="MIT",
platforms=["Linux", "BSD", "MacOS"],
include_package_data=True,
zip_safe=False,
install_requires=open('./requirements.txt').read(),
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
"Programming Language :: Python :: Implementation :: CPython",
'Development Status :: 4 - Beta',
],
)
|
#!/usr/bin/env python
# encoding: utf-8
import os
from setuptools import setup, find_packages
setup(
name="bsAbstimmungen",
version="0.1.0",
packages=['bsAbstimmungen'],
author="Raphael Zimmermann",
author_email="dev@raphael.li",
url="https://github.com/raphiz/bsAbstimmungen",
description="",
long_description=open('./README.md').read(),
license="MIT",
platforms=["Linux", "BSD", "MacOS"],
include_package_data=True,
zip_safe=False,
install_requires=open('./requirements.txt').read(),
tests_require=open('./requirements-dev.txt').read(),
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
"Programming Language :: Python :: Implementation :: CPython",
'Development Status :: 4 - Beta',
],
)
Remove installation of dev-requirements;they don't exist anymore
#!/usr/bin/env python
# encoding: utf-8
import os
from setuptools import setup, find_packages
setup(
name="bsAbstimmungen",
version="0.1.0",
packages=['bsAbstimmungen'],
author="Raphael Zimmermann",
author_email="dev@raphael.li",
url="https://github.com/raphiz/bsAbstimmungen",
description="",
long_description=open('./README.md').read(),
license="MIT",
platforms=["Linux", "BSD", "MacOS"],
include_package_data=True,
zip_safe=False,
install_requires=open('./requirements.txt').read(),
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
"Programming Language :: Python :: Implementation :: CPython",
'Development Status :: 4 - Beta',
],
)
|
<commit_before>#!/usr/bin/env python
# encoding: utf-8
import os
from setuptools import setup, find_packages
setup(
name="bsAbstimmungen",
version="0.1.0",
packages=['bsAbstimmungen'],
author="Raphael Zimmermann",
author_email="dev@raphael.li",
url="https://github.com/raphiz/bsAbstimmungen",
description="",
long_description=open('./README.md').read(),
license="MIT",
platforms=["Linux", "BSD", "MacOS"],
include_package_data=True,
zip_safe=False,
install_requires=open('./requirements.txt').read(),
tests_require=open('./requirements-dev.txt').read(),
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
"Programming Language :: Python :: Implementation :: CPython",
'Development Status :: 4 - Beta',
],
)
<commit_msg>Remove installation of dev-requirements;they don't exist anymore<commit_after>#!/usr/bin/env python
# encoding: utf-8
import os
from setuptools import setup, find_packages
setup(
name="bsAbstimmungen",
version="0.1.0",
packages=['bsAbstimmungen'],
author="Raphael Zimmermann",
author_email="dev@raphael.li",
url="https://github.com/raphiz/bsAbstimmungen",
description="",
long_description=open('./README.md').read(),
license="MIT",
platforms=["Linux", "BSD", "MacOS"],
include_package_data=True,
zip_safe=False,
install_requires=open('./requirements.txt').read(),
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
"Programming Language :: Python :: Implementation :: CPython",
'Development Status :: 4 - Beta',
],
)
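The commit above drops tests_require because the dev requirements file no longer exists. One way to keep development dependencies installable without a separate file is an extras_require group; the example below is a sketch with made-up dependencies, not the project's actual setup.
# a sketch: dev dependencies as an optional extra (pip install .[dev])
from setuptools import setup

setup(
    name="example-package",               # hypothetical name
    version="0.0.1",
    install_requires=["requests"],        # example runtime dependency
    extras_require={"dev": ["pytest"]},   # example development dependency
)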
|
5e8579e7e8717ef5b95c5ec1d48c2bff5a147f50
|
setup.py
|
setup.py
|
import os
from setuptools import setup
setup(
name = "ld35",
version = "0.1.1",
url = "https://github.com/seventhroot/ld35",
author = 'Seventh Root',
description = 'The Seventh Root entry for Ludum Dare 35',
long_description_markdown_filename='README.md',
packages = ['ld35'],
package_data = {'ld35': [
'assets/*.ogg',
'assets/*.wav',
'assets/*.png',
'assets/*.tmx',
'examples/*.png',
'examples/*.tmx',
]},
setup_requires=['setuptools-markdown'],
install_requires = [
'pygame==1.9.1',
'Pyganim==0.9.2',
'pyscroll==2.16.6',
'PyTMX==3.20.14',
'six==1.10.0',
],
scripts = ['scripts/ld35game.py'],
# this is to compensate for pytmx.
# better solution may be to give it a suitable resource loader
zip_safe = False,
)
|
import os
from setuptools import setup
setup(
name = "ld35",
version = "0.1.2",
url = "https://github.com/seventhroot/ld35",
author = 'Seventh Root',
description = 'The Seventh Root entry for Ludum Dare 35',
long_description_markdown_filename='README.md',
packages = ['ld35'],
package_data = {'ld35': [
'assets/*.ogg',
'assets/*.wav',
'assets/*.png',
'assets/*.tmx',
'examples/*.png',
'examples/*.tmx',
]},
setup_requires=['setuptools-markdown'],
install_requires = [
'pygame>=1.9.1',
'Pyganim>=0.9.2',
'pyscroll>=2.16.6',
'PyTMX>=3.20.14',
'six>=1.10.0',
],
scripts = ['scripts/ld35game.py'],
# this is to compensate for pytmx.
# better solution may be to give it a suitable resource loader
zip_safe = False,
)
|
Update install_requires and inc version.
|
Update install_requires and inc version.
|
Python
|
mit
|
seventhroot/ld35
|
import os
from setuptools import setup
setup(
name = "ld35",
version = "0.1.1",
url = "https://github.com/seventhroot/ld35",
author = 'Seventh Root',
description = 'The Seventh Root entry for Ludum Dare 35',
long_description_markdown_filename='README.md',
packages = ['ld35'],
package_data = {'ld35': [
'assets/*.ogg',
'assets/*.wav',
'assets/*.png',
'assets/*.tmx',
'examples/*.png',
'examples/*.tmx',
]},
setup_requires=['setuptools-markdown'],
install_requires = [
'pygame==1.9.1',
'Pyganim==0.9.2',
'pyscroll==2.16.6',
'PyTMX==3.20.14',
'six==1.10.0',
],
scripts = ['scripts/ld35game.py'],
# this is to compensate for pytmx.
# better solution may be to give it a suitable resource loader
zip_safe = False,
)
Update install_requires and inc version.
|
import os
from setuptools import setup
setup(
name = "ld35",
version = "0.1.2",
url = "https://github.com/seventhroot/ld35",
author = 'Seventh Root',
description = 'The Seventh Root entry for Ludum Dare 35',
long_description_markdown_filename='README.md',
packages = ['ld35'],
package_data = {'ld35': [
'assets/*.ogg',
'assets/*.wav',
'assets/*.png',
'assets/*.tmx',
'examples/*.png',
'examples/*.tmx',
]},
setup_requires=['setuptools-markdown'],
install_requires = [
'pygame>=1.9.1',
'Pyganim>=0.9.2',
'pyscroll>=2.16.6',
'PyTMX>=3.20.14',
'six>=1.10.0',
],
scripts = ['scripts/ld35game.py'],
# this is to compensate for pytmx.
# better solution may be to give it a suitable resource loader
zip_safe = False,
)
|
<commit_before>import os
from setuptools import setup
setup(
name = "ld35",
version = "0.1.1",
url = "https://github.com/seventhroot/ld35",
author = 'Seventh Root',
description = 'The Seventh Root entry for Ludum Dare 35',
long_description_markdown_filename='README.md',
packages = ['ld35'],
package_data = {'ld35': [
'assets/*.ogg',
'assets/*.wav',
'assets/*.png',
'assets/*.tmx',
'examples/*.png',
'examples/*.tmx',
]},
setup_requires=['setuptools-markdown'],
install_requires = [
'pygame==1.9.1',
'Pyganim==0.9.2',
'pyscroll==2.16.6',
'PyTMX==3.20.14',
'six==1.10.0',
],
scripts = ['scripts/ld35game.py'],
# this is to compensate for pytmx.
# better solution may be to give it a suitable resource loader
zip_safe = False,
)
<commit_msg>Update install_requires and inc version.<commit_after>
|
import os
from setuptools import setup
setup(
name = "ld35",
version = "0.1.2",
url = "https://github.com/seventhroot/ld35",
author = 'Seventh Root',
description = 'The Seventh Root entry for Ludum Dare 35',
long_description_markdown_filename='README.md',
packages = ['ld35'],
package_data = {'ld35': [
'assets/*.ogg',
'assets/*.wav',
'assets/*.png',
'assets/*.tmx',
'examples/*.png',
'examples/*.tmx',
]},
setup_requires=['setuptools-markdown'],
install_requires = [
'pygame>=1.9.1',
'Pyganim>=0.9.2',
'pyscroll>=2.16.6',
'PyTMX>=3.20.14',
'six>=1.10.0',
],
scripts = ['scripts/ld35game.py'],
# this is to compensate for pytmx.
# better solution may be to give it a suitable resource loader
zip_safe = False,
)
|
import os
from setuptools import setup
setup(
name = "ld35",
version = "0.1.1",
url = "https://github.com/seventhroot/ld35",
author = 'Seventh Root',
description = 'The Seventh Root entry for Ludum Dare 35',
long_description_markdown_filename='README.md',
packages = ['ld35'],
package_data = {'ld35': [
'assets/*.ogg',
'assets/*.wav',
'assets/*.png',
'assets/*.tmx',
'examples/*.png',
'examples/*.tmx',
]},
setup_requires=['setuptools-markdown'],
install_requires = [
'pygame==1.9.1',
'Pyganim==0.9.2',
'pyscroll==2.16.6',
'PyTMX==3.20.14',
'six==1.10.0',
],
scripts = ['scripts/ld35game.py'],
# this is to compensate for pytmx.
# better solution may be to give it a suitable resource loader
zip_safe = False,
)
Update install_requires and inc version.
import os
from setuptools import setup
setup(
name = "ld35",
version = "0.1.2",
url = "https://github.com/seventhroot/ld35",
author = 'Seventh Root',
description = 'The Seventh Root entry for Ludum Dare 35',
long_description_markdown_filename='README.md',
packages = ['ld35'],
package_data = {'ld35': [
'assets/*.ogg',
'assets/*.wav',
'assets/*.png',
'assets/*.tmx',
'examples/*.png',
'examples/*.tmx',
]},
setup_requires=['setuptools-markdown'],
install_requires = [
'pygame>=1.9.1',
'Pyganim>=0.9.2',
'pyscroll>=2.16.6',
'PyTMX>=3.20.14',
'six>=1.10.0',
],
scripts = ['scripts/ld35game.py'],
# this is to compensate for pytmx.
# better solution may be to give it a suitable resource loader
zip_safe = False,
)
|
<commit_before>import os
from setuptools import setup
setup(
name = "ld35",
version = "0.1.1",
url = "https://github.com/seventhroot/ld35",
author = 'Seventh Root',
description = 'The Seventh Root entry for Ludum Dare 35',
long_description_markdown_filename='README.md',
packages = ['ld35'],
package_data = {'ld35': [
'assets/*.ogg',
'assets/*.wav',
'assets/*.png',
'assets/*.tmx',
'examples/*.png',
'examples/*.tmx',
]},
setup_requires=['setuptools-markdown'],
install_requires = [
'pygame==1.9.1',
'Pyganim==0.9.2',
'pyscroll==2.16.6',
'PyTMX==3.20.14',
'six==1.10.0',
],
scripts = ['scripts/ld35game.py'],
# this is to compensate for pytmx.
# better solution may be to give it a suitable resource loader
zip_safe = False,
)
<commit_msg>Update install_requires and inc version.<commit_after>import os
from setuptools import setup
setup(
name = "ld35",
version = "0.1.2",
url = "https://github.com/seventhroot/ld35",
author = 'Seventh Root',
description = 'The Seventh Root entry for Ludum Dare 35',
long_description_markdown_filename='README.md',
packages = ['ld35'],
package_data = {'ld35': [
'assets/*.ogg',
'assets/*.wav',
'assets/*.png',
'assets/*.tmx',
'examples/*.png',
'examples/*.tmx',
]},
setup_requires=['setuptools-markdown'],
install_requires = [
'pygame>=1.9.1',
'Pyganim>=0.9.2',
'pyscroll>=2.16.6',
'PyTMX>=3.20.14',
'six>=1.10.0',
],
scripts = ['scripts/ld35game.py'],
# this is to compensate for pytmx.
# better solution may be to give it a suitable resource loader
zip_safe = False,
)
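The change above relaxes exact pins (==) to minimum versions (>=). For libraries a bounded range is a common middle ground: it accepts bug-fix releases but stops before a major version that may break the API. The bounds below are illustrative and have not been verified against these libraries.
# a sketch of bounded version ranges; the upper bounds are assumptions
from setuptools import setup

setup(
    name="example-game",                # hypothetical name
    version="0.1.0",
    install_requires=[
        "pygame>=1.9.1,<2.0",           # example bound, not verified
        "PyTMX>=3.20.14,<4",            # example bound, not verified
    ],
)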
|
dd5dc52e579e8571e7c888b536c0528002345394
|
setup.py
|
setup.py
|
from setuptools import setup
#from osrm import __version__
with open("requirements.txt") as f:
requirements = f.read().split('\n')
setup(
author_email="ustroetz@gmail.com",
author="Ulric Stroetz, mthh",
description="A Python wrapper around the OSRM API",
install_requires=requirements,
name='osrm',
packages=['osrm'],
test_suite="tests",
url="https://github.com/ustroetz/python-osrm",
version='0.11.3'
)
|
from setuptools import setup
#from osrm import __version__
with open("requirements.txt") as f:
requirements = f.read().split('\n')
setup(
author_email="ustroetz@gmail.com",
author="Uli Strötz, mthh",
description="A Python wrapper around the OSRM API",
install_requires=requirements,
name='osrm',
packages=['osrm'],
test_suite="tests",
url="https://github.com/ustroetz/python-osrm",
version='0.11.3'
)
|
Fix typo in author name
|
Fix typo in author name
|
Python
|
mit
|
ustroetz/python-osrm,mthh/python-osrm,mthh/python-osrm,ustroetz/python-osrm
|
from setuptools import setup
#from osrm import __version__
with open("requirements.txt") as f:
requirements = f.read().split('\n')
setup(
author_email="ustroetz@gmail.com",
author="Ulric Stroetz, mthh",
description="A Python wrapper around the OSRM API",
install_requires=requirements,
name='osrm',
packages=['osrm'],
test_suite="tests",
url="https://github.com/ustroetz/python-osrm",
version='0.11.3'
)
Fix typo in author name
|
from setuptools import setup
#from osrm import __version__
with open("requirements.txt") as f:
requirements = f.read().split('\n')
setup(
author_email="ustroetz@gmail.com",
author="Uli Strötz, mthh",
description="A Python wrapper around the OSRM API",
install_requires=requirements,
name='osrm',
packages=['osrm'],
test_suite="tests",
url="https://github.com/ustroetz/python-osrm",
version='0.11.3'
)
|
<commit_before>from setuptools import setup
#from osrm import __version__
with open("requirements.txt") as f:
requirements = f.read().split('\n')
setup(
author_email="ustroetz@gmail.com",
author="Ulric Stroetz, mthh",
description="A Python wrapper around the OSRM API",
install_requires=requirements,
name='osrm',
packages=['osrm'],
test_suite="tests",
url="https://github.com/ustroetz/python-osrm",
version='0.11.3'
)
<commit_msg>Fix typo in author name<commit_after>
|
from setuptools import setup
#from osrm import __version__
with open("requirements.txt") as f:
requirements = f.read().split('\n')
setup(
author_email="ustroetz@gmail.com",
author="Uli Strötz, mthh",
description="A Python wrapper around the OSRM API",
install_requires=requirements,
name='osrm',
packages=['osrm'],
test_suite="tests",
url="https://github.com/ustroetz/python-osrm",
version='0.11.3'
)
|
from setuptools import setup
#from osrm import __version__
with open("requirements.txt") as f:
requirements = f.read().split('\n')
setup(
author_email="ustroetz@gmail.com",
author="Ulric Stroetz, mthh",
description="A Python wrapper around the OSRM API",
install_requires=requirements,
name='osrm',
packages=['osrm'],
test_suite="tests",
url="https://github.com/ustroetz/python-osrm",
version='0.11.3'
)
Fix typo in author name
from setuptools import setup
#from osrm import __version__
with open("requirements.txt") as f:
requirements = f.read().split('\n')
setup(
author_email="ustroetz@gmail.com",
author="Uli Strötz, mthh",
description="A Python wrapper around the OSRM API",
install_requires=requirements,
name='osrm',
packages=['osrm'],
test_suite="tests",
url="https://github.com/ustroetz/python-osrm",
version='0.11.3'
)
|
<commit_before>from setuptools import setup
#from osrm import __version__
with open("requirements.txt") as f:
requirements = f.read().split('\n')
setup(
author_email="ustroetz@gmail.com",
author="Ulric Stroetz, mthh",
description="A Python wrapper around the OSRM API",
install_requires=requirements,
name='osrm',
packages=['osrm'],
test_suite="tests",
url="https://github.com/ustroetz/python-osrm",
version='0.11.3'
)
<commit_msg>Fix typo in author name<commit_after>from setuptools import setup
#from osrm import __version__
with open("requirements.txt") as f:
requirements = f.read().split('\n')
setup(
author_email="ustroetz@gmail.com",
author="Uli Strötz, mthh",
description="A Python wrapper around the OSRM API",
install_requires=requirements,
name='osrm',
packages=['osrm'],
test_suite="tests",
url="https://github.com/ustroetz/python-osrm",
version='0.11.3'
)
|
c75923fdf53cd14eee9f977f22bffd1512288ae5
|
setup.py
|
setup.py
|
import uuid
__author__ = 'David Barroso <dbarrosop@dravetech.com>'
from setuptools import setup, find_packages
from pip.req import parse_requirements
install_reqs = parse_requirements('requirements.txt', session=uuid.uuid1())
reqs = [str(ir.req) for ir in install_reqs]
setup(
name="napalm",
version="1.00.0",
packages=find_packages(),
author="David Barroso",
author_email="dbarrosop@dravetech.com",
description="Network Automation and Programmability Abstraction Layer with Multivendor support",
classifiers=[
'Topic :: Utilities',
'Programming Language :: Python',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS',
],
url="https://github.com/napalm-automation/napalm",
include_package_data=True,
install_requires=reqs,
entry_points={
'console_scripts': [
'cl_napalm_configure=napalm.clitools.cl_napalm_configure:main',
],
}
)
|
import uuid
__author__ = 'David Barroso <dbarrosop@dravetech.com>'
from setuptools import setup, find_packages
from pip.req import parse_requirements
install_reqs = parse_requirements('requirements.txt', session=uuid.uuid1())
reqs = [str(ir.req) for ir in install_reqs]
setup(
name="napalm",
version="1.00.0",
packages=find_packages(),
author="David Barroso",
author_email="dbarrosop@dravetech.com",
description="Network Automation and Programmability Abstraction Layer with Multivendor support",
classifiers=[
'Topic :: Utilities',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS',
],
url="https://github.com/napalm-automation/napalm",
include_package_data=True,
install_requires=reqs,
entry_points={
'console_scripts': [
'cl_napalm_configure=napalm.clitools.cl_napalm_configure:main',
],
}
)
|
Add classifier indicating only Python2.7 support
|
Add classifier indicating only Python2.7 support
|
Python
|
apache-2.0
|
kderynski/napalm,mirceaulinic/napalm
|
import uuid
__author__ = 'David Barroso <dbarrosop@dravetech.com>'
from setuptools import setup, find_packages
from pip.req import parse_requirements
install_reqs = parse_requirements('requirements.txt', session=uuid.uuid1())
reqs = [str(ir.req) for ir in install_reqs]
setup(
name="napalm",
version="1.00.0",
packages=find_packages(),
author="David Barroso",
author_email="dbarrosop@dravetech.com",
description="Network Automation and Programmability Abstraction Layer with Multivendor support",
classifiers=[
'Topic :: Utilities',
'Programming Language :: Python',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS',
],
url="https://github.com/napalm-automation/napalm",
include_package_data=True,
install_requires=reqs,
entry_points={
'console_scripts': [
'cl_napalm_configure=napalm.clitools.cl_napalm_configure:main',
],
}
)
Add classifier indicating only Python2.7 support
|
import uuid
__author__ = 'David Barroso <dbarrosop@dravetech.com>'
from setuptools import setup, find_packages
from pip.req import parse_requirements
install_reqs = parse_requirements('requirements.txt', session=uuid.uuid1())
reqs = [str(ir.req) for ir in install_reqs]
setup(
name="napalm",
version="1.00.0",
packages=find_packages(),
author="David Barroso",
author_email="dbarrosop@dravetech.com",
description="Network Automation and Programmability Abstraction Layer with Multivendor support",
classifiers=[
'Topic :: Utilities',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS',
],
url="https://github.com/napalm-automation/napalm",
include_package_data=True,
install_requires=reqs,
entry_points={
'console_scripts': [
'cl_napalm_configure=napalm.clitools.cl_napalm_configure:main',
],
}
)
|
<commit_before>import uuid
__author__ = 'David Barroso <dbarrosop@dravetech.com>'
from setuptools import setup, find_packages
from pip.req import parse_requirements
install_reqs = parse_requirements('requirements.txt', session=uuid.uuid1())
reqs = [str(ir.req) for ir in install_reqs]
setup(
name="napalm",
version="1.00.0",
packages=find_packages(),
author="David Barroso",
author_email="dbarrosop@dravetech.com",
description="Network Automation and Programmability Abstraction Layer with Multivendor support",
classifiers=[
'Topic :: Utilities',
'Programming Language :: Python',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS',
],
url="https://github.com/napalm-automation/napalm",
include_package_data=True,
install_requires=reqs,
entry_points={
'console_scripts': [
'cl_napalm_configure=napalm.clitools.cl_napalm_configure:main',
],
}
)
<commit_msg>Add classifier indicating only Python2.7 support<commit_after>
|
import uuid
__author__ = 'David Barroso <dbarrosop@dravetech.com>'
from setuptools import setup, find_packages
from pip.req import parse_requirements
install_reqs = parse_requirements('requirements.txt', session=uuid.uuid1())
reqs = [str(ir.req) for ir in install_reqs]
setup(
name="napalm",
version="1.00.0",
packages=find_packages(),
author="David Barroso",
author_email="dbarrosop@dravetech.com",
description="Network Automation and Programmability Abstraction Layer with Multivendor support",
classifiers=[
'Topic :: Utilities',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS',
],
url="https://github.com/napalm-automation/napalm",
include_package_data=True,
install_requires=reqs,
entry_points={
'console_scripts': [
'cl_napalm_configure=napalm.clitools.cl_napalm_configure:main',
],
}
)
|
import uuid
__author__ = 'David Barroso <dbarrosop@dravetech.com>'
from setuptools import setup, find_packages
from pip.req import parse_requirements
install_reqs = parse_requirements('requirements.txt', session=uuid.uuid1())
reqs = [str(ir.req) for ir in install_reqs]
setup(
name="napalm",
version="1.00.0",
packages=find_packages(),
author="David Barroso",
author_email="dbarrosop@dravetech.com",
description="Network Automation and Programmability Abstraction Layer with Multivendor support",
classifiers=[
'Topic :: Utilities',
'Programming Language :: Python',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS',
],
url="https://github.com/napalm-automation/napalm",
include_package_data=True,
install_requires=reqs,
entry_points={
'console_scripts': [
'cl_napalm_configure=napalm.clitools.cl_napalm_configure:main',
],
}
)
Add classifier indicating only Python2.7 supportimport uuid
__author__ = 'David Barroso <dbarrosop@dravetech.com>'
from setuptools import setup, find_packages
from pip.req import parse_requirements
install_reqs = parse_requirements('requirements.txt', session=uuid.uuid1())
reqs = [str(ir.req) for ir in install_reqs]
setup(
name="napalm",
version="1.00.0",
packages=find_packages(),
author="David Barroso",
author_email="dbarrosop@dravetech.com",
description="Network Automation and Programmability Abstraction Layer with Multivendor support",
classifiers=[
'Topic :: Utilities',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS',
],
url="https://github.com/napalm-automation/napalm",
include_package_data=True,
install_requires=reqs,
entry_points={
'console_scripts': [
'cl_napalm_configure=napalm.clitools.cl_napalm_configure:main',
],
}
)
|
<commit_before>import uuid
__author__ = 'David Barroso <dbarrosop@dravetech.com>'
from setuptools import setup, find_packages
from pip.req import parse_requirements
install_reqs = parse_requirements('requirements.txt', session=uuid.uuid1())
reqs = [str(ir.req) for ir in install_reqs]
setup(
name="napalm",
version="1.00.0",
packages=find_packages(),
author="David Barroso",
author_email="dbarrosop@dravetech.com",
description="Network Automation and Programmability Abstraction Layer with Multivendor support",
classifiers=[
'Topic :: Utilities',
'Programming Language :: Python',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS',
],
url="https://github.com/napalm-automation/napalm",
include_package_data=True,
install_requires=reqs,
entry_points={
'console_scripts': [
'cl_napalm_configure=napalm.clitools.cl_napalm_configure:main',
],
}
)
<commit_msg>Add classifier indicating only Python2.7 support<commit_after>import uuid
__author__ = 'David Barroso <dbarrosop@dravetech.com>'
from setuptools import setup, find_packages
from pip.req import parse_requirements
install_reqs = parse_requirements('requirements.txt', session=uuid.uuid1())
reqs = [str(ir.req) for ir in install_reqs]
setup(
name="napalm",
version="1.00.0",
packages=find_packages(),
author="David Barroso",
author_email="dbarrosop@dravetech.com",
description="Network Automation and Programmability Abstraction Layer with Multivendor support",
classifiers=[
'Topic :: Utilities',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS',
],
url="https://github.com/napalm-automation/napalm",
include_package_data=True,
install_requires=reqs,
entry_points={
'console_scripts': [
'cl_napalm_configure=napalm.clitools.cl_napalm_configure:main',
],
}
)
|
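An aside on the napalm record above: its setup.py builds install_requires from pip's internal parse_requirements helper, which was never a public API and stopped working in later pip releases. A minimal, dependency-free sketch of the usual replacement (the requirements.txt file name is taken from the record; everything else is illustrative):

# Sketch: read requirements.txt directly instead of importing pip internals.
with open('requirements.txt') as f:
    reqs = [line.strip() for line in f
            if line.strip() and not line.startswith('#')]

# reqs can then be passed straight to setuptools.setup(install_requires=reqs).
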
82ae36037513031fde19b7c82cd1f244c6e0c31b
|
setup.py
|
setup.py
|
import os.path
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
# read README as the long description
readme = 'README' if os.path.exists('README') else 'README.md'
with open(readme, 'r') as f:
long_description = f.read()
setup(
name='spandex',
version='0.1dev',
description='Spatial Analysis and Data Exploration',
long_description=long_description,
author='Synthicity',
author_email='ejanowicz@synthicity.com',
license='BSD',
url='https://github.com/synthicity/spandex',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python :: 2.7',
'License :: OSI Approved :: BSD License'
],
packages=find_packages(exclude=['*.tests']),
install_requires=[
'gdal>=1.10.1',
'numpy>=1.8.0',
'pandas>=0.13.1',
'psycopg2>=2.5.4',
'rasterio>=0.12',
'rasterstats>=0.4',
'shapely>=1.3.2'
]
)
|
import os.path
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
# read README as the long description
readme = 'README' if os.path.exists('README') else 'README.md'
with open(readme, 'r') as f:
long_description = f.read()
setup(
name='spandex',
version='0.1dev',
description='Spatial Analysis and Data Exploration',
long_description=long_description,
author='Synthicity',
author_email='ejanowicz@synthicity.com',
license='BSD',
url='https://github.com/synthicity/spandex',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python :: 2.7',
'License :: OSI Approved :: BSD License'
],
packages=find_packages(exclude=['*.tests']),
install_requires=[
'gdal>=1.10.1',
'numpy>=1.8.0',
'pandas>=0.13.1',
'psycopg2>=2.5.4',
],
extras_require={
'RasterIO': ['rasterio>=0.12', 'rasterstats>=0.4'],
'Shapely': ['shapely>=1.3.2']
}
)
|
Move RasterIO and Shapely to optional dependencies
|
Move RasterIO and Shapely to optional dependencies
|
Python
|
bsd-3-clause
|
UDST/spandex,SANDAG/spandex
|
import os.path
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
# read README as the long description
readme = 'README' if os.path.exists('README') else 'README.md'
with open(readme, 'r') as f:
long_description = f.read()
setup(
name='spandex',
version='0.1dev',
description='Spatial Analysis and Data Exploration',
long_description=long_description,
author='Synthicity',
author_email='ejanowicz@synthicity.com',
license='BSD',
url='https://github.com/synthicity/spandex',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python :: 2.7',
'License :: OSI Approved :: BSD License'
],
packages=find_packages(exclude=['*.tests']),
install_requires=[
'gdal>=1.10.1',
'numpy>=1.8.0',
'pandas>=0.13.1',
'psycopg2>=2.5.4',
'rasterio>=0.12',
'rasterstats>=0.4',
'shapely>=1.3.2'
]
)
Move RasterIO and Shapely to optional dependencies
|
import os.path
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
# read README as the long description
readme = 'README' if os.path.exists('README') else 'README.md'
with open(readme, 'r') as f:
long_description = f.read()
setup(
name='spandex',
version='0.1dev',
description='Spatial Analysis and Data Exploration',
long_description=long_description,
author='Synthicity',
author_email='ejanowicz@synthicity.com',
license='BSD',
url='https://github.com/synthicity/spandex',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python :: 2.7',
'License :: OSI Approved :: BSD License'
],
packages=find_packages(exclude=['*.tests']),
install_requires=[
'gdal>=1.10.1',
'numpy>=1.8.0',
'pandas>=0.13.1',
'psycopg2>=2.5.4',
],
extras_require={
'RasterIO': ['rasterio>=0.12', 'rasterstats>=0.4'],
'Shapely': ['shapely>=1.3.2']
}
)
|
<commit_before>import os.path
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
# read README as the long description
readme = 'README' if os.path.exists('README') else 'README.md'
with open(readme, 'r') as f:
long_description = f.read()
setup(
name='spandex',
version='0.1dev',
description='Spatial Analysis and Data Exploration',
long_description=long_description,
author='Synthicity',
author_email='ejanowicz@synthicity.com',
license='BSD',
url='https://github.com/synthicity/spandex',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python :: 2.7',
'License :: OSI Approved :: BSD License'
],
packages=find_packages(exclude=['*.tests']),
install_requires=[
'gdal>=1.10.1',
'numpy>=1.8.0',
'pandas>=0.13.1',
'psycopg2>=2.5.4',
'rasterio>=0.12',
'rasterstats>=0.4',
'shapely>=1.3.2'
]
)
<commit_msg>Move RasterIO and Shapely to optional dependencies<commit_after>
|
import os.path
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
# read README as the long description
readme = 'README' if os.path.exists('README') else 'README.md'
with open(readme, 'r') as f:
long_description = f.read()
setup(
name='spandex',
version='0.1dev',
description='Spatial Analysis and Data Exploration',
long_description=long_description,
author='Synthicity',
author_email='ejanowicz@synthicity.com',
license='BSD',
url='https://github.com/synthicity/spandex',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python :: 2.7',
'License :: OSI Approved :: BSD License'
],
packages=find_packages(exclude=['*.tests']),
install_requires=[
'gdal>=1.10.1',
'numpy>=1.8.0',
'pandas>=0.13.1',
'psycopg2>=2.5.4',
],
extras_require={
'RasterIO': ['rasterio>=0.12', 'rasterstats>=0.4'],
'Shapely': ['shapely>=1.3.2']
}
)
|
import os.path
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
# read README as the long description
readme = 'README' if os.path.exists('README') else 'README.md'
with open(readme, 'r') as f:
long_description = f.read()
setup(
name='spandex',
version='0.1dev',
description='Spatial Analysis and Data Exploration',
long_description=long_description,
author='Synthicity',
author_email='ejanowicz@synthicity.com',
license='BSD',
url='https://github.com/synthicity/spandex',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python :: 2.7',
'License :: OSI Approved :: BSD License'
],
packages=find_packages(exclude=['*.tests']),
install_requires=[
'gdal>=1.10.1',
'numpy>=1.8.0',
'pandas>=0.13.1',
'psycopg2>=2.5.4',
'rasterio>=0.12',
'rasterstats>=0.4',
'shapely>=1.3.2'
]
)
Move RasterIO and Shapely to optional dependenciesimport os.path
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
# read README as the long description
readme = 'README' if os.path.exists('README') else 'README.md'
with open(readme, 'r') as f:
long_description = f.read()
setup(
name='spandex',
version='0.1dev',
description='Spatial Analysis and Data Exploration',
long_description=long_description,
author='Synthicity',
author_email='ejanowicz@synthicity.com',
license='BSD',
url='https://github.com/synthicity/spandex',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python :: 2.7',
'License :: OSI Approved :: BSD License'
],
packages=find_packages(exclude=['*.tests']),
install_requires=[
'gdal>=1.10.1',
'numpy>=1.8.0',
'pandas>=0.13.1',
'psycopg2>=2.5.4',
],
extras_require={
'RasterIO': ['rasterio>=0.12', 'rasterstats>=0.4'],
'Shapely': ['shapely>=1.3.2']
}
)
|
<commit_before>import os.path
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
# read README as the long description
readme = 'README' if os.path.exists('README') else 'README.md'
with open(readme, 'r') as f:
long_description = f.read()
setup(
name='spandex',
version='0.1dev',
description='Spatial Analysis and Data Exploration',
long_description=long_description,
author='Synthicity',
author_email='ejanowicz@synthicity.com',
license='BSD',
url='https://github.com/synthicity/spandex',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python :: 2.7',
'License :: OSI Approved :: BSD License'
],
packages=find_packages(exclude=['*.tests']),
install_requires=[
'gdal>=1.10.1',
'numpy>=1.8.0',
'pandas>=0.13.1',
'psycopg2>=2.5.4',
'rasterio>=0.12',
'rasterstats>=0.4',
'shapely>=1.3.2'
]
)
<commit_msg>Move RasterIO and Shapely to optional dependencies<commit_after>import os.path
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
# read README as the long description
readme = 'README' if os.path.exists('README') else 'README.md'
with open(readme, 'r') as f:
long_description = f.read()
setup(
name='spandex',
version='0.1dev',
description='Spatial Analysis and Data Exploration',
long_description=long_description,
author='Synthicity',
author_email='ejanowicz@synthicity.com',
license='BSD',
url='https://github.com/synthicity/spandex',
classifiers=[
'Development Status :: 4 - Beta',
'Programming Language :: Python :: 2.7',
'License :: OSI Approved :: BSD License'
],
packages=find_packages(exclude=['*.tests']),
install_requires=[
'gdal>=1.10.1',
'numpy>=1.8.0',
'pandas>=0.13.1',
'psycopg2>=2.5.4',
],
extras_require={
'RasterIO': ['rasterio>=0.12', 'rasterstats>=0.4'],
'Shapely': ['shapely>=1.3.2']
}
)
|
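An aside on the spandex record above: once rasterio and shapely live in extras_require, they install only on request (for example pip install spandex[RasterIO]), so code that uses them usually guards the import. A minimal sketch of that pattern; the function name and error message are illustrative, not taken from spandex:

# Sketch of the optional-dependency guard that typically accompanies extras_require.
try:
    import rasterio            # present only if the 'RasterIO' extra was installed
except ImportError:
    rasterio = None

def read_first_band(path):
    # Fail with a clear hint only when the optional feature is actually used.
    if rasterio is None:
        raise ImportError("rasterio is missing; install it via the 'RasterIO' extra")
    with rasterio.open(path) as src:
        return src.read(1)
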
f764b52558cb02b8e31b9695a724e4c4e80872dd
|
iscc_bench/readers/__init__.py
|
iscc_bench/readers/__init__.py
|
# -*- coding: utf-8 -*-
from iscc_bench.readers.bxbooks import bxbooks
from iscc_bench.readers.dnbrdf import dnbrdf
from iscc_bench.readers.harvard import harvard
from iscc_bench.readers.openlibrary import openlibrary
from iscc_bench.readers.libgen import libgen
ALL_READERS = (bxbooks, dnbrdf, harvard, openlibrary, libgen)
|
# -*- coding: utf-8 -*-
from iscc_bench.readers.bxbooks import bxbooks
from iscc_bench.readers.dnbrdf import dnbrdf
from iscc_bench.readers.harvard import harvard
from iscc_bench.readers.openlibrary import openlibrary
from iscc_bench.readers.libgen import libgen
from iscc_bench.readers.caltech101 import caltech_101
from iscc_bench.readers.caltech256 import caltech_256
ALL_READERS = (bxbooks, dnbrdf, harvard, openlibrary, libgen)
ALL_IMAGE_READERS = (caltech_101, caltech_256)
|
Add image readers to package scope
|
Add image readers to package scope
|
Python
|
bsd-2-clause
|
coblo/isccbench
|
# -*- coding: utf-8 -*-
from iscc_bench.readers.bxbooks import bxbooks
from iscc_bench.readers.dnbrdf import dnbrdf
from iscc_bench.readers.harvard import harvard
from iscc_bench.readers.openlibrary import openlibrary
from iscc_bench.readers.libgen import libgen
ALL_READERS = (bxbooks, dnbrdf, harvard, openlibrary, libgen)
Add image readers to package scope
|
# -*- coding: utf-8 -*-
from iscc_bench.readers.bxbooks import bxbooks
from iscc_bench.readers.dnbrdf import dnbrdf
from iscc_bench.readers.harvard import harvard
from iscc_bench.readers.openlibrary import openlibrary
from iscc_bench.readers.libgen import libgen
from iscc_bench.readers.caltech101 import caltech_101
from iscc_bench.readers.caltech256 import caltech_256
ALL_READERS = (bxbooks, dnbrdf, harvard, openlibrary, libgen)
ALL_IMAGE_READERS = (caltech_101, caltech_256)
|
<commit_before># -*- coding: utf-8 -*-
from iscc_bench.readers.bxbooks import bxbooks
from iscc_bench.readers.dnbrdf import dnbrdf
from iscc_bench.readers.harvard import harvard
from iscc_bench.readers.openlibrary import openlibrary
from iscc_bench.readers.libgen import libgen
ALL_READERS = (bxbooks, dnbrdf, harvard, openlibrary, libgen)
<commit_msg>Add image readers to package scope<commit_after>
|
# -*- coding: utf-8 -*-
from iscc_bench.readers.bxbooks import bxbooks
from iscc_bench.readers.dnbrdf import dnbrdf
from iscc_bench.readers.harvard import harvard
from iscc_bench.readers.openlibrary import openlibrary
from iscc_bench.readers.libgen import libgen
from iscc_bench.readers.caltech101 import caltech_101
from iscc_bench.readers.caltech256 import caltech_256
ALL_READERS = (bxbooks, dnbrdf, harvard, openlibrary, libgen)
ALL_IMAGE_READERS = (caltech_101, caltech_256)
|
# -*- coding: utf-8 -*-
from iscc_bench.readers.bxbooks import bxbooks
from iscc_bench.readers.dnbrdf import dnbrdf
from iscc_bench.readers.harvard import harvard
from iscc_bench.readers.openlibrary import openlibrary
from iscc_bench.readers.libgen import libgen
ALL_READERS = (bxbooks, dnbrdf, harvard, openlibrary, libgen)
Add image readers to package scope# -*- coding: utf-8 -*-
from iscc_bench.readers.bxbooks import bxbooks
from iscc_bench.readers.dnbrdf import dnbrdf
from iscc_bench.readers.harvard import harvard
from iscc_bench.readers.openlibrary import openlibrary
from iscc_bench.readers.libgen import libgen
from iscc_bench.readers.caltech101 import caltech_101
from iscc_bench.readers.caltech256 import caltech_256
ALL_READERS = (bxbooks, dnbrdf, harvard, openlibrary, libgen)
ALL_IMAGE_READERS = (caltech_101, caltech_256)
|
<commit_before># -*- coding: utf-8 -*-
from iscc_bench.readers.bxbooks import bxbooks
from iscc_bench.readers.dnbrdf import dnbrdf
from iscc_bench.readers.harvard import harvard
from iscc_bench.readers.openlibrary import openlibrary
from iscc_bench.readers.libgen import libgen
ALL_READERS = (bxbooks, dnbrdf, harvard, openlibrary, libgen)
<commit_msg>Add image readers to package scope<commit_after># -*- coding: utf-8 -*-
from iscc_bench.readers.bxbooks import bxbooks
from iscc_bench.readers.dnbrdf import dnbrdf
from iscc_bench.readers.harvard import harvard
from iscc_bench.readers.openlibrary import openlibrary
from iscc_bench.readers.libgen import libgen
from iscc_bench.readers.caltech101 import caltech_101
from iscc_bench.readers.caltech256 import caltech_256
ALL_READERS = (bxbooks, dnbrdf, harvard, openlibrary, libgen)
ALL_IMAGE_READERS = (caltech_101, caltech_256)
|
711c992a89f9a6118d2b274e2a526be62e670a92
|
examples/flask_server.py
|
examples/flask_server.py
|
from flask import Flask, request # type: ignore
from jsonrpcserver import method, dispatch, Result, Success
app = Flask(__name__)
@method
def ping() -> Result:
return Success("pong")
@app.route("/", methods=["POST"])
def index() -> str:
return dispatch(request.get_data().decode())
if __name__ == "__main__":
app.run()
|
from flask import Flask, Response, request # type: ignore
from jsonrpcserver import Result, Success, dispatch, method
app = Flask(__name__)
@method
def ping() -> Result:
return Success("pong")
@app.route("/", methods=["POST"])
def index() -> str:
return Response(
dispatch(request.get_data().decode()), content_type="application/json"
)
if __name__ == "__main__":
app.run()
|
Set content-type in flask example
|
Set content-type in flask example
|
Python
|
mit
|
bcb/jsonrpcserver
|
from flask import Flask, request # type: ignore
from jsonrpcserver import method, dispatch, Result, Success
app = Flask(__name__)
@method
def ping() -> Result:
return Success("pong")
@app.route("/", methods=["POST"])
def index() -> str:
return dispatch(request.get_data().decode())
if __name__ == "__main__":
app.run()
Set content-type in flask example
|
from flask import Flask, Response, request # type: ignore
from jsonrpcserver import Result, Success, dispatch, method
app = Flask(__name__)
@method
def ping() -> Result:
return Success("pong")
@app.route("/", methods=["POST"])
def index() -> str:
return Response(
dispatch(request.get_data().decode()), content_type="application/json"
)
if __name__ == "__main__":
app.run()
|
<commit_before>from flask import Flask, request # type: ignore
from jsonrpcserver import method, dispatch, Result, Success
app = Flask(__name__)
@method
def ping() -> Result:
return Success("pong")
@app.route("/", methods=["POST"])
def index() -> str:
return dispatch(request.get_data().decode())
if __name__ == "__main__":
app.run()
<commit_msg>Set content-type in flask example<commit_after>
|
from flask import Flask, Response, request # type: ignore
from jsonrpcserver import Result, Success, dispatch, method
app = Flask(__name__)
@method
def ping() -> Result:
return Success("pong")
@app.route("/", methods=["POST"])
def index() -> str:
return Response(
dispatch(request.get_data().decode()), content_type="application/json"
)
if __name__ == "__main__":
app.run()
|
from flask import Flask, request # type: ignore
from jsonrpcserver import method, dispatch, Result, Success
app = Flask(__name__)
@method
def ping() -> Result:
return Success("pong")
@app.route("/", methods=["POST"])
def index() -> str:
return dispatch(request.get_data().decode())
if __name__ == "__main__":
app.run()
Set content-type in flask examplefrom flask import Flask, Response, request # type: ignore
from jsonrpcserver import Result, Success, dispatch, method
app = Flask(__name__)
@method
def ping() -> Result:
return Success("pong")
@app.route("/", methods=["POST"])
def index() -> str:
return Response(
dispatch(request.get_data().decode()), content_type="application/json"
)
if __name__ == "__main__":
app.run()
|
<commit_before>from flask import Flask, request # type: ignore
from jsonrpcserver import method, dispatch, Result, Success
app = Flask(__name__)
@method
def ping() -> Result:
return Success("pong")
@app.route("/", methods=["POST"])
def index() -> str:
return dispatch(request.get_data().decode())
if __name__ == "__main__":
app.run()
<commit_msg>Set content-type in flask example<commit_after>from flask import Flask, Response, request # type: ignore
from jsonrpcserver import Result, Success, dispatch, method
app = Flask(__name__)
@method
def ping() -> Result:
return Success("pong")
@app.route("/", methods=["POST"])
def index() -> str:
return Response(
dispatch(request.get_data().decode()), content_type="application/json"
)
if __name__ == "__main__":
app.run()
|
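An aside on the flask example above: wrapping dispatch() in a Response sets the reply's Content-Type to application/json instead of Flask's default text/html. A quick client-side check, assuming the app is running locally on Flask's default port:

import requests

# JSON-RPC 2.0 call to the ping method defined in the example.
resp = requests.post(
    "http://127.0.0.1:5000/",
    json={"jsonrpc": "2.0", "method": "ping", "id": 1},
)
print(resp.headers.get("Content-Type"))  # application/json
print(resp.json())                       # expected: {'jsonrpc': '2.0', 'result': 'pong', 'id': 1}
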
8d01e536f0d3ce3332b3538155f0a5dd11cef16d
|
csv2ofx/mappings/gls.py
|
csv2ofx/mappings/gls.py
|
# coding: utf-8
from __future__ import absolute_import
from operator import itemgetter
mapping = {
'has_header': True,
'currency': 'EUR',
'delimiter': ';',
'bank': 'GLS Bank',
'account': itemgetter('Kontonummer'),
# Chop up the dotted German date format and put it in ridiculous M/D/Y order
'date': lambda r:
r['Buchungstag'][3:5] + '/' + r['Buchungstag'][:2] + '/' +
r['Buchungstag'][-4:],
# locale.atof does not actually know how to deal with German separators.
# So we do it the crude way
'amount': lambda r: r['Betrag'].replace('.', '').replace(',', '.'),
'desc': itemgetter('Buchungstext'),
'payee': itemgetter(u'Auftraggeber/Empfänger'),
}
|
# coding: utf-8
from __future__ import absolute_import
from operator import itemgetter
mapping = {
'has_header': True,
'currency': 'EUR',
'delimiter': ';',
'bank': 'GLS Bank',
'account': itemgetter('Kontonummer'),
# Chop up the dotted German date format and put it in ridiculous M/D/Y order
'date': lambda r:
r['Buchungstag'][3:5] + '/' + r['Buchungstag'][:2] + '/' +
r['Buchungstag'][-4:],
# locale.atof does not actually know how to deal with German separators.
# So we do it the crude way
'amount': lambda r: r['Betrag'].replace('.', '').replace(',', '.'),
'desc': itemgetter('Buchungstext'),
# Unicode marker required for python2.7
'payee': itemgetter(u'Auftraggeber/Empfänger'),
}
|
Add comment about python2.7 specific code.
|
Add comment about python2.7 specific code.
|
Python
|
mit
|
reubano/csv2ofx,reubano/csv2ofx
|
# coding: utf-8
from __future__ import absolute_import
from operator import itemgetter
mapping = {
'has_header': True,
'currency': 'EUR',
'delimiter': ';',
'bank': 'GLS Bank',
'account': itemgetter('Kontonummer'),
# Chop up the dotted German date format and put it in ridiculous M/D/Y order
'date': lambda r:
r['Buchungstag'][3:5] + '/' + r['Buchungstag'][:2] + '/' +
r['Buchungstag'][-4:],
# locale.atof does not actually know how to deal with German separators.
# So we do it the crude way
'amount': lambda r: r['Betrag'].replace('.', '').replace(',', '.'),
'desc': itemgetter('Buchungstext'),
'payee': itemgetter(u'Auftraggeber/Empfänger'),
}
Add comment about python2.7 specific code.
|
# coding: utf-8
from __future__ import absolute_import
from operator import itemgetter
mapping = {
'has_header': True,
'currency': 'EUR',
'delimiter': ';',
'bank': 'GLS Bank',
'account': itemgetter('Kontonummer'),
# Chop up the dotted German date format and put it in ridiculous M/D/Y order
'date': lambda r:
r['Buchungstag'][3:5] + '/' + r['Buchungstag'][:2] + '/' +
r['Buchungstag'][-4:],
# locale.atof does not actually know how to deal with German separators.
# So we do it the crude way
'amount': lambda r: r['Betrag'].replace('.', '').replace(',', '.'),
'desc': itemgetter('Buchungstext'),
# Unicode marker required for python2.7
'payee': itemgetter(u'Auftraggeber/Empfänger'),
}
|
<commit_before># coding: utf-8
from __future__ import absolute_import
from operator import itemgetter
mapping = {
'has_header': True,
'currency': 'EUR',
'delimiter': ';',
'bank': 'GLS Bank',
'account': itemgetter('Kontonummer'),
# Chop up the dotted German date format and put it in ridiculous M/D/Y order
'date': lambda r:
r['Buchungstag'][3:5] + '/' + r['Buchungstag'][:2] + '/' +
r['Buchungstag'][-4:],
# locale.atof does not actually know how to deal with German separators.
# So we do it the crude way
'amount': lambda r: r['Betrag'].replace('.', '').replace(',', '.'),
'desc': itemgetter('Buchungstext'),
'payee': itemgetter(u'Auftraggeber/Empfänger'),
}
<commit_msg>Add comment about python2.7 specific code.<commit_after>
|
# coding: utf-8
from __future__ import absolute_import
from operator import itemgetter
mapping = {
'has_header': True,
'currency': 'EUR',
'delimiter': ';',
'bank': 'GLS Bank',
'account': itemgetter('Kontonummer'),
# Chop up the dotted German date format and put it in ridiculous M/D/Y order
'date': lambda r:
r['Buchungstag'][3:5] + '/' + r['Buchungstag'][:2] + '/' +
r['Buchungstag'][-4:],
# locale.atof does not actually know how to deal with German separators.
# So we do it the crude way
'amount': lambda r: r['Betrag'].replace('.', '').replace(',', '.'),
'desc': itemgetter('Buchungstext'),
# Unicode marker required for python2.7
'payee': itemgetter(u'Auftraggeber/Empfänger'),
}
|
# coding: utf-8
from __future__ import absolute_import
from operator import itemgetter
mapping = {
'has_header': True,
'currency': 'EUR',
'delimiter': ';',
'bank': 'GLS Bank',
'account': itemgetter('Kontonummer'),
# Chop up the dotted German date format and put it in ridiculous M/D/Y order
'date': lambda r:
r['Buchungstag'][3:5] + '/' + r['Buchungstag'][:2] + '/' +
r['Buchungstag'][-4:],
# locale.atof does not actually know how to deal with German separators.
# So we do it the crude way
'amount': lambda r: r['Betrag'].replace('.', '').replace(',', '.'),
'desc': itemgetter('Buchungstext'),
'payee': itemgetter(u'Auftraggeber/Empfänger'),
}
Add comment about python2.7 specific code.# coding: utf-8
from __future__ import absolute_import
from operator import itemgetter
mapping = {
'has_header': True,
'currency': 'EUR',
'delimiter': ';',
'bank': 'GLS Bank',
'account': itemgetter('Kontonummer'),
# Chop up the dotted German date format and put it in ridiculous M/D/Y order
'date': lambda r:
r['Buchungstag'][3:5] + '/' + r['Buchungstag'][:2] + '/' +
r['Buchungstag'][-4:],
# locale.atof does not actually know how to deal with German separators.
# So we do it the crude way
'amount': lambda r: r['Betrag'].replace('.', '').replace(',', '.'),
'desc': itemgetter('Buchungstext'),
# Unicode marker required for python2.7
'payee': itemgetter(u'Auftraggeber/Empfänger'),
}
|
<commit_before># coding: utf-8
from __future__ import absolute_import
from operator import itemgetter
mapping = {
'has_header': True,
'currency': 'EUR',
'delimiter': ';',
'bank': 'GLS Bank',
'account': itemgetter('Kontonummer'),
# Chop up the dotted German date format and put it in ridiculous M/D/Y order
'date': lambda r:
r['Buchungstag'][3:5] + '/' + r['Buchungstag'][:2] + '/' +
r['Buchungstag'][-4:],
# locale.atof does not actually know how to deal with German separators.
# So we do it the crude way
'amount': lambda r: r['Betrag'].replace('.', '').replace(',', '.'),
'desc': itemgetter('Buchungstext'),
'payee': itemgetter(u'Auftraggeber/Empfänger'),
}
<commit_msg>Add comment about python2.7 specific code.<commit_after># coding: utf-8
from __future__ import absolute_import
from operator import itemgetter
mapping = {
'has_header': True,
'currency': 'EUR',
'delimiter': ';',
'bank': 'GLS Bank',
'account': itemgetter('Kontonummer'),
# Chop up the dotted German date format and put it in ridiculous M/D/Y order
'date': lambda r:
r['Buchungstag'][3:5] + '/' + r['Buchungstag'][:2] + '/' +
r['Buchungstag'][-4:],
# locale.atof does not actually know how to deal with German separators.
# So we do it the crude way
'amount': lambda r: r['Betrag'].replace('.', '').replace(',', '.'),
'desc': itemgetter('Buchungstext'),
# Unicode marker required for python2.7
'payee': itemgetter(u'Auftraggeber/Empfänger'),
}
|
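An aside on the gls mapping above: the date and amount fields are converted with plain string slicing and replace calls rather than locale-aware parsing. A worked example with an invented row, using the same expressions as the mapping:

# Invented sample row using the column names from the gls mapping.
row = {'Buchungstag': '31.12.2015', 'Betrag': '1.234,56'}

# DD.MM.YYYY -> M/D/Y, exactly as the 'date' lambda does it.
date = row['Buchungstag'][3:5] + '/' + row['Buchungstag'][:2] + '/' + row['Buchungstag'][-4:]

# German thousands/decimal separators -> plain float notation.
amount = row['Betrag'].replace('.', '').replace(',', '.')

print(date)    # 12/31/2015
print(amount)  # 1234.56
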
cfb995b21cbeac74b7ae80980ccd299c613d00db
|
ctypeslib/test/stdio.py
|
ctypeslib/test/stdio.py
|
import os
from ctypeslib.dynamic_module import include
from ctypes import *
import logging
logging.basicConfig(level=logging.INFO)
if os.name == "nt":
_libc = CDLL("msvcrt")
else:
_libc = CDLL(None)
include("""\
#include <stdio.h>
#ifdef _MSC_VER
# include <fcntl.h>
#else
# include <sys/fcntl.h>
#endif
""",
persist=False)
|
import os
from ctypeslib.dynamic_module import include
from ctypes import *
if os.name == "nt":
_libc = CDLL("msvcrt")
else:
_libc = CDLL(None)
include("""\
#include <stdio.h>
#ifdef _MSC_VER
# include <fcntl.h>
#else
# include <sys/fcntl.h>
#endif
""",
persist=False)
|
Remove the logging setup call.
|
Remove the logging setup call.
git-svn-id: ac2c3632cb6543e7ab5fafd132c7fe15057a1882@52678 6015fed2-1504-0410-9fe1-9d1591cc4771
|
Python
|
mit
|
luzfcb/ctypeslib,trolldbois/ctypeslib,trolldbois/ctypeslib,luzfcb/ctypeslib,luzfcb/ctypeslib,trolldbois/ctypeslib
|
import os
from ctypeslib.dynamic_module import include
from ctypes import *
import logging
logging.basicConfig(level=logging.INFO)
if os.name == "nt":
_libc = CDLL("msvcrt")
else:
_libc = CDLL(None)
include("""\
#include <stdio.h>
#ifdef _MSC_VER
# include <fcntl.h>
#else
# include <sys/fcntl.h>
#endif
""",
persist=False)
Remove the logging setup call.
git-svn-id: ac2c3632cb6543e7ab5fafd132c7fe15057a1882@52678 6015fed2-1504-0410-9fe1-9d1591cc4771
|
import os
from ctypeslib.dynamic_module import include
from ctypes import *
if os.name == "nt":
_libc = CDLL("msvcrt")
else:
_libc = CDLL(None)
include("""\
#include <stdio.h>
#ifdef _MSC_VER
# include <fcntl.h>
#else
# include <sys/fcntl.h>
#endif
""",
persist=False)
|
<commit_before>import os
from ctypeslib.dynamic_module import include
from ctypes import *
import logging
logging.basicConfig(level=logging.INFO)
if os.name == "nt":
_libc = CDLL("msvcrt")
else:
_libc = CDLL(None)
include("""\
#include <stdio.h>
#ifdef _MSC_VER
# include <fcntl.h>
#else
# include <sys/fcntl.h>
#endif
""",
persist=False)
<commit_msg>Remove the logging setup call.
git-svn-id: ac2c3632cb6543e7ab5fafd132c7fe15057a1882@52678 6015fed2-1504-0410-9fe1-9d1591cc4771<commit_after>
|
import os
from ctypeslib.dynamic_module import include
from ctypes import *
if os.name == "nt":
_libc = CDLL("msvcrt")
else:
_libc = CDLL(None)
include("""\
#include <stdio.h>
#ifdef _MSC_VER
# include <fcntl.h>
#else
# include <sys/fcntl.h>
#endif
""",
persist=False)
|
import os
from ctypeslib.dynamic_module import include
from ctypes import *
import logging
logging.basicConfig(level=logging.INFO)
if os.name == "nt":
_libc = CDLL("msvcrt")
else:
_libc = CDLL(None)
include("""\
#include <stdio.h>
#ifdef _MSC_VER
# include <fcntl.h>
#else
# include <sys/fcntl.h>
#endif
""",
persist=False)
Remove the logging setup call.
git-svn-id: ac2c3632cb6543e7ab5fafd132c7fe15057a1882@52678 6015fed2-1504-0410-9fe1-9d1591cc4771import os
from ctypeslib.dynamic_module import include
from ctypes import *
if os.name == "nt":
_libc = CDLL("msvcrt")
else:
_libc = CDLL(None)
include("""\
#include <stdio.h>
#ifdef _MSC_VER
# include <fcntl.h>
#else
# include <sys/fcntl.h>
#endif
""",
persist=False)
|
<commit_before>import os
from ctypeslib.dynamic_module import include
from ctypes import *
import logging
logging.basicConfig(level=logging.INFO)
if os.name == "nt":
_libc = CDLL("msvcrt")
else:
_libc = CDLL(None)
include("""\
#include <stdio.h>
#ifdef _MSC_VER
# include <fcntl.h>
#else
# include <sys/fcntl.h>
#endif
""",
persist=False)
<commit_msg>Remove the logging setup call.
git-svn-id: ac2c3632cb6543e7ab5fafd132c7fe15057a1882@52678 6015fed2-1504-0410-9fe1-9d1591cc4771<commit_after>import os
from ctypeslib.dynamic_module import include
from ctypes import *
if os.name == "nt":
_libc = CDLL("msvcrt")
else:
_libc = CDLL(None)
include("""\
#include <stdio.h>
#ifdef _MSC_VER
# include <fcntl.h>
#else
# include <sys/fcntl.h>
#endif
""",
persist=False)
|
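An aside on the ctypeslib record above: logging.basicConfig() configures the process-wide root logger, which is why such calls are removed from library and test modules and left to the application. The conventional library-side pattern looks like this sketch (not taken from ctypeslib):

import logging

# A library module only creates its own logger and attaches a NullHandler,
# so importing it never reconfigures or spams the application's logging.
log = logging.getLogger(__name__)
log.addHandler(logging.NullHandler())

def do_work():
    log.debug("detail the application can enable by configuring logging itself")
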
373c1d981ffcac9d996bcfa663463f3d8d19a75d
|
setup.py
|
setup.py
|
from distutils.extension import Extension
from setuptools import setup, Extension
from Cython.Distutils import build_ext
import os.path
here = os.path.dirname(os.path.abspath(__file__))
ext_modules = [Extension("sass",
["sass.pyx"],
libraries=["sass", 'stdc++']
)]
setup(
name = 'sass',
cmdclass = {'build_ext': build_ext},
ext_modules = ext_modules,
version = '1.0',
author = 'Sergey Kirilov',
author_email = 'sergey.kirillov@gmail.com',
url='https://github.com/pistolero/python-scss',
install_requires=['Cython'],
license="Apache License 2.0",
keywords="sass scss libsass",
description='Python bindings for libsass',
long_description=open(os.path.join(here, 'README.rst')).read().decode('utf-8')
)
|
from distutils.extension import Extension
from setuptools import setup, Extension
from Cython.Distutils import build_ext
import os.path
here = os.path.dirname(os.path.abspath(__file__))
ext_modules = [Extension("sass",
["sass.pyx"],
libraries=["sass", 'stdc++']
)]
setup(
name = 'sass',
cmdclass = {'build_ext': build_ext},
ext_modules = ext_modules,
version = '1.0',
author = 'Sergey Kirilov',
author_email = 'sergey.kirillov@gmail.com',
url='https://github.com/pistolero/python-scss',
install_requires=['Cython'],
license="Apache License 2.0",
keywords="sass scss libsass",
description='Python bindings for libsass',
long_description=open(os.path.join(here, 'README.rst'), 'rb').read().decode('utf-8')
)
|
Add 'rb' to open() to support python 3
|
Add 'rb' to open() to support python 3
|
Python
|
apache-2.0
|
Pulsevoid/python-scss,pistolero/python-scss,Pulsevoid/python-scss,pistolero/python-scss
|
from distutils.extension import Extension
from setuptools import setup, Extension
from Cython.Distutils import build_ext
import os.path
here = os.path.dirname(os.path.abspath(__file__))
ext_modules = [Extension("sass",
["sass.pyx"],
libraries=["sass", 'stdc++']
)]
setup(
name = 'sass',
cmdclass = {'build_ext': build_ext},
ext_modules = ext_modules,
version = '1.0',
author = 'Sergey Kirilov',
author_email = 'sergey.kirillov@gmail.com',
url='https://github.com/pistolero/python-scss',
install_requires=['Cython'],
license="Apache License 2.0",
keywords="sass scss libsass",
description='Python bindings for libsass',
long_description=open(os.path.join(here, 'README.rst')).read().decode('utf-8')
)Add 'rb' to open() to support python 3
|
from distutils.extension import Extension
from setuptools import setup, Extension
from Cython.Distutils import build_ext
import os.path
here = os.path.dirname(os.path.abspath(__file__))
ext_modules = [Extension("sass",
["sass.pyx"],
libraries=["sass", 'stdc++']
)]
setup(
name = 'sass',
cmdclass = {'build_ext': build_ext},
ext_modules = ext_modules,
version = '1.0',
author = 'Sergey Kirilov',
author_email = 'sergey.kirillov@gmail.com',
url='https://github.com/pistolero/python-scss',
install_requires=['Cython'],
license="Apache License 2.0",
keywords="sass scss libsass",
description='Python bindings for libsass',
long_description=open(os.path.join(here, 'README.rst'), 'rb').read().decode('utf-8')
)
|
<commit_before>from distutils.extension import Extension
from setuptools import setup, Extension
from Cython.Distutils import build_ext
import os.path
here = os.path.dirname(os.path.abspath(__file__))
ext_modules = [Extension("sass",
["sass.pyx"],
libraries=["sass", 'stdc++']
)]
setup(
name = 'sass',
cmdclass = {'build_ext': build_ext},
ext_modules = ext_modules,
version = '1.0',
author = 'Sergey Kirilov',
author_email = 'sergey.kirillov@gmail.com',
url='https://github.com/pistolero/python-scss',
install_requires=['Cython'],
license="Apache License 2.0",
keywords="sass scss libsass",
description='Python bindings for libsass',
long_description=open(os.path.join(here, 'README.rst')).read().decode('utf-8')
)<commit_msg>Add 'rb' to open() to support python 3<commit_after>
|
from distutils.extension import Extension
from setuptools import setup, Extension
from Cython.Distutils import build_ext
import os.path
here = os.path.dirname(os.path.abspath(__file__))
ext_modules = [Extension("sass",
["sass.pyx"],
libraries=["sass", 'stdc++']
)]
setup(
name = 'sass',
cmdclass = {'build_ext': build_ext},
ext_modules = ext_modules,
version = '1.0',
author = 'Sergey Kirilov',
author_email = 'sergey.kirillov@gmail.com',
url='https://github.com/pistolero/python-scss',
install_requires=['Cython'],
license="Apache License 2.0",
keywords="sass scss libsass",
description='Python bindings for libsass',
long_description=open(os.path.join(here, 'README.rst'), 'rb').read().decode('utf-8')
)
|
from distutils.extension import Extension
from setuptools import setup, Extension
from Cython.Distutils import build_ext
import os.path
here = os.path.dirname(os.path.abspath(__file__))
ext_modules = [Extension("sass",
["sass.pyx"],
libraries=["sass", 'stdc++']
)]
setup(
name = 'sass',
cmdclass = {'build_ext': build_ext},
ext_modules = ext_modules,
version = '1.0',
author = 'Sergey Kirilov',
author_email = 'sergey.kirillov@gmail.com',
url='https://github.com/pistolero/python-scss',
install_requires=['Cython'],
license="Apache License 2.0",
keywords="sass scss libsass",
description='Python bindings for libsass',
long_description=open(os.path.join(here, 'README.rst')).read().decode('utf-8')
)Add 'rb' to open() to support python 3from distutils.extension import Extension
from setuptools import setup, Extension
from Cython.Distutils import build_ext
import os.path
here = os.path.dirname(os.path.abspath(__file__))
ext_modules = [Extension("sass",
["sass.pyx"],
libraries=["sass", 'stdc++']
)]
setup(
name = 'sass',
cmdclass = {'build_ext': build_ext},
ext_modules = ext_modules,
version = '1.0',
author = 'Sergey Kirilov',
author_email = 'sergey.kirillov@gmail.com',
url='https://github.com/pistolero/python-scss',
install_requires=['Cython'],
license="Apache License 2.0",
keywords="sass scss libsass",
description='Python bindings for libsass',
long_description=open(os.path.join(here, 'README.rst'), 'rb').read().decode('utf-8')
)
|
<commit_before>from distutils.extension import Extension
from setuptools import setup, Extension
from Cython.Distutils import build_ext
import os.path
here = os.path.dirname(os.path.abspath(__file__))
ext_modules = [Extension("sass",
["sass.pyx"],
libraries=["sass", 'stdc++']
)]
setup(
name = 'sass',
cmdclass = {'build_ext': build_ext},
ext_modules = ext_modules,
version = '1.0',
author = 'Sergey Kirilov',
author_email = 'sergey.kirillov@gmail.com',
url='https://github.com/pistolero/python-scss',
install_requires=['Cython'],
license="Apache License 2.0",
keywords="sass scss libsass",
description='Python bindings for libsass',
long_description=open(os.path.join(here, 'README.rst')).read().decode('utf-8')
)<commit_msg>Add 'rb' to open() to support python 3<commit_after>from distutils.extension import Extension
from setuptools import setup, Extension
from Cython.Distutils import build_ext
import os.path
here = os.path.dirname(os.path.abspath(__file__))
ext_modules = [Extension("sass",
["sass.pyx"],
libraries=["sass", 'stdc++']
)]
setup(
name = 'sass',
cmdclass = {'build_ext': build_ext},
ext_modules = ext_modules,
version = '1.0',
author = 'Sergey Kirilov',
author_email = 'sergey.kirillov@gmail.com',
url='https://github.com/pistolero/python-scss',
install_requires=['Cython'],
license="Apache License 2.0",
keywords="sass scss libsass",
description='Python bindings for libsass',
long_description=open(os.path.join(here, 'README.rst'), 'rb').read().decode('utf-8')
)
|
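An aside on the python-scss record above: on Python 3, open(path).read() already returns str, so calling .decode('utf-8') on it fails; reading bytes first makes the decode valid on both Python 2 and 3. Two equivalent ways to read the long description (file name as in the record):

import io
import os.path

here = os.path.dirname(os.path.abspath(__file__))
readme = os.path.join(here, 'README.rst')

# The fix from the record: read bytes, then decode explicitly.
with open(readme, 'rb') as f:
    long_description = f.read().decode('utf-8')

# An equally portable alternative: let io.open handle the decoding.
with io.open(readme, encoding='utf-8') as f:
    long_description = f.read()
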
1d1c15107f017a5f940cfe7fb6cf344ec07268b6
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='zeit.push',
version='1.21.0.dev0',
author='gocept, Zeit Online',
author_email='zon-backend@zeit.de',
url='http://www.zeit.de/',
description="Sending push notifications through various providers",
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
license='BSD',
namespace_packages=['zeit'],
install_requires=[
'fb',
'gocept.testing',
'grokcore.component',
'mock',
'pytz',
'requests',
'setuptools',
'tweepy',
'urbanairship >= 1.0',
'zc.sourcefactory',
'zeit.cms >= 2.102.0.dev0',
'zeit.content.article',
'zeit.content.image',
'zeit.objectlog',
'zope.app.appsetup',
'zope.component',
'zope.formlib',
'zope.interface',
'zope.schema',
],
entry_points={
'console_scripts': [
'facebook-access-token = zeit.push.facebook:create_access_token',
'parse-payload-doc = zeit.push.parse:print_payload_documentation',
'ua-payload-doc = zeit.push.urbanairship:print_payload_documentation',
],
'fanstatic.libraries': [
'zeit_push=zeit.push.browser.resources:lib',
],
},
)
|
from setuptools import setup, find_packages
setup(
name='zeit.push',
version='1.21.0.dev0',
author='gocept, Zeit Online',
author_email='zon-backend@zeit.de',
url='http://www.zeit.de/',
description="Sending push notifications through various providers",
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
license='BSD',
namespace_packages=['zeit'],
install_requires=[
'fb',
'gocept.testing',
'grokcore.component',
'mock',
'pytz',
'requests',
'setuptools',
'tweepy',
'urbanairship >= 1.0',
'zc.sourcefactory',
'zeit.cms >= 2.102.0.dev0',
'zeit.content.article',
'zeit.content.image',
'zeit.objectlog',
'zope.app.appsetup',
'zope.component',
'zope.formlib',
'zope.interface',
'zope.schema',
],
entry_points={
'console_scripts': [
'facebook-access-token = zeit.push.facebook:create_access_token',
'ua-payload-doc = zeit.push.urbanairship:print_payload_documentation',
],
'fanstatic.libraries': [
'zeit_push=zeit.push.browser.resources:lib',
],
},
)
|
Remove entry point for parse payload documentation
|
Remove entry point for parse payload documentation
|
Python
|
bsd-3-clause
|
ZeitOnline/zeit.push,ZeitOnline/zeit.push
|
from setuptools import setup, find_packages
setup(
name='zeit.push',
version='1.21.0.dev0',
author='gocept, Zeit Online',
author_email='zon-backend@zeit.de',
url='http://www.zeit.de/',
description="Sending push notifications through various providers",
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
license='BSD',
namespace_packages=['zeit'],
install_requires=[
'fb',
'gocept.testing',
'grokcore.component',
'mock',
'pytz',
'requests',
'setuptools',
'tweepy',
'urbanairship >= 1.0',
'zc.sourcefactory',
'zeit.cms >= 2.102.0.dev0',
'zeit.content.article',
'zeit.content.image',
'zeit.objectlog',
'zope.app.appsetup',
'zope.component',
'zope.formlib',
'zope.interface',
'zope.schema',
],
entry_points={
'console_scripts': [
'facebook-access-token = zeit.push.facebook:create_access_token',
'parse-payload-doc = zeit.push.parse:print_payload_documentation',
'ua-payload-doc = zeit.push.urbanairship:print_payload_documentation',
],
'fanstatic.libraries': [
'zeit_push=zeit.push.browser.resources:lib',
],
},
)
Remove entry point for parse payload documentation
|
from setuptools import setup, find_packages
setup(
name='zeit.push',
version='1.21.0.dev0',
author='gocept, Zeit Online',
author_email='zon-backend@zeit.de',
url='http://www.zeit.de/',
description="Sending push notifications through various providers",
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
license='BSD',
namespace_packages=['zeit'],
install_requires=[
'fb',
'gocept.testing',
'grokcore.component',
'mock',
'pytz',
'requests',
'setuptools',
'tweepy',
'urbanairship >= 1.0',
'zc.sourcefactory',
'zeit.cms >= 2.102.0.dev0',
'zeit.content.article',
'zeit.content.image',
'zeit.objectlog',
'zope.app.appsetup',
'zope.component',
'zope.formlib',
'zope.interface',
'zope.schema',
],
entry_points={
'console_scripts': [
'facebook-access-token = zeit.push.facebook:create_access_token',
'ua-payload-doc = zeit.push.urbanairship:print_payload_documentation',
],
'fanstatic.libraries': [
'zeit_push=zeit.push.browser.resources:lib',
],
},
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='zeit.push',
version='1.21.0.dev0',
author='gocept, Zeit Online',
author_email='zon-backend@zeit.de',
url='http://www.zeit.de/',
description="Sending push notifications through various providers",
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
license='BSD',
namespace_packages=['zeit'],
install_requires=[
'fb',
'gocept.testing',
'grokcore.component',
'mock',
'pytz',
'requests',
'setuptools',
'tweepy',
'urbanairship >= 1.0',
'zc.sourcefactory',
'zeit.cms >= 2.102.0.dev0',
'zeit.content.article',
'zeit.content.image',
'zeit.objectlog',
'zope.app.appsetup',
'zope.component',
'zope.formlib',
'zope.interface',
'zope.schema',
],
entry_points={
'console_scripts': [
'facebook-access-token = zeit.push.facebook:create_access_token',
'parse-payload-doc = zeit.push.parse:print_payload_documentation',
'ua-payload-doc = zeit.push.urbanairship:print_payload_documentation',
],
'fanstatic.libraries': [
'zeit_push=zeit.push.browser.resources:lib',
],
},
)
<commit_msg>Remove entry point for parse payload documentation<commit_after>
|
from setuptools import setup, find_packages
setup(
name='zeit.push',
version='1.21.0.dev0',
author='gocept, Zeit Online',
author_email='zon-backend@zeit.de',
url='http://www.zeit.de/',
description="Sending push notifications through various providers",
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
license='BSD',
namespace_packages=['zeit'],
install_requires=[
'fb',
'gocept.testing',
'grokcore.component',
'mock',
'pytz',
'requests',
'setuptools',
'tweepy',
'urbanairship >= 1.0',
'zc.sourcefactory',
'zeit.cms >= 2.102.0.dev0',
'zeit.content.article',
'zeit.content.image',
'zeit.objectlog',
'zope.app.appsetup',
'zope.component',
'zope.formlib',
'zope.interface',
'zope.schema',
],
entry_points={
'console_scripts': [
'facebook-access-token = zeit.push.facebook:create_access_token',
'ua-payload-doc = zeit.push.urbanairship:print_payload_documentation',
],
'fanstatic.libraries': [
'zeit_push=zeit.push.browser.resources:lib',
],
},
)
|
from setuptools import setup, find_packages
setup(
name='zeit.push',
version='1.21.0.dev0',
author='gocept, Zeit Online',
author_email='zon-backend@zeit.de',
url='http://www.zeit.de/',
description="Sending push notifications through various providers",
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
license='BSD',
namespace_packages=['zeit'],
install_requires=[
'fb',
'gocept.testing',
'grokcore.component',
'mock',
'pytz',
'requests',
'setuptools',
'tweepy',
'urbanairship >= 1.0',
'zc.sourcefactory',
'zeit.cms >= 2.102.0.dev0',
'zeit.content.article',
'zeit.content.image',
'zeit.objectlog',
'zope.app.appsetup',
'zope.component',
'zope.formlib',
'zope.interface',
'zope.schema',
],
entry_points={
'console_scripts': [
'facebook-access-token = zeit.push.facebook:create_access_token',
'parse-payload-doc = zeit.push.parse:print_payload_documentation',
'ua-payload-doc = zeit.push.urbanairship:print_payload_documentation',
],
'fanstatic.libraries': [
'zeit_push=zeit.push.browser.resources:lib',
],
},
)
Remove entry point for parse payload documentationfrom setuptools import setup, find_packages
setup(
name='zeit.push',
version='1.21.0.dev0',
author='gocept, Zeit Online',
author_email='zon-backend@zeit.de',
url='http://www.zeit.de/',
description="Sending push notifications through various providers",
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
license='BSD',
namespace_packages=['zeit'],
install_requires=[
'fb',
'gocept.testing',
'grokcore.component',
'mock',
'pytz',
'requests',
'setuptools',
'tweepy',
'urbanairship >= 1.0',
'zc.sourcefactory',
'zeit.cms >= 2.102.0.dev0',
'zeit.content.article',
'zeit.content.image',
'zeit.objectlog',
'zope.app.appsetup',
'zope.component',
'zope.formlib',
'zope.interface',
'zope.schema',
],
entry_points={
'console_scripts': [
'facebook-access-token = zeit.push.facebook:create_access_token',
'ua-payload-doc = zeit.push.urbanairship:print_payload_documentation',
],
'fanstatic.libraries': [
'zeit_push=zeit.push.browser.resources:lib',
],
},
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='zeit.push',
version='1.21.0.dev0',
author='gocept, Zeit Online',
author_email='zon-backend@zeit.de',
url='http://www.zeit.de/',
description="Sending push notifications through various providers",
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
license='BSD',
namespace_packages=['zeit'],
install_requires=[
'fb',
'gocept.testing',
'grokcore.component',
'mock',
'pytz',
'requests',
'setuptools',
'tweepy',
'urbanairship >= 1.0',
'zc.sourcefactory',
'zeit.cms >= 2.102.0.dev0',
'zeit.content.article',
'zeit.content.image',
'zeit.objectlog',
'zope.app.appsetup',
'zope.component',
'zope.formlib',
'zope.interface',
'zope.schema',
],
entry_points={
'console_scripts': [
'facebook-access-token = zeit.push.facebook:create_access_token',
'parse-payload-doc = zeit.push.parse:print_payload_documentation',
'ua-payload-doc = zeit.push.urbanairship:print_payload_documentation',
],
'fanstatic.libraries': [
'zeit_push=zeit.push.browser.resources:lib',
],
},
)
<commit_msg>Remove entry point for parse payload documentation<commit_after>from setuptools import setup, find_packages
setup(
name='zeit.push',
version='1.21.0.dev0',
author='gocept, Zeit Online',
author_email='zon-backend@zeit.de',
url='http://www.zeit.de/',
description="Sending push notifications through various providers",
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
zip_safe=False,
license='BSD',
namespace_packages=['zeit'],
install_requires=[
'fb',
'gocept.testing',
'grokcore.component',
'mock',
'pytz',
'requests',
'setuptools',
'tweepy',
'urbanairship >= 1.0',
'zc.sourcefactory',
'zeit.cms >= 2.102.0.dev0',
'zeit.content.article',
'zeit.content.image',
'zeit.objectlog',
'zope.app.appsetup',
'zope.component',
'zope.formlib',
'zope.interface',
'zope.schema',
],
entry_points={
'console_scripts': [
'facebook-access-token = zeit.push.facebook:create_access_token',
'ua-payload-doc = zeit.push.urbanairship:print_payload_documentation',
],
'fanstatic.libraries': [
'zeit_push=zeit.push.browser.resources:lib',
],
},
)
|
98f210e2b3fe2451b63ad6abbf50e8ab690ef5a3
|
setup.py
|
setup.py
|
import os
import re
from setuptools import find_packages, setup
READMEFILE = "README.rst"
VERSIONFILE = os.path.join("elasticutils", "_version.py")
VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"
def get_version():
verstrline = open(VERSIONFILE, "rt").read()
mo = re.search(VSRE, verstrline, re.M)
if mo:
return mo.group(1)
else:
raise RuntimeError(
"Unable to find version string in %s." % VERSIONFILE)
setup(
name='elasticutils',
version=get_version(),
description='Chainable interface to querying ElasticSearch',
long_description=open(READMEFILE).read(),
url='https://github.com/mozilla/elasticutils',
author='Mozilla Foundation and contributors',
license='BSD',
packages=find_packages(),
install_requires=['pyes>=0.15,<0.17'],
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Natural Language :: English',
],
)
|
import os
import re
from setuptools import find_packages, setup
READMEFILE = "README.rst"
VERSIONFILE = os.path.join("elasticutils", "_version.py")
VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"
def get_version():
verstrline = open(VERSIONFILE, "rt").read()
mo = re.search(VSRE, verstrline, re.M)
if mo:
return mo.group(1)
else:
raise RuntimeError(
"Unable to find version string in %s." % VERSIONFILE)
setup(
name='elasticutils',
version=get_version(),
description='Chainable interface to querying ElasticSearch',
long_description=open(READMEFILE).read(),
url='https://github.com/mozilla/elasticutils',
author='Mozilla Foundation and contributors',
license='BSD',
packages=find_packages(),
install_requires=['pyes>=0.15,<0.17'],
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
'Natural Language :: English',
],
)
|
Add python language version classifiers
|
Add python language version classifiers
|
Python
|
bsd-3-clause
|
einvalentin/elasticutils,einvalentin/elasticutils,einvalentin/elasticutils,mozilla/elasticutils,mozilla/elasticutils,mozilla/elasticutils
|
import os
import re
from setuptools import find_packages, setup
READMEFILE = "README.rst"
VERSIONFILE = os.path.join("elasticutils", "_version.py")
VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"
def get_version():
verstrline = open(VERSIONFILE, "rt").read()
mo = re.search(VSRE, verstrline, re.M)
if mo:
return mo.group(1)
else:
raise RuntimeError(
"Unable to find version string in %s." % VERSIONFILE)
setup(
name='elasticutils',
version=get_version(),
description='Chainable interface to querying ElasticSearch',
long_description=open(READMEFILE).read(),
url='https://github.com/mozilla/elasticutils',
author='Mozilla Foundation and contributors',
license='BSD',
packages=find_packages(),
install_requires=['pyes>=0.15,<0.17'],
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Natural Language :: English',
],
)
Add python language version classifiers
|
import os
import re
from setuptools import find_packages, setup
READMEFILE = "README.rst"
VERSIONFILE = os.path.join("elasticutils", "_version.py")
VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"
def get_version():
verstrline = open(VERSIONFILE, "rt").read()
mo = re.search(VSRE, verstrline, re.M)
if mo:
return mo.group(1)
else:
raise RuntimeError(
"Unable to find version string in %s." % VERSIONFILE)
setup(
name='elasticutils',
version=get_version(),
description='Chainable interface to querying ElasticSearch',
long_description=open(READMEFILE).read(),
url='https://github.com/mozilla/elasticutils',
author='Mozilla Foundation and contributors',
license='BSD',
packages=find_packages(),
install_requires=['pyes>=0.15,<0.17'],
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
'Natural Language :: English',
],
)
|
<commit_before>import os
import re
from setuptools import find_packages, setup
READMEFILE = "README.rst"
VERSIONFILE = os.path.join("elasticutils", "_version.py")
VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"
def get_version():
verstrline = open(VERSIONFILE, "rt").read()
mo = re.search(VSRE, verstrline, re.M)
if mo:
return mo.group(1)
else:
raise RuntimeError(
"Unable to find version string in %s." % VERSIONFILE)
setup(
name='elasticutils',
version=get_version(),
description='Chainable interface to querying ElasticSearch',
long_description=open(READMEFILE).read(),
url='https://github.com/mozilla/elasticutils',
author='Mozilla Foundation and contributors',
license='BSD',
packages=find_packages(),
install_requires=['pyes>=0.15,<0.17'],
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Natural Language :: English',
],
)
<commit_msg>Add python language version classifiers<commit_after>
|
import os
import re
from setuptools import find_packages, setup
READMEFILE = "README.rst"
VERSIONFILE = os.path.join("elasticutils", "_version.py")
VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"
def get_version():
verstrline = open(VERSIONFILE, "rt").read()
mo = re.search(VSRE, verstrline, re.M)
if mo:
return mo.group(1)
else:
raise RuntimeError(
"Unable to find version string in %s." % VERSIONFILE)
setup(
name='elasticutils',
version=get_version(),
description='Chainable interface to querying ElasticSearch',
long_description=open(READMEFILE).read(),
url='https://github.com/mozilla/elasticutils',
author='Mozilla Foundation and contributors',
license='BSD',
packages=find_packages(),
install_requires=['pyes>=0.15,<0.17'],
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
'Natural Language :: English',
],
)
|
import os
import re
from setuptools import find_packages, setup
READMEFILE = "README.rst"
VERSIONFILE = os.path.join("elasticutils", "_version.py")
VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"
def get_version():
verstrline = open(VERSIONFILE, "rt").read()
mo = re.search(VSRE, verstrline, re.M)
if mo:
return mo.group(1)
else:
raise RuntimeError(
"Unable to find version string in %s." % VERSIONFILE)
setup(
name='elasticutils',
version=get_version(),
description='Chainable interface to querying ElasticSearch',
long_description=open(READMEFILE).read(),
url='https://github.com/mozilla/elasticutils',
author='Mozilla Foundation and contributors',
license='BSD',
packages=find_packages(),
install_requires=['pyes>=0.15,<0.17'],
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Natural Language :: English',
],
)
Add python language version classifiers
import os
import re
from setuptools import find_packages, setup
READMEFILE = "README.rst"
VERSIONFILE = os.path.join("elasticutils", "_version.py")
VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"
def get_version():
verstrline = open(VERSIONFILE, "rt").read()
mo = re.search(VSRE, verstrline, re.M)
if mo:
return mo.group(1)
else:
raise RuntimeError(
"Unable to find version string in %s." % VERSIONFILE)
setup(
name='elasticutils',
version=get_version(),
description='Chainable interface to querying ElasticSearch',
long_description=open(READMEFILE).read(),
url='https://github.com/mozilla/elasticutils',
author='Mozilla Foundation and contributors',
license='BSD',
packages=find_packages(),
install_requires=['pyes>=0.15,<0.17'],
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
'Natural Language :: English',
],
)
|
<commit_before>import os
import re
from setuptools import find_packages, setup
READMEFILE = "README.rst"
VERSIONFILE = os.path.join("elasticutils", "_version.py")
VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"
def get_version():
verstrline = open(VERSIONFILE, "rt").read()
mo = re.search(VSRE, verstrline, re.M)
if mo:
return mo.group(1)
else:
raise RuntimeError(
"Unable to find version string in %s." % VERSIONFILE)
setup(
name='elasticutils',
version=get_version(),
description='Chainable interface to querying ElasticSearch',
long_description=open(READMEFILE).read(),
url='https://github.com/mozilla/elasticutils',
author='Mozilla Foundation and contributors',
license='BSD',
packages=find_packages(),
install_requires=['pyes>=0.15,<0.17'],
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Natural Language :: English',
],
)
<commit_msg>Add python language version classifiers<commit_after>import os
import re
from setuptools import find_packages, setup
READMEFILE = "README.rst"
VERSIONFILE = os.path.join("elasticutils", "_version.py")
VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"
def get_version():
verstrline = open(VERSIONFILE, "rt").read()
mo = re.search(VSRE, verstrline, re.M)
if mo:
return mo.group(1)
else:
raise RuntimeError(
"Unable to find version string in %s." % VERSIONFILE)
setup(
name='elasticutils',
version=get_version(),
description='Chainable interface to querying ElasticSearch',
long_description=open(READMEFILE).read(),
url='https://github.com/mozilla/elasticutils',
author='Mozilla Foundation and contributors',
license='BSD',
packages=find_packages(),
install_requires=['pyes>=0.15,<0.17'],
include_package_data=True,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
'Natural Language :: English',
],
)
|
e74bed03b6e27a2b25ca57dd1e02994740999951
|
setup.py
|
setup.py
|
"""Install Wallace as a command line utility."""
from setuptools import setup
setup_args = dict(
name='wallace-platform',
packages=['wallace'],
version="0.11.0",
description='Wallace, a platform for experimental cultural evolution',
url='http://github.com/berkeley-cocosci/Wallace',
author='Berkeley CoCoSci',
author_email='wallace@cocosci.berkeley.edu',
license='MIT',
keywords=['science', 'cultural evolution', 'experiments', 'psychology'],
classifiers=[],
zip_safe=False,
entry_points={
'console_scripts': [
'wallace = wallace.command_line:wallace',
],
}
)
# Read in requirements.txt for dependencies.
setup_args['install_requires'] = install_requires = []
setup_args['dependency_links'] = dependency_links = []
with open('requirements.txt') as f:
for line in f.readlines():
req = line.strip()
if not req or req.startswith('#'):
continue
if req.startswith('-e '):
dependency_links.append(req[3:])
else:
install_requires.append(req)
setup(**setup_args)
|
"""Install Wallace as a command line utility."""
from setuptools import setup
setup_args = dict(
name='w',
packages=['wallace'],
version="0.11.0",
description='Wallace, a platform for experimental cultural evolution',
url='http://github.com/berkeley-cocosci/Wallace',
author='Berkeley CoCoSci',
author_email='wallace@cocosci.berkeley.edu',
license='MIT',
keywords=['science', 'cultural evolution', 'experiments', 'psychology'],
classifiers=[],
zip_safe=False,
entry_points={
'console_scripts': [
'wallace = wallace.command_line:wallace',
],
}
)
# Read in requirements.txt for dependencies.
setup_args['install_requires'] = install_requires = []
setup_args['dependency_links'] = dependency_links = []
with open('requirements.txt') as f:
for line in f.readlines():
req = line.strip()
if not req or req.startswith('#'):
continue
if req.startswith('-e '):
dependency_links.append(req[3:])
else:
install_requires.append(req)
setup(**setup_args)
|
Use a free name on PyPi
|
Use a free name on PyPi
|
Python
|
mit
|
Dallinger/Dallinger,jcpeterson/Dallinger,jcpeterson/Dallinger,berkeley-cocosci/Wallace,suchow/Wallace,jcpeterson/Dallinger,suchow/Wallace,Dallinger/Dallinger,berkeley-cocosci/Wallace,Dallinger/Dallinger,jcpeterson/Dallinger,Dallinger/Dallinger,berkeley-cocosci/Wallace,jcpeterson/Dallinger,Dallinger/Dallinger,suchow/Wallace
|
"""Install Wallace as a command line utility."""
from setuptools import setup
setup_args = dict(
name='wallace-platform',
packages=['wallace'],
version="0.11.0",
description='Wallace, a platform for experimental cultural evolution',
url='http://github.com/berkeley-cocosci/Wallace',
author='Berkeley CoCoSci',
author_email='wallace@cocosci.berkeley.edu',
license='MIT',
keywords=['science', 'cultural evolution', 'experiments', 'psychology'],
classifiers=[],
zip_safe=False,
entry_points={
'console_scripts': [
'wallace = wallace.command_line:wallace',
],
}
)
# Read in requirements.txt for dependencies.
setup_args['install_requires'] = install_requires = []
setup_args['dependency_links'] = dependency_links = []
with open('requirements.txt') as f:
for line in f.readlines():
req = line.strip()
if not req or req.startswith('#'):
continue
if req.startswith('-e '):
dependency_links.append(req[3:])
else:
install_requires.append(req)
setup(**setup_args)
Use a free name on PyPi
|
"""Install Wallace as a command line utility."""
from setuptools import setup
setup_args = dict(
name='w',
packages=['wallace'],
version="0.11.0",
description='Wallace, a platform for experimental cultural evolution',
url='http://github.com/berkeley-cocosci/Wallace',
author='Berkeley CoCoSci',
author_email='wallace@cocosci.berkeley.edu',
license='MIT',
keywords=['science', 'cultural evolution', 'experiments', 'psychology'],
classifiers=[],
zip_safe=False,
entry_points={
'console_scripts': [
'wallace = wallace.command_line:wallace',
],
}
)
# Read in requirements.txt for dependencies.
setup_args['install_requires'] = install_requires = []
setup_args['dependency_links'] = dependency_links = []
with open('requirements.txt') as f:
for line in f.readlines():
req = line.strip()
if not req or req.startswith('#'):
continue
if req.startswith('-e '):
dependency_links.append(req[3:])
else:
install_requires.append(req)
setup(**setup_args)
|
<commit_before>"""Install Wallace as a command line utility."""
from setuptools import setup
setup_args = dict(
name='wallace-platform',
packages=['wallace'],
version="0.11.0",
description='Wallace, a platform for experimental cultural evolution',
url='http://github.com/berkeley-cocosci/Wallace',
author='Berkeley CoCoSci',
author_email='wallace@cocosci.berkeley.edu',
license='MIT',
keywords=['science', 'cultural evolution', 'experiments', 'psychology'],
classifiers=[],
zip_safe=False,
entry_points={
'console_scripts': [
'wallace = wallace.command_line:wallace',
],
}
)
# Read in requirements.txt for dependencies.
setup_args['install_requires'] = install_requires = []
setup_args['dependency_links'] = dependency_links = []
with open('requirements.txt') as f:
for line in f.readlines():
req = line.strip()
if not req or req.startswith('#'):
continue
if req.startswith('-e '):
dependency_links.append(req[3:])
else:
install_requires.append(req)
setup(**setup_args)
<commit_msg>Use a free name on PyPi<commit_after>
|
"""Install Wallace as a command line utility."""
from setuptools import setup
setup_args = dict(
name='w',
packages=['wallace'],
version="0.11.0",
description='Wallace, a platform for experimental cultural evolution',
url='http://github.com/berkeley-cocosci/Wallace',
author='Berkeley CoCoSci',
author_email='wallace@cocosci.berkeley.edu',
license='MIT',
keywords=['science', 'cultural evolution', 'experiments', 'psychology'],
classifiers=[],
zip_safe=False,
entry_points={
'console_scripts': [
'wallace = wallace.command_line:wallace',
],
}
)
# Read in requirements.txt for dependencies.
setup_args['install_requires'] = install_requires = []
setup_args['dependency_links'] = dependency_links = []
with open('requirements.txt') as f:
for line in f.readlines():
req = line.strip()
if not req or req.startswith('#'):
continue
if req.startswith('-e '):
dependency_links.append(req[3:])
else:
install_requires.append(req)
setup(**setup_args)
|
"""Install Wallace as a command line utility."""
from setuptools import setup
setup_args = dict(
name='wallace-platform',
packages=['wallace'],
version="0.11.0",
description='Wallace, a platform for experimental cultural evolution',
url='http://github.com/berkeley-cocosci/Wallace',
author='Berkeley CoCoSci',
author_email='wallace@cocosci.berkeley.edu',
license='MIT',
keywords=['science', 'cultural evolution', 'experiments', 'psychology'],
classifiers=[],
zip_safe=False,
entry_points={
'console_scripts': [
'wallace = wallace.command_line:wallace',
],
}
)
# Read in requirements.txt for dependencies.
setup_args['install_requires'] = install_requires = []
setup_args['dependency_links'] = dependency_links = []
with open('requirements.txt') as f:
for line in f.readlines():
req = line.strip()
if not req or req.startswith('#'):
continue
if req.startswith('-e '):
dependency_links.append(req[3:])
else:
install_requires.append(req)
setup(**setup_args)
Use a free name on PyPi"""Install Wallace as a command line utility."""
from setuptools import setup
setup_args = dict(
name='w',
packages=['wallace'],
version="0.11.0",
description='Wallace, a platform for experimental cultural evolution',
url='http://github.com/berkeley-cocosci/Wallace',
author='Berkeley CoCoSci',
author_email='wallace@cocosci.berkeley.edu',
license='MIT',
keywords=['science', 'cultural evolution', 'experiments', 'psychology'],
classifiers=[],
zip_safe=False,
entry_points={
'console_scripts': [
'wallace = wallace.command_line:wallace',
],
}
)
# Read in requirements.txt for dependencies.
setup_args['install_requires'] = install_requires = []
setup_args['dependency_links'] = dependency_links = []
with open('requirements.txt') as f:
for line in f.readlines():
req = line.strip()
if not req or req.startswith('#'):
continue
if req.startswith('-e '):
dependency_links.append(req[3:])
else:
install_requires.append(req)
setup(**setup_args)
|
<commit_before>"""Install Wallace as a command line utility."""
from setuptools import setup
setup_args = dict(
name='wallace-platform',
packages=['wallace'],
version="0.11.0",
description='Wallace, a platform for experimental cultural evolution',
url='http://github.com/berkeley-cocosci/Wallace',
author='Berkeley CoCoSci',
author_email='wallace@cocosci.berkeley.edu',
license='MIT',
keywords=['science', 'cultural evolution', 'experiments', 'psychology'],
classifiers=[],
zip_safe=False,
entry_points={
'console_scripts': [
'wallace = wallace.command_line:wallace',
],
}
)
# Read in requirements.txt for dependencies.
setup_args['install_requires'] = install_requires = []
setup_args['dependency_links'] = dependency_links = []
with open('requirements.txt') as f:
for line in f.readlines():
req = line.strip()
if not req or req.startswith('#'):
continue
if req.startswith('-e '):
dependency_links.append(req[3:])
else:
install_requires.append(req)
setup(**setup_args)
<commit_msg>Use a free name on PyPi<commit_after>"""Install Wallace as a command line utility."""
from setuptools import setup
setup_args = dict(
name='w',
packages=['wallace'],
version="0.11.0",
description='Wallace, a platform for experimental cultural evolution',
url='http://github.com/berkeley-cocosci/Wallace',
author='Berkeley CoCoSci',
author_email='wallace@cocosci.berkeley.edu',
license='MIT',
keywords=['science', 'cultural evolution', 'experiments', 'psychology'],
classifiers=[],
zip_safe=False,
entry_points={
'console_scripts': [
'wallace = wallace.command_line:wallace',
],
}
)
# Read in requirements.txt for dependencies.
setup_args['install_requires'] = install_requires = []
setup_args['dependency_links'] = dependency_links = []
with open('requirements.txt') as f:
for line in f.readlines():
req = line.strip()
if not req or req.startswith('#'):
continue
if req.startswith('-e '):
dependency_links.append(req[3:])
else:
install_requires.append(req)
setup(**setup_args)
|
03024f24cf21fc7fa7405aee2ab1a8df6a5da783
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# coding: utf-8
from setuptools import setup, find_packages
setup(
name="bentoo",
description="Benchmarking tools",
version="0.11",
packages=find_packages(),
scripts=["scripts/bentoo-generator.py", "scripts/bentoo-runner.py",
"scripts/bentoo-collector.py", "scripts/bentoo-analyser.py",
"scripts/bentoo-metric.py", "scripts/bentoo-quickstart.py"],
package_data={
'': ['*.adoc', '*.rst', '*.md']
},
author="Zhang YANG",
author_email="zyangmath@gmail.com",
license="PSF",
keywords="Benchmark;Performance Analysis",
url="http://github.com/ProgramFan/bentoo"
)
|
#!/usr/bin/env python
# coding: utf-8
from setuptools import setup, find_packages
setup(
name="bentoo",
description="Benchmarking tools",
version="0.12.dev",
packages=find_packages(),
scripts=["scripts/bentoo-generator.py", "scripts/bentoo-runner.py",
"scripts/bentoo-collector.py", "scripts/bentoo-analyser.py",
"scripts/bentoo-metric.py", "scripts/bentoo-quickstart.py"],
package_data={
'': ['*.adoc', '*.rst', '*.md']
},
author="Zhang YANG",
author_email="zyangmath@gmail.com",
license="PSF",
keywords="Benchmark;Performance Analysis",
url="http://github.com/ProgramFan/bentoo"
)
|
Prepare for next development cycle
|
Prepare for next development cycle
|
Python
|
mit
|
ProgramFan/bentoo
|
#!/usr/bin/env python
# coding: utf-8
from setuptools import setup, find_packages
setup(
name="bentoo",
description="Benchmarking tools",
version="0.11",
packages=find_packages(),
scripts=["scripts/bentoo-generator.py", "scripts/bentoo-runner.py",
"scripts/bentoo-collector.py", "scripts/bentoo-analyser.py",
"scripts/bentoo-metric.py", "scripts/bentoo-quickstart.py"],
package_data={
'': ['*.adoc', '*.rst', '*.md']
},
author="Zhang YANG",
author_email="zyangmath@gmail.com",
license="PSF",
keywords="Benchmark;Performance Analysis",
url="http://github.com/ProgramFan/bentoo"
)
Prepare for next development cycle
|
#!/usr/bin/env python
# coding: utf-8
from setuptools import setup, find_packages
setup(
name="bentoo",
description="Benchmarking tools",
version="0.12.dev",
packages=find_packages(),
scripts=["scripts/bentoo-generator.py", "scripts/bentoo-runner.py",
"scripts/bentoo-collector.py", "scripts/bentoo-analyser.py",
"scripts/bentoo-metric.py", "scripts/bentoo-quickstart.py"],
package_data={
'': ['*.adoc', '*.rst', '*.md']
},
author="Zhang YANG",
author_email="zyangmath@gmail.com",
license="PSF",
keywords="Benchmark;Performance Analysis",
url="http://github.com/ProgramFan/bentoo"
)
|
<commit_before>#!/usr/bin/env python
# coding: utf-8
from setuptools import setup, find_packages
setup(
name="bentoo",
description="Benchmarking tools",
version="0.11",
packages=find_packages(),
scripts=["scripts/bentoo-generator.py", "scripts/bentoo-runner.py",
"scripts/bentoo-collector.py", "scripts/bentoo-analyser.py",
"scripts/bentoo-metric.py", "scripts/bentoo-quickstart.py"],
package_data={
'': ['*.adoc', '*.rst', '*.md']
},
author="Zhang YANG",
author_email="zyangmath@gmail.com",
license="PSF",
keywords="Benchmark;Performance Analysis",
url="http://github.com/ProgramFan/bentoo"
)
<commit_msg>Prepare for next development cycle<commit_after>
|
#!/usr/bin/env python
# coding: utf-8
from setuptools import setup, find_packages
setup(
name="bentoo",
description="Benchmarking tools",
version="0.12.dev",
packages=find_packages(),
scripts=["scripts/bentoo-generator.py", "scripts/bentoo-runner.py",
"scripts/bentoo-collector.py", "scripts/bentoo-analyser.py",
"scripts/bentoo-metric.py", "scripts/bentoo-quickstart.py"],
package_data={
'': ['*.adoc', '*.rst', '*.md']
},
author="Zhang YANG",
author_email="zyangmath@gmail.com",
license="PSF",
keywords="Benchmark;Performance Analysis",
url="http://github.com/ProgramFan/bentoo"
)
|
#!/usr/bin/env python
# coding: utf-8
from setuptools import setup, find_packages
setup(
name="bentoo",
description="Benchmarking tools",
version="0.11",
packages=find_packages(),
scripts=["scripts/bentoo-generator.py", "scripts/bentoo-runner.py",
"scripts/bentoo-collector.py", "scripts/bentoo-analyser.py",
"scripts/bentoo-metric.py", "scripts/bentoo-quickstart.py"],
package_data={
'': ['*.adoc', '*.rst', '*.md']
},
author="Zhang YANG",
author_email="zyangmath@gmail.com",
license="PSF",
keywords="Benchmark;Performance Analysis",
url="http://github.com/ProgramFan/bentoo"
)
Prepare for next development cycle
#!/usr/bin/env python
# coding: utf-8
from setuptools import setup, find_packages
setup(
name="bentoo",
description="Benchmarking tools",
version="0.12.dev",
packages=find_packages(),
scripts=["scripts/bentoo-generator.py", "scripts/bentoo-runner.py",
"scripts/bentoo-collector.py", "scripts/bentoo-analyser.py",
"scripts/bentoo-metric.py", "scripts/bentoo-quickstart.py"],
package_data={
'': ['*.adoc', '*.rst', '*.md']
},
author="Zhang YANG",
author_email="zyangmath@gmail.com",
license="PSF",
keywords="Benchmark;Performance Analysis",
url="http://github.com/ProgramFan/bentoo"
)
|
<commit_before>#!/usr/bin/env python
# coding: utf-8
from setuptools import setup, find_packages
setup(
name="bentoo",
description="Benchmarking tools",
version="0.11",
packages=find_packages(),
scripts=["scripts/bentoo-generator.py", "scripts/bentoo-runner.py",
"scripts/bentoo-collector.py", "scripts/bentoo-analyser.py",
"scripts/bentoo-metric.py", "scripts/bentoo-quickstart.py"],
package_data={
'': ['*.adoc', '*.rst', '*.md']
},
author="Zhang YANG",
author_email="zyangmath@gmail.com",
license="PSF",
keywords="Benchmark;Performance Analysis",
url="http://github.com/ProgramFan/bentoo"
)
<commit_msg>Prepare for next development cycle<commit_after>#!/usr/bin/env python
# coding: utf-8
from setuptools import setup, find_packages
setup(
name="bentoo",
description="Benchmarking tools",
version="0.12.dev",
packages=find_packages(),
scripts=["scripts/bentoo-generator.py", "scripts/bentoo-runner.py",
"scripts/bentoo-collector.py", "scripts/bentoo-analyser.py",
"scripts/bentoo-metric.py", "scripts/bentoo-quickstart.py"],
package_data={
'': ['*.adoc', '*.rst', '*.md']
},
author="Zhang YANG",
author_email="zyangmath@gmail.com",
license="PSF",
keywords="Benchmark;Performance Analysis",
url="http://github.com/ProgramFan/bentoo"
)
|
3029f33a4bff225128658618ff70a4e7b955a19f
|
setup.py
|
setup.py
|
"""wal - setup.py"""
import setuptools
try:
import pywal
except (ImportError, SyntaxError):
print("error: pywal requires Python 3.5 or greater.")
quit(1)
try:
import pypandoc
LONG_DESC = pypandoc.convert("README.md", "rst")
except(IOError, ImportError, RuntimeError):
LONG_DESC = open('README.md').read()
VERSION = pywal.__version__
DOWNLOAD = "https://github.com/dylanaraps/pywal/archive/%s.tar.gz" % VERSION
setuptools.setup(
name="pywal",
version=VERSION,
author="Dylan Araps",
author_email="dylan.araps@gmail.com",
description="Generate and change colorschemes on the fly",
long_description=LONG_DESC,
license="MIT",
url="https://github.com/dylanaraps/pywal",
download_url=DOWNLOAD,
classifiers=[
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
packages=["pywal"],
entry_points={
"console_scripts": ["wal=pywal.__main__:main"]
},
python_requires=">=3.5",
test_suite="tests",
include_package_data=True
)
|
"""wal - setup.py"""
import setuptools
try:
import pywal
except ImportError:
print("error: pywal requires Python 3.5 or greater.")
quit(1)
try:
import pypandoc
LONG_DESC = pypandoc.convert("README.md", "rst")
except (IOError, ImportError, RuntimeError):
LONG_DESC = open('README.md').read()
VERSION = pywal.__version__
DOWNLOAD = "https://github.com/dylanaraps/pywal/archive/%s.tar.gz" % VERSION
setuptools.setup(
name="pywal",
version=VERSION,
author="Dylan Araps",
author_email="dylan.araps@gmail.com",
description="Generate and change colorschemes on the fly",
long_description=LONG_DESC,
license="MIT",
url="https://github.com/dylanaraps/pywal",
download_url=DOWNLOAD,
classifiers=[
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
packages=["pywal"],
entry_points={"console_scripts": ["wal=pywal.__main__:main"]},
python_requires=">=3.5",
test_suite="tests",
include_package_data=True)
|
Allow syntax errors to pass up since users should never see any.
|
Allow syntax errors to pass up since users should never see any.
This will help with debugging. Right now it just tells you that pywal needs
python 3.5 or newer.
Merge remote-tracking branch 'origin/syntax-errors' into syntax-errors
|
Python
|
mit
|
dylanaraps/pywal,dylanaraps/pywal,dylanaraps/pywal
|
"""wal - setup.py"""
import setuptools
try:
import pywal
except (ImportError, SyntaxError):
print("error: pywal requires Python 3.5 or greater.")
quit(1)
try:
import pypandoc
LONG_DESC = pypandoc.convert("README.md", "rst")
except(IOError, ImportError, RuntimeError):
LONG_DESC = open('README.md').read()
VERSION = pywal.__version__
DOWNLOAD = "https://github.com/dylanaraps/pywal/archive/%s.tar.gz" % VERSION
setuptools.setup(
name="pywal",
version=VERSION,
author="Dylan Araps",
author_email="dylan.araps@gmail.com",
description="Generate and change colorschemes on the fly",
long_description=LONG_DESC,
license="MIT",
url="https://github.com/dylanaraps/pywal",
download_url=DOWNLOAD,
classifiers=[
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
packages=["pywal"],
entry_points={
"console_scripts": ["wal=pywal.__main__:main"]
},
python_requires=">=3.5",
test_suite="tests",
include_package_data=True
)
Allow syntax errors to pass up since users should never see any.
This will help with debugging. Right now it just tells you that pywal needs
python 3.5 or newer.
Merge remote-tracking branch 'origin/syntax-errors' into syntax-errors
|
"""wal - setup.py"""
import setuptools
try:
import pywal
except ImportError:
print("error: pywal requires Python 3.5 or greater.")
quit(1)
try:
import pypandoc
LONG_DESC = pypandoc.convert("README.md", "rst")
except (IOError, ImportError, RuntimeError):
LONG_DESC = open('README.md').read()
VERSION = pywal.__version__
DOWNLOAD = "https://github.com/dylanaraps/pywal/archive/%s.tar.gz" % VERSION
setuptools.setup(
name="pywal",
version=VERSION,
author="Dylan Araps",
author_email="dylan.araps@gmail.com",
description="Generate and change colorschemes on the fly",
long_description=LONG_DESC,
license="MIT",
url="https://github.com/dylanaraps/pywal",
download_url=DOWNLOAD,
classifiers=[
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
packages=["pywal"],
entry_points={"console_scripts": ["wal=pywal.__main__:main"]},
python_requires=">=3.5",
test_suite="tests",
include_package_data=True)
|
<commit_before>"""wal - setup.py"""
import setuptools
try:
import pywal
except (ImportError, SyntaxError):
print("error: pywal requires Python 3.5 or greater.")
quit(1)
try:
import pypandoc
LONG_DESC = pypandoc.convert("README.md", "rst")
except(IOError, ImportError, RuntimeError):
LONG_DESC = open('README.md').read()
VERSION = pywal.__version__
DOWNLOAD = "https://github.com/dylanaraps/pywal/archive/%s.tar.gz" % VERSION
setuptools.setup(
name="pywal",
version=VERSION,
author="Dylan Araps",
author_email="dylan.araps@gmail.com",
description="Generate and change colorschemes on the fly",
long_description=LONG_DESC,
license="MIT",
url="https://github.com/dylanaraps/pywal",
download_url=DOWNLOAD,
classifiers=[
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
packages=["pywal"],
entry_points={
"console_scripts": ["wal=pywal.__main__:main"]
},
python_requires=">=3.5",
test_suite="tests",
include_package_data=True
)
<commit_msg>Allow syntax errors to pass up since users should never see any.
This will help with debugging. Right now it just tells you that pywal needs
python 3.5 or newer.
Merge remote-tracking branch 'origin/syntax-errors' into syntax-errors<commit_after>
|
"""wal - setup.py"""
import setuptools
try:
import pywal
except ImportError:
print("error: pywal requires Python 3.5 or greater.")
quit(1)
try:
import pypandoc
LONG_DESC = pypandoc.convert("README.md", "rst")
except (IOError, ImportError, RuntimeError):
LONG_DESC = open('README.md').read()
VERSION = pywal.__version__
DOWNLOAD = "https://github.com/dylanaraps/pywal/archive/%s.tar.gz" % VERSION
setuptools.setup(
name="pywal",
version=VERSION,
author="Dylan Araps",
author_email="dylan.araps@gmail.com",
description="Generate and change colorschemes on the fly",
long_description=LONG_DESC,
license="MIT",
url="https://github.com/dylanaraps/pywal",
download_url=DOWNLOAD,
classifiers=[
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
packages=["pywal"],
entry_points={"console_scripts": ["wal=pywal.__main__:main"]},
python_requires=">=3.5",
test_suite="tests",
include_package_data=True)
|
"""wal - setup.py"""
import setuptools
try:
import pywal
except (ImportError, SyntaxError):
print("error: pywal requires Python 3.5 or greater.")
quit(1)
try:
import pypandoc
LONG_DESC = pypandoc.convert("README.md", "rst")
except(IOError, ImportError, RuntimeError):
LONG_DESC = open('README.md').read()
VERSION = pywal.__version__
DOWNLOAD = "https://github.com/dylanaraps/pywal/archive/%s.tar.gz" % VERSION
setuptools.setup(
name="pywal",
version=VERSION,
author="Dylan Araps",
author_email="dylan.araps@gmail.com",
description="Generate and change colorschemes on the fly",
long_description=LONG_DESC,
license="MIT",
url="https://github.com/dylanaraps/pywal",
download_url=DOWNLOAD,
classifiers=[
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
packages=["pywal"],
entry_points={
"console_scripts": ["wal=pywal.__main__:main"]
},
python_requires=">=3.5",
test_suite="tests",
include_package_data=True
)
Allow syntax errors to pass up since users should never see any.
This will help with debugging. Right now it just tells you that pywal needs
python 3.5 or newer.
Merge remote-tracking branch 'origin/syntax-errors' into syntax-errors"""wal - setup.py"""
import setuptools
try:
import pywal
except ImportError:
print("error: pywal requires Python 3.5 or greater.")
quit(1)
try:
import pypandoc
LONG_DESC = pypandoc.convert("README.md", "rst")
except (IOError, ImportError, RuntimeError):
LONG_DESC = open('README.md').read()
VERSION = pywal.__version__
DOWNLOAD = "https://github.com/dylanaraps/pywal/archive/%s.tar.gz" % VERSION
setuptools.setup(
name="pywal",
version=VERSION,
author="Dylan Araps",
author_email="dylan.araps@gmail.com",
description="Generate and change colorschemes on the fly",
long_description=LONG_DESC,
license="MIT",
url="https://github.com/dylanaraps/pywal",
download_url=DOWNLOAD,
classifiers=[
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
packages=["pywal"],
entry_points={"console_scripts": ["wal=pywal.__main__:main"]},
python_requires=">=3.5",
test_suite="tests",
include_package_data=True)
|
<commit_before>"""wal - setup.py"""
import setuptools
try:
import pywal
except (ImportError, SyntaxError):
print("error: pywal requires Python 3.5 or greater.")
quit(1)
try:
import pypandoc
LONG_DESC = pypandoc.convert("README.md", "rst")
except(IOError, ImportError, RuntimeError):
LONG_DESC = open('README.md').read()
VERSION = pywal.__version__
DOWNLOAD = "https://github.com/dylanaraps/pywal/archive/%s.tar.gz" % VERSION
setuptools.setup(
name="pywal",
version=VERSION,
author="Dylan Araps",
author_email="dylan.araps@gmail.com",
description="Generate and change colorschemes on the fly",
long_description=LONG_DESC,
license="MIT",
url="https://github.com/dylanaraps/pywal",
download_url=DOWNLOAD,
classifiers=[
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
packages=["pywal"],
entry_points={
"console_scripts": ["wal=pywal.__main__:main"]
},
python_requires=">=3.5",
test_suite="tests",
include_package_data=True
)
<commit_msg>Allow syntax errors to pass up since users should never see any.
This will help with debugging. Right now it just tells you that pywal needs
python 3.5 or newer.
Merge remote-tracking branch 'origin/syntax-errors' into syntax-errors<commit_after>"""wal - setup.py"""
import setuptools
try:
import pywal
except ImportError:
print("error: pywal requires Python 3.5 or greater.")
quit(1)
try:
import pypandoc
LONG_DESC = pypandoc.convert("README.md", "rst")
except (IOError, ImportError, RuntimeError):
LONG_DESC = open('README.md').read()
VERSION = pywal.__version__
DOWNLOAD = "https://github.com/dylanaraps/pywal/archive/%s.tar.gz" % VERSION
setuptools.setup(
name="pywal",
version=VERSION,
author="Dylan Araps",
author_email="dylan.araps@gmail.com",
description="Generate and change colorschemes on the fly",
long_description=LONG_DESC,
license="MIT",
url="https://github.com/dylanaraps/pywal",
download_url=DOWNLOAD,
classifiers=[
"Environment :: X11 Applications",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
],
packages=["pywal"],
entry_points={"console_scripts": ["wal=pywal.__main__:main"]},
python_requires=">=3.5",
test_suite="tests",
include_package_data=True)
|
67dd519631e74d7b10c261ab4d987f1e314af86c
|
setup.py
|
setup.py
|
import os
import subprocess
from distutils.core import setup
try:
if os.path.exists(".git"):
s = subprocess.Popen(["git", "rev-parse", "HEAD"],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out = s.communicate()[0]
GIT_REVISION = out.strip()
else:
GIT_REVISION = "unknown"
except WindowsError:
GIT_REVISION = "unknown"
FULL_VERSION = '0.0.1-dev'
if "dev" in FULL_VERSION:
RELEASED = False
VERSION = FULL_VERSION + '-' + GIT_REVISION[:7]
else:
RELEASED = True
VERSION = FULL_VERSION
def generate_version_py(filename):
cnt = """\
# This file was autogenerated
version = '%s'
git_revision = '%s'
"""
cnt = cnt % (VERSION, GIT_REVISION)
f = open(filename, "w")
try:
f.write(cnt)
finally:
f.close()
if __name__ == '__main__':
generate_version_py("sahgutils/__dev_version.py")
setup(
name='SAHGutils',
version=VERSION,
author='Scott Sinclair',
author_email='scott.sinclair.za@gmail.com',
packages=['sahgutils', 'sahgutils.io'],
license='LICENSE.txt',
description='Useful tools for data analysis and plots.',
long_description=open('README.txt').read(),
)
|
import os
import subprocess
from distutils.core import setup
try:
if os.path.exists(".git"):
s = subprocess.Popen(["git", "rev-parse", "HEAD"],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out = s.communicate()[0]
GIT_REVISION = out.strip()
else:
GIT_REVISION = "unknown"
except WindowsError:
GIT_REVISION = "unknown"
FULL_VERSION = '0.1.0-dev'
if "dev" in FULL_VERSION:
RELEASED = False
VERSION = FULL_VERSION + '-' + GIT_REVISION[:7]
else:
RELEASED = True
VERSION = FULL_VERSION
def generate_version_py(filename):
cnt = """\
# This file was autogenerated
version = '%s'
git_revision = '%s'
"""
cnt = cnt % (VERSION, GIT_REVISION)
f = open(filename, "w")
try:
f.write(cnt)
finally:
f.close()
if __name__ == '__main__':
generate_version_py("sahgutils/__dev_version.py")
setup(
name='SAHGutils',
version=VERSION,
author='Scott Sinclair',
author_email='scott.sinclair.za@gmail.com',
packages=['sahgutils', 'sahgutils.io'],
license='LICENSE.txt',
description='Useful tools for data analysis and plots.',
long_description=open('README.txt').read(),
)
|
Use more sensible versioning towards first release
|
BLD: Use more sensible versioning towards first release
|
Python
|
bsd-3-clause
|
sahg/SAHGutils
|
import os
import subprocess
from distutils.core import setup
try:
if os.path.exists(".git"):
s = subprocess.Popen(["git", "rev-parse", "HEAD"],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out = s.communicate()[0]
GIT_REVISION = out.strip()
else:
GIT_REVISION = "unknown"
except WindowsError:
GIT_REVISION = "unknown"
FULL_VERSION = '0.0.1-dev'
if "dev" in FULL_VERSION:
RELEASED = False
VERSION = FULL_VERSION + '-' + GIT_REVISION[:7]
else:
RELEASED = True
VERSION = FULL_VERSION
def generate_version_py(filename):
cnt = """\
# This file was autogenerated
version = '%s'
git_revision = '%s'
"""
cnt = cnt % (VERSION, GIT_REVISION)
f = open(filename, "w")
try:
f.write(cnt)
finally:
f.close()
if __name__ == '__main__':
generate_version_py("sahgutils/__dev_version.py")
setup(
name='SAHGutils',
version=VERSION,
author='Scott Sinclair',
author_email='scott.sinclair.za@gmail.com',
packages=['sahgutils', 'sahgutils.io'],
license='LICENSE.txt',
description='Useful tools for data analysis and plots.',
long_description=open('README.txt').read(),
)
BLD: Use more sensible versioning towards first release
|
import os
import subprocess
from distutils.core import setup
try:
if os.path.exists(".git"):
s = subprocess.Popen(["git", "rev-parse", "HEAD"],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out = s.communicate()[0]
GIT_REVISION = out.strip()
else:
GIT_REVISION = "unknown"
except WindowsError:
GIT_REVISION = "unknown"
FULL_VERSION = '0.1.0-dev'
if "dev" in FULL_VERSION:
RELEASED = False
VERSION = FULL_VERSION + '-' + GIT_REVISION[:7]
else:
RELEASED = True
VERSION = FULL_VERSION
def generate_version_py(filename):
cnt = """\
# This file was autogenerated
version = '%s'
git_revision = '%s'
"""
cnt = cnt % (VERSION, GIT_REVISION)
f = open(filename, "w")
try:
f.write(cnt)
finally:
f.close()
if __name__ == '__main__':
generate_version_py("sahgutils/__dev_version.py")
setup(
name='SAHGutils',
version=VERSION,
author='Scott Sinclair',
author_email='scott.sinclair.za@gmail.com',
packages=['sahgutils', 'sahgutils.io'],
license='LICENSE.txt',
description='Useful tools for data analysis and plots.',
long_description=open('README.txt').read(),
)
|
<commit_before>import os
import subprocess
from distutils.core import setup
try:
if os.path.exists(".git"):
s = subprocess.Popen(["git", "rev-parse", "HEAD"],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out = s.communicate()[0]
GIT_REVISION = out.strip()
else:
GIT_REVISION = "unknown"
except WindowsError:
GIT_REVISION = "unknown"
FULL_VERSION = '0.0.1-dev'
if "dev" in FULL_VERSION:
RELEASED = False
VERSION = FULL_VERSION + '-' + GIT_REVISION[:7]
else:
RELEASED = True
VERSION = FULL_VERSION
def generate_version_py(filename):
cnt = """\
# This file was autogenerated
version = '%s'
git_revision = '%s'
"""
cnt = cnt % (VERSION, GIT_REVISION)
f = open(filename, "w")
try:
f.write(cnt)
finally:
f.close()
if __name__ == '__main__':
generate_version_py("sahgutils/__dev_version.py")
setup(
name='SAHGutils',
version=VERSION,
author='Scott Sinclair',
author_email='scott.sinclair.za@gmail.com',
packages=['sahgutils', 'sahgutils.io'],
license='LICENSE.txt',
description='Useful tools for data analysis and plots.',
long_description=open('README.txt').read(),
)
<commit_msg>BLD: Use more sensible versioning towards first release<commit_after>
|
import os
import subprocess
from distutils.core import setup
try:
if os.path.exists(".git"):
s = subprocess.Popen(["git", "rev-parse", "HEAD"],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out = s.communicate()[0]
GIT_REVISION = out.strip()
else:
GIT_REVISION = "unknown"
except WindowsError:
GIT_REVISION = "unknown"
FULL_VERSION = '0.1.0-dev'
if "dev" in FULL_VERSION:
RELEASED = False
VERSION = FULL_VERSION + '-' + GIT_REVISION[:7]
else:
RELEASED = True
VERSION = FULL_VERSION
def generate_version_py(filename):
cnt = """\
# This file was autogenerated
version = '%s'
git_revision = '%s'
"""
cnt = cnt % (VERSION, GIT_REVISION)
f = open(filename, "w")
try:
f.write(cnt)
finally:
f.close()
if __name__ == '__main__':
generate_version_py("sahgutils/__dev_version.py")
setup(
name='SAHGutils',
version=VERSION,
author='Scott Sinclair',
author_email='scott.sinclair.za@gmail.com',
packages=['sahgutils', 'sahgutils.io'],
license='LICENSE.txt',
description='Useful tools for data analysis and plots.',
long_description=open('README.txt').read(),
)
|
import os
import subprocess
from distutils.core import setup
try:
if os.path.exists(".git"):
s = subprocess.Popen(["git", "rev-parse", "HEAD"],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out = s.communicate()[0]
GIT_REVISION = out.strip()
else:
GIT_REVISION = "unknown"
except WindowsError:
GIT_REVISION = "unknown"
FULL_VERSION = '0.0.1-dev'
if "dev" in FULL_VERSION:
RELEASED = False
VERSION = FULL_VERSION + '-' + GIT_REVISION[:7]
else:
RELEASED = True
VERSION = FULL_VERSION
def generate_version_py(filename):
cnt = """\
# This file was autogenerated
version = '%s'
git_revision = '%s'
"""
cnt = cnt % (VERSION, GIT_REVISION)
f = open(filename, "w")
try:
f.write(cnt)
finally:
f.close()
if __name__ == '__main__':
generate_version_py("sahgutils/__dev_version.py")
setup(
name='SAHGutils',
version=VERSION,
author='Scott Sinclair',
author_email='scott.sinclair.za@gmail.com',
packages=['sahgutils', 'sahgutils.io'],
license='LICENSE.txt',
description='Useful tools for data analysis and plots.',
long_description=open('README.txt').read(),
)
BLD: Use more sensible versioning towards first release
import os
import subprocess
from distutils.core import setup
try:
if os.path.exists(".git"):
s = subprocess.Popen(["git", "rev-parse", "HEAD"],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out = s.communicate()[0]
GIT_REVISION = out.strip()
else:
GIT_REVISION = "unknown"
except WindowsError:
GIT_REVISION = "unknown"
FULL_VERSION = '0.1.0-dev'
if "dev" in FULL_VERSION:
RELEASED = False
VERSION = FULL_VERSION + '-' + GIT_REVISION[:7]
else:
RELEASED = True
VERSION = FULL_VERSION
def generate_version_py(filename):
cnt = """\
# This file was autogenerated
version = '%s'
git_revision = '%s'
"""
cnt = cnt % (VERSION, GIT_REVISION)
f = open(filename, "w")
try:
f.write(cnt)
finally:
f.close()
if __name__ == '__main__':
generate_version_py("sahgutils/__dev_version.py")
setup(
name='SAHGutils',
version=VERSION,
author='Scott Sinclair',
author_email='scott.sinclair.za@gmail.com',
packages=['sahgutils', 'sahgutils.io'],
license='LICENSE.txt',
description='Useful tools for data analysis and plots.',
long_description=open('README.txt').read(),
)
|
<commit_before>import os
import subprocess
from distutils.core import setup
try:
if os.path.exists(".git"):
s = subprocess.Popen(["git", "rev-parse", "HEAD"],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out = s.communicate()[0]
GIT_REVISION = out.strip()
else:
GIT_REVISION = "unknown"
except WindowsError:
GIT_REVISION = "unknown"
FULL_VERSION = '0.0.1-dev'
if "dev" in FULL_VERSION:
RELEASED = False
VERSION = FULL_VERSION + '-' + GIT_REVISION[:7]
else:
RELEASED = True
VERSION = FULL_VERSION
def generate_version_py(filename):
cnt = """\
# This file was autogenerated
version = '%s'
git_revision = '%s'
"""
cnt = cnt % (VERSION, GIT_REVISION)
f = open(filename, "w")
try:
f.write(cnt)
finally:
f.close()
if __name__ == '__main__':
generate_version_py("sahgutils/__dev_version.py")
setup(
name='SAHGutils',
version=VERSION,
author='Scott Sinclair',
author_email='scott.sinclair.za@gmail.com',
packages=['sahgutils', 'sahgutils.io'],
license='LICENSE.txt',
description='Useful tools for data analysis and plots.',
long_description=open('README.txt').read(),
)
<commit_msg>BLD: Use more sensible versioning towards first release<commit_after>import os
import subprocess
from distutils.core import setup
try:
if os.path.exists(".git"):
s = subprocess.Popen(["git", "rev-parse", "HEAD"],
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out = s.communicate()[0]
GIT_REVISION = out.strip()
else:
GIT_REVISION = "unknown"
except WindowsError:
GIT_REVISION = "unknown"
FULL_VERSION = '0.1.0-dev'
if "dev" in FULL_VERSION:
RELEASED = False
VERSION = FULL_VERSION + '-' + GIT_REVISION[:7]
else:
RELEASED = True
VERSION = FULL_VERSION
def generate_version_py(filename):
cnt = """\
# This file was autogenerated
version = '%s'
git_revision = '%s'
"""
cnt = cnt % (VERSION, GIT_REVISION)
f = open(filename, "w")
try:
f.write(cnt)
finally:
f.close()
if __name__ == '__main__':
generate_version_py("sahgutils/__dev_version.py")
setup(
name='SAHGutils',
version=VERSION,
author='Scott Sinclair',
author_email='scott.sinclair.za@gmail.com',
packages=['sahgutils', 'sahgutils.io'],
license='LICENSE.txt',
description='Useful tools for data analysis and plots.',
long_description=open('README.txt').read(),
)
|
90e717775867a0445af1ea03455efd0db7af0de7
|
setup.py
|
setup.py
|
#!/usr/bin/env python3
from setuptools import setup
setup(
name="smartbot",
version="1.0.0",
description="A supposedly smart IRC bot.",
url="https://github.com/tomleese/smartbot",
author="Tom Leese",
author_email="tom@tomleese.me.uk",
packages=["smartbot", "smartbot.backends", "smartbot.plugins", "smartbot.stores", "smartbot.utils"],
install_requires=[
"PyYaml",
"lxml",
"requests==2.1.0",
"isodate",
"textblob",
"twython",
"cssselect"
],
entry_points = {
"console_scripts": ["smartbot = smartbot:main"]
}
)
|
#!/usr/bin/env python3
from setuptools import setup
setup(
name="smartbot",
version="1.0.0-dev",
description="A supposedly smart IRC bot.",
url="https://github.com/tomleese/smartbot",
author="Tom Leese",
author_email="tom@tomleese.me.uk",
packages=["smartbot", "smartbot.backends", "smartbot.plugins", "smartbot.stores", "smartbot.utils"],
install_requires=[
"PyYaml",
"lxml",
"requests==2.1.0",
"isodate",
"textblob",
"twython",
"cssselect"
],
entry_points = {
"console_scripts": ["smartbot = smartbot:main"]
}
)
|
Add -dev suffix to version
|
Add -dev suffix to version
Makes it clear that there has not been an official release yet.
|
Python
|
mit
|
Muzer/smartbot,Cyanogenoid/smartbot,tomleese/smartbot,thomasleese/smartbot-old
|
#!/usr/bin/env python3
from setuptools import setup
setup(
name="smartbot",
version="1.0.0",
description="A supposedly smart IRC bot.",
url="https://github.com/tomleese/smartbot",
author="Tom Leese",
author_email="tom@tomleese.me.uk",
packages=["smartbot", "smartbot.backends", "smartbot.plugins", "smartbot.stores", "smartbot.utils"],
install_requires=[
"PyYaml",
"lxml",
"requests==2.1.0",
"isodate",
"textblob",
"twython",
"cssselect"
],
entry_points = {
"console_scripts": ["smartbot = smartbot:main"]
}
)
Add -dev suffix to version
Makes it clear that there has not been an official release yet.
|
#!/usr/bin/env python3
from setuptools import setup
setup(
name="smartbot",
version="1.0.0-dev",
description="A supposedly smart IRC bot.",
url="https://github.com/tomleese/smartbot",
author="Tom Leese",
author_email="tom@tomleese.me.uk",
packages=["smartbot", "smartbot.backends", "smartbot.plugins", "smartbot.stores", "smartbot.utils"],
install_requires=[
"PyYaml",
"lxml",
"requests==2.1.0",
"isodate",
"textblob",
"twython",
"cssselect"
],
entry_points = {
"console_scripts": ["smartbot = smartbot:main"]
}
)
|
<commit_before>#!/usr/bin/env python3
from setuptools import setup
setup(
name="smartbot",
version="1.0.0",
description="A supposedly smart IRC bot.",
url="https://github.com/tomleese/smartbot",
author="Tom Leese",
author_email="tom@tomleese.me.uk",
packages=["smartbot", "smartbot.backends", "smartbot.plugins", "smartbot.stores", "smartbot.utils"],
install_requires=[
"PyYaml",
"lxml",
"requests==2.1.0",
"isodate",
"textblob",
"twython",
"cssselect"
],
entry_points = {
"console_scripts": ["smartbot = smartbot:main"]
}
)
<commit_msg>Add -dev suffix to version
Makes it clear that there has not been an official release yet.<commit_after>
|
#!/usr/bin/env python3
from setuptools import setup
setup(
name="smartbot",
version="1.0.0-dev",
description="A supposedly smart IRC bot.",
url="https://github.com/tomleese/smartbot",
author="Tom Leese",
author_email="tom@tomleese.me.uk",
packages=["smartbot", "smartbot.backends", "smartbot.plugins", "smartbot.stores", "smartbot.utils"],
install_requires=[
"PyYaml",
"lxml",
"requests==2.1.0",
"isodate",
"textblob",
"twython",
"cssselect"
],
entry_points = {
"console_scripts": ["smartbot = smartbot:main"]
}
)
|
#!/usr/bin/env python3
from setuptools import setup
setup(
name="smartbot",
version="1.0.0",
description="A supposedly smart IRC bot.",
url="https://github.com/tomleese/smartbot",
author="Tom Leese",
author_email="tom@tomleese.me.uk",
packages=["smartbot", "smartbot.backends", "smartbot.plugins", "smartbot.stores", "smartbot.utils"],
install_requires=[
"PyYaml",
"lxml",
"requests==2.1.0",
"isodate",
"textblob",
"twython",
"cssselect"
],
entry_points = {
"console_scripts": ["smartbot = smartbot:main"]
}
)
Add -dev suffix to version
Makes it clear that there has not been an official release yet.
#!/usr/bin/env python3
from setuptools import setup
setup(
name="smartbot",
version="1.0.0-dev",
description="A supposedly smart IRC bot.",
url="https://github.com/tomleese/smartbot",
author="Tom Leese",
author_email="tom@tomleese.me.uk",
packages=["smartbot", "smartbot.backends", "smartbot.plugins", "smartbot.stores", "smartbot.utils"],
install_requires=[
"PyYaml",
"lxml",
"requests==2.1.0",
"isodate",
"textblob",
"twython",
"cssselect"
],
entry_points = {
"console_scripts": ["smartbot = smartbot:main"]
}
)
|
<commit_before>#!/usr/bin/env python3
from setuptools import setup
setup(
name="smartbot",
version="1.0.0",
description="A supposedly smart IRC bot.",
url="https://github.com/tomleese/smartbot",
author="Tom Leese",
author_email="tom@tomleese.me.uk",
packages=["smartbot", "smartbot.backends", "smartbot.plugins", "smartbot.stores", "smartbot.utils"],
install_requires=[
"PyYaml",
"lxml",
"requests==2.1.0",
"isodate",
"textblob",
"twython",
"cssselect"
],
entry_points = {
"console_scripts": ["smartbot = smartbot:main"]
}
)
<commit_msg>Add -dev suffix to version
Makes it clear that there has not been an official release yet.<commit_after>#!/usr/bin/env python3
from setuptools import setup
setup(
name="smartbot",
version="1.0.0-dev",
description="A supposedly smart IRC bot.",
url="https://github.com/tomleese/smartbot",
author="Tom Leese",
author_email="tom@tomleese.me.uk",
packages=["smartbot", "smartbot.backends", "smartbot.plugins", "smartbot.stores", "smartbot.utils"],
install_requires=[
"PyYaml",
"lxml",
"requests==2.1.0",
"isodate",
"textblob",
"twython",
"cssselect"
],
entry_points = {
"console_scripts": ["smartbot = smartbot:main"]
}
)
|
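Editor's note on the record above: the "-dev" spelling works, but under PEP 440 (as applied by newer setuptools and pip) it is normalized to the canonical ".dev0" form. A minimal illustrative sketch, trimmed to the relevant fields and not part of the committed file:

#!/usr/bin/env python3
# Sketch only: the same pre-release intent in canonical PEP 440 form,
# which newer packaging tools normalize "1.0.0-dev" to anyway.
from setuptools import setup

setup(
    name="smartbot",
    version="1.0.0.dev0",  # development-release segment, sorts before 1.0.0
)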
4f7c19db4d875a8e35f48c5d850d2e2b34ada86b
|
setup.py
|
setup.py
|
##################################################################################
# Copyright 2013-19 by the Trustees of the University of Pennsylvania
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##################################################################################
import setuptools
from distutils.core import setup, Extension
with open("README.md", "r") as fh:
long_description = fh.read()
setup(name='ieeg',
version='1.1',
description='API for the IEEG.org platform',
install_requires=['deprecation','requests','numpy','pandas', 'pennprov==2.2.0'],
packages=setuptools.find_packages(),
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/ieeg-portal/ieegpy",
classifiers=[
'Programming Language :: Python :: 2-3',
'License :: OSI Approved :: Apache License',
'Operating System :: OS Independent',
])
|
##################################################################################
# Copyright 2013-19 by the Trustees of the University of Pennsylvania
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##################################################################################
import setuptools
from distutils.core import setup, Extension
with open("README.md", "r") as fh:
long_description = fh.read()
setup(name='ieeg',
version='1.1',
description='API for the IEEG.org platform',
install_requires=['deprecation','requests','numpy','pandas', 'pennprov==2.2.1'],
packages=setuptools.find_packages(),
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/ieeg-portal/ieegpy",
classifiers=[
'Programming Language :: Python :: 2-3',
'License :: OSI Approved :: Apache License',
'Operating System :: OS Independent',
])
|
Update pennprov to 2.2.1 for Python 3.7 compatibility.
|
Update pennprov to 2.2.1 for Python 3.7 compatibility.
|
Python
|
apache-2.0
|
ieeg-portal/ieegpy
|
##################################################################################
# Copyright 2013-19 by the Trustees of the University of Pennsylvania
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##################################################################################
import setuptools
from distutils.core import setup, Extension
with open("README.md", "r") as fh:
long_description = fh.read()
setup(name='ieeg',
version='1.1',
description='API for the IEEG.org platform',
install_requires=['deprecation','requests','numpy','pandas', 'pennprov==2.2.0'],
packages=setuptools.find_packages(),
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/ieeg-portal/ieegpy",
classifiers=[
'Programming Language :: Python :: 2-3',
'License :: OSI Approved :: Apache License',
'Operating System :: OS Independent',
])
Update pennprov to 2.2.1 for Python 3.7 compatibility.
|
##################################################################################
# Copyright 2013-19 by the Trustees of the University of Pennsylvania
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##################################################################################
import setuptools
from distutils.core import setup, Extension
with open("README.md", "r") as fh:
long_description = fh.read()
setup(name='ieeg',
version='1.1',
description='API for the IEEG.org platform',
install_requires=['deprecation','requests','numpy','pandas', 'pennprov==2.2.1'],
packages=setuptools.find_packages(),
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/ieeg-portal/ieegpy",
classifiers=[
'Programming Language :: Python :: 2-3',
'License :: OSI Approved :: Apache License',
'Operating System :: OS Independent',
])
|
<commit_before>##################################################################################
# Copyright 2013-19 by the Trustees of the University of Pennsylvania
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##################################################################################
import setuptools
from distutils.core import setup, Extension
with open("README.md", "r") as fh:
long_description = fh.read()
setup(name='ieeg',
version='1.1',
description='API for the IEEG.org platform',
install_requires=['deprecation','requests','numpy','pandas', 'pennprov==2.2.0'],
packages=setuptools.find_packages(),
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/ieeg-portal/ieegpy",
classifiers=[
'Programming Language :: Python :: 2-3',
'License :: OSI Approved :: Apache License',
'Operating System :: OS Independent',
])
<commit_msg>Update pennprov to 2.2.1 for Python 3.7 compatibility.<commit_after>
|
##################################################################################
# Copyright 2013-19 by the Trustees of the University of Pennsylvania
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##################################################################################
import setuptools
from distutils.core import setup, Extension
with open("README.md", "r") as fh:
long_description = fh.read()
setup(name='ieeg',
version='1.1',
description='API for the IEEG.org platform',
install_requires=['deprecation','requests','numpy','pandas', 'pennprov==2.2.1'],
packages=setuptools.find_packages(),
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/ieeg-portal/ieegpy",
classifiers=[
'Programming Language :: Python :: 2-3',
'License :: OSI Approved :: Apache License',
'Operating System :: OS Independent',
])
|
##################################################################################
# Copyright 2013-19 by the Trustees of the University of Pennsylvania
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##################################################################################
import setuptools
from distutils.core import setup, Extension
with open("README.md", "r") as fh:
long_description = fh.read()
setup(name='ieeg',
version='1.1',
description='API for the IEEG.org platform',
install_requires=['deprecation','requests','numpy','pandas', 'pennprov==2.2.0'],
packages=setuptools.find_packages(),
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/ieeg-portal/ieegpy",
classifiers=[
'Programming Language :: Python :: 2-3',
'License :: OSI Approved :: Apache License',
'Operating System :: OS Independent',
])
Update pennprov to 2.2.1 for Python 3.7 compatibility.##################################################################################
# Copyright 2013-19 by the Trustees of the University of Pennsylvania
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##################################################################################
import setuptools
from distutils.core import setup, Extension
with open("README.md", "r") as fh:
long_description = fh.read()
setup(name='ieeg',
version='1.1',
description='API for the IEEG.org platform',
install_requires=['deprecation','requests','numpy','pandas', 'pennprov==2.2.1'],
packages=setuptools.find_packages(),
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/ieeg-portal/ieegpy",
classifiers=[
'Programming Language :: Python :: 2-3',
'License :: OSI Approved :: Apache License',
'Operating System :: OS Independent',
])
|
<commit_before>##################################################################################
# Copyright 2013-19 by the Trustees of the University of Pennsylvania
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##################################################################################
import setuptools
from distutils.core import setup, Extension
with open("README.md", "r") as fh:
long_description = fh.read()
setup(name='ieeg',
version='1.1',
description='API for the IEEG.org platform',
install_requires=['deprecation','requests','numpy','pandas', 'pennprov==2.2.0'],
packages=setuptools.find_packages(),
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/ieeg-portal/ieegpy",
classifiers=[
'Programming Language :: Python :: 2-3',
'License :: OSI Approved :: Apache License',
'Operating System :: OS Independent',
])
<commit_msg>Update pennprov to 2.2.1 for Python 3.7 compatibility.<commit_after>##################################################################################
# Copyright 2013-19 by the Trustees of the University of Pennsylvania
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##################################################################################
import setuptools
from distutils.core import setup, Extension
with open("README.md", "r") as fh:
long_description = fh.read()
setup(name='ieeg',
version='1.1',
description='API for the IEEG.org platform',
install_requires=['deprecation','requests','numpy','pandas', 'pennprov==2.2.1'],
packages=setuptools.find_packages(),
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/ieeg-portal/ieegpy",
classifiers=[
'Programming Language :: Python :: 2-3',
'License :: OSI Approved :: Apache License',
'Operating System :: OS Independent',
])
|
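Editor's note on the record above: the fix pins pennprov to an exact version, which is the simplest way to guarantee the Python 3.7 fix is present; the trade-off is that later 2.2.x patch releases are excluded. The sketch below is a hedged alternative that assumes pennprov 2.2.x patch releases stay compatible, which the record does not state. (The 'Programming Language :: Python :: 2-3' entry is also not a standard trove classifier, but it is reproduced faithfully above.)

# Sketch only: the exact pin relaxed to a bounded range so compatible
# 2.2.x patch releases can still be installed.
from setuptools import setup, find_packages

setup(
    name='ieeg',
    version='1.1',
    description='API for the IEEG.org platform',
    packages=find_packages(),
    install_requires=[
        'deprecation',
        'requests',
        'numpy',
        'pandas',
        'pennprov>=2.2.1,<2.3',  # assumes 2.2.x stays compatible
    ],
)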
9fbcb33089b20003e042c2f6210a68d2121913ee
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name="hubblestack",
version="2.0",
packages=find_packages(),
entry_points={
'console_scripts': [
'hubble = hubble.daemon:run',
],
},
install_requires=[
'salt >= 2016.3.4',
],
data_files=[('/etc/init.d', ['pkg/hubble']),],
options={
# 'build_scripts': {
# 'executable': '/usr/bin/env python',
# },
'bdist_rpm': {
'requires': 'salt',
},
},
)
|
from setuptools import setup, find_packages
import platform
distro, version, _ = platform.dist()
# Default to cent7
data_files = [('/usr/lib/systemd/system', ['pkg/hubble.service']),]
if distro == 'redhat' or distro == 'centos':
if version.startswith('6'):
data_files = [('/etc/init.d', ['pkg/hubble']),]
elif version.startswith('7'):
data_files = [('/usr/lib/systemd/system', ['pkg/hubble.service']),]
setup(
name="hubblestack",
version="2.0",
packages=find_packages(),
entry_points={
'console_scripts': [
'hubble = hubble.daemon:run',
],
},
install_requires=[
'salt >= 2016.3.4',
],
data_files=data_files,
options={
# 'build_scripts': {
# 'executable': '/usr/bin/env python',
# },
'bdist_rpm': {
'requires': 'salt',
},
},
)
|
Add platform detection for centos 6/7
|
Add platform detection for centos 6/7
|
Python
|
apache-2.0
|
basepi/hubble,madchills/hubble,madchills/hubble,basepi/hubble
|
from setuptools import setup, find_packages
setup(
name="hubblestack",
version="2.0",
packages=find_packages(),
entry_points={
'console_scripts': [
'hubble = hubble.daemon:run',
],
},
install_requires=[
'salt >= 2016.3.4',
],
data_files=[('/etc/init.d', ['pkg/hubble']),],
options={
# 'build_scripts': {
# 'executable': '/usr/bin/env python',
# },
'bdist_rpm': {
'requires': 'salt',
},
},
)
Add platform detection for centos 6/7
|
from setuptools import setup, find_packages
import platform
distro, version, _ = platform.dist()
# Default to cent7
data_files = [('/usr/lib/systemd/system', ['pkg/hubble.service']),]
if distro == 'redhat' or distro == 'centos':
if version.startswith('6'):
data_files = [('/etc/init.d', ['pkg/hubble']),]
elif version.startswith('7'):
data_files = [('/usr/lib/systemd/system', ['pkg/hubble.service']),]
setup(
name="hubblestack",
version="2.0",
packages=find_packages(),
entry_points={
'console_scripts': [
'hubble = hubble.daemon:run',
],
},
install_requires=[
'salt >= 2016.3.4',
],
data_files=data_files,
options={
# 'build_scripts': {
# 'executable': '/usr/bin/env python',
# },
'bdist_rpm': {
'requires': 'salt',
},
},
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name="hubblestack",
version="2.0",
packages=find_packages(),
entry_points={
'console_scripts': [
'hubble = hubble.daemon:run',
],
},
install_requires=[
'salt >= 2016.3.4',
],
data_files=[('/etc/init.d', ['pkg/hubble']),],
options={
# 'build_scripts': {
# 'executable': '/usr/bin/env python',
# },
'bdist_rpm': {
'requires': 'salt',
},
},
)
<commit_msg>Add platform detection for centos 6/7<commit_after>
|
from setuptools import setup, find_packages
import platform
distro, version, _ = platform.dist()
# Default to cent7
data_files = [('/usr/lib/systemd/system', ['pkg/hubble.service']),]
if distro == 'redhat' or distro == 'centos':
if version.startswith('6'):
data_files = [('/etc/init.d', ['pkg/hubble']),]
elif version.startswith('7'):
data_files = [('/usr/lib/systemd/system', ['pkg/hubble.service']),]
setup(
name="hubblestack",
version="2.0",
packages=find_packages(),
entry_points={
'console_scripts': [
'hubble = hubble.daemon:run',
],
},
install_requires=[
'salt >= 2016.3.4',
],
data_files=data_files,
options={
# 'build_scripts': {
# 'executable': '/usr/bin/env python',
# },
'bdist_rpm': {
'requires': 'salt',
},
},
)
|
from setuptools import setup, find_packages
setup(
name="hubblestack",
version="2.0",
packages=find_packages(),
entry_points={
'console_scripts': [
'hubble = hubble.daemon:run',
],
},
install_requires=[
'salt >= 2016.3.4',
],
data_files=[('/etc/init.d', ['pkg/hubble']),],
options={
# 'build_scripts': {
# 'executable': '/usr/bin/env python',
# },
'bdist_rpm': {
'requires': 'salt',
},
},
)
Add platform detection for centos 6/7from setuptools import setup, find_packages
import platform
distro, version, _ = platform.dist()
# Default to cent7
data_files = [('/usr/lib/systemd/system', ['pkg/hubble.service']),]
if distro == 'redhat' or distro == 'centos':
if version.startswith('6'):
data_files = [('/etc/init.d', ['pkg/hubble']),]
elif version.startswith('7'):
data_files = [('/usr/lib/systemd/system', ['pkg/hubble.service']),]
setup(
name="hubblestack",
version="2.0",
packages=find_packages(),
entry_points={
'console_scripts': [
'hubble = hubble.daemon:run',
],
},
install_requires=[
'salt >= 2016.3.4',
],
data_files=data_files,
options={
# 'build_scripts': {
# 'executable': '/usr/bin/env python',
# },
'bdist_rpm': {
'requires': 'salt',
},
},
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name="hubblestack",
version="2.0",
packages=find_packages(),
entry_points={
'console_scripts': [
'hubble = hubble.daemon:run',
],
},
install_requires=[
'salt >= 2016.3.4',
],
data_files=[('/etc/init.d', ['pkg/hubble']),],
options={
# 'build_scripts': {
# 'executable': '/usr/bin/env python',
# },
'bdist_rpm': {
'requires': 'salt',
},
},
)
<commit_msg>Add platform detection for centos 6/7<commit_after>from setuptools import setup, find_packages
import platform
distro, version, _ = platform.dist()
# Default to cent7
data_files = [('/usr/lib/systemd/system', ['pkg/hubble.service']),]
if distro == 'redhat' or distro == 'centos':
if version.startswith('6'):
data_files = [('/etc/init.d', ['pkg/hubble']),]
elif version.startswith('7'):
data_files = [('/usr/lib/systemd/system', ['pkg/hubble.service']),]
setup(
name="hubblestack",
version="2.0",
packages=find_packages(),
entry_points={
'console_scripts': [
'hubble = hubble.daemon:run',
],
},
install_requires=[
'salt >= 2016.3.4',
],
data_files=data_files,
options={
# 'build_scripts': {
# 'executable': '/usr/bin/env python',
# },
'bdist_rpm': {
'requires': 'salt',
},
},
)
|
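Editor's note on the record above: platform.dist() was deprecated in Python 3.5 and removed in Python 3.8, so this detection logic only runs on older interpreters. Below is a stdlib-only illustrative sketch that reads /etc/os-release instead; the helper name detect_data_files is mine, the packaging paths come from the record, and the sketch assumes /etc/os-release exists on EL7-and-later hosts (EL6-era systems, which predate it, fall back to the SysV path).

# Sketch only: distro detection without platform.dist(), which no longer
# exists on Python 3.8+. Parses /etc/os-release; a missing file is treated
# as an old EL6-style host and gets the init script.
import re

def detect_data_files():
    systemd = [('/usr/lib/systemd/system', ['pkg/hubble.service'])]
    sysv = [('/etc/init.d', ['pkg/hubble'])]
    try:
        with open('/etc/os-release') as fh:
            info = dict(re.findall(r'^(\w+)="?([^"\n]*?)"?$', fh.read(), re.M))
    except (IOError, OSError):
        return sysv
    if info.get('ID') in ('rhel', 'centos') and info.get('VERSION_ID', '').startswith('6'):
        return sysv
    return systemd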
a038f3d1d194490ea9a707db314d0fd1dabeb43b
|
setup.py
|
setup.py
|
import os
from setuptools import setup, find_packages
# import glob
src_dir = os.path.dirname(__file__)
def read(filename):
full_path = os.path.join(src_dir, filename)
with open(full_path) as fd:
return fd.read()
if __name__ == '__main__':
setup(
name='stacker',
version='0.1.0',
author='Michael Barrett',
author_email='loki77@gmail.com',
license="New BSD license",
url="https://github.com/remind101/stacker",
description='Opinionated AWS CloudFormation Stack manager',
# long_description=read('README.rst'),
packages=find_packages(),
)
|
import os
from setuptools import setup, find_packages
import glob
src_dir = os.path.dirname(__file__)
def read(filename):
full_path = os.path.join(src_dir, filename)
with open(full_path) as fd:
return fd.read()
if __name__ == '__main__':
setup(
name='stacker',
version='0.1.0',
author='Michael Barrett',
author_email='loki77@gmail.com',
license="New BSD license",
url="https://github.com/remind101/stacker",
description='Opinionated AWS CloudFormation Stack manager',
long_description=read('README.rst'),
packages=find_packages(),
scripts=glob.glob(os.path.join(src_dir, 'scripts', '*')),
)
|
Update w/ Readme & scripts
|
Update w/ Readme & scripts
|
Python
|
bsd-2-clause
|
mhahn/stacker,mhahn/stacker,federicobaldo/stacker,remind101/stacker,remind101/stacker,EnTeQuAk/stacker
|
import os
from setuptools import setup, find_packages
# import glob
src_dir = os.path.dirname(__file__)
def read(filename):
full_path = os.path.join(src_dir, filename)
with open(full_path) as fd:
return fd.read()
if __name__ == '__main__':
setup(
name='stacker',
version='0.1.0',
author='Michael Barrett',
author_email='loki77@gmail.com',
license="New BSD license",
url="https://github.com/remind101/stacker",
description='Opinionated AWS CloudFormation Stack manager',
# long_description=read('README.rst'),
packages=find_packages(),
)
Update w/ Readme & scripts
|
import os
from setuptools import setup, find_packages
import glob
src_dir = os.path.dirname(__file__)
def read(filename):
full_path = os.path.join(src_dir, filename)
with open(full_path) as fd:
return fd.read()
if __name__ == '__main__':
setup(
name='stacker',
version='0.1.0',
author='Michael Barrett',
author_email='loki77@gmail.com',
license="New BSD license",
url="https://github.com/remind101/stacker",
description='Opinionated AWS CloudFormation Stack manager',
long_description=read('README.rst'),
packages=find_packages(),
scripts=glob.glob(os.path.join(src_dir, 'scripts', '*')),
)
|
<commit_before>import os
from setuptools import setup, find_packages
# import glob
src_dir = os.path.dirname(__file__)
def read(filename):
full_path = os.path.join(src_dir, filename)
with open(full_path) as fd:
return fd.read()
if __name__ == '__main__':
setup(
name='stacker',
version='0.1.0',
author='Michael Barrett',
author_email='loki77@gmail.com',
license="New BSD license",
url="https://github.com/remind101/stacker",
description='Opinionated AWS CloudFormation Stack manager',
# long_description=read('README.rst'),
packages=find_packages(),
)
<commit_msg>Update w/ Readme & scripts<commit_after>
|
import os
from setuptools import setup, find_packages
import glob
src_dir = os.path.dirname(__file__)
def read(filename):
full_path = os.path.join(src_dir, filename)
with open(full_path) as fd:
return fd.read()
if __name__ == '__main__':
setup(
name='stacker',
version='0.1.0',
author='Michael Barrett',
author_email='loki77@gmail.com',
license="New BSD license",
url="https://github.com/remind101/stacker",
description='Opinionated AWS CloudFormation Stack manager',
long_description=read('README.rst'),
packages=find_packages(),
scripts=glob.glob(os.path.join(src_dir, 'scripts', '*')),
)
|
import os
from setuptools import setup, find_packages
# import glob
src_dir = os.path.dirname(__file__)
def read(filename):
full_path = os.path.join(src_dir, filename)
with open(full_path) as fd:
return fd.read()
if __name__ == '__main__':
setup(
name='stacker',
version='0.1.0',
author='Michael Barrett',
author_email='loki77@gmail.com',
license="New BSD license",
url="https://github.com/remind101/stacker",
description='Opinionated AWS CloudFormation Stack manager',
# long_description=read('README.rst'),
packages=find_packages(),
)
Update w/ Readme & scriptsimport os
from setuptools import setup, find_packages
import glob
src_dir = os.path.dirname(__file__)
def read(filename):
full_path = os.path.join(src_dir, filename)
with open(full_path) as fd:
return fd.read()
if __name__ == '__main__':
setup(
name='stacker',
version='0.1.0',
author='Michael Barrett',
author_email='loki77@gmail.com',
license="New BSD license",
url="https://github.com/remind101/stacker",
description='Opinionated AWS CloudFormation Stack manager',
long_description=read('README.rst'),
packages=find_packages(),
scripts=glob.glob(os.path.join(src_dir, 'scripts', '*')),
)
|
<commit_before>import os
from setuptools import setup, find_packages
# import glob
src_dir = os.path.dirname(__file__)
def read(filename):
full_path = os.path.join(src_dir, filename)
with open(full_path) as fd:
return fd.read()
if __name__ == '__main__':
setup(
name='stacker',
version='0.1.0',
author='Michael Barrett',
author_email='loki77@gmail.com',
license="New BSD license",
url="https://github.com/remind101/stacker",
description='Opinionated AWS CloudFormation Stack manager',
# long_description=read('README.rst'),
packages=find_packages(),
)
<commit_msg>Update w/ Readme & scripts<commit_after>import os
from setuptools import setup, find_packages
import glob
src_dir = os.path.dirname(__file__)
def read(filename):
full_path = os.path.join(src_dir, filename)
with open(full_path) as fd:
return fd.read()
if __name__ == '__main__':
setup(
name='stacker',
version='0.1.0',
author='Michael Barrett',
author_email='loki77@gmail.com',
license="New BSD license",
url="https://github.com/remind101/stacker",
description='Opinionated AWS CloudFormation Stack manager',
long_description=read('README.rst'),
packages=find_packages(),
scripts=glob.glob(os.path.join(src_dir, 'scripts', '*')),
)
|
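Editor's note on the record above: shipping executables via scripts=glob.glob(...) copies whatever sits in scripts/ verbatim. A common alternative is a console_scripts entry point, which setuptools turns into a generated wrapper script on install. The sketch below is illustrative only, and the module path stacker.cli:main is hypothetical; the real entry function is not shown in this record.

# Sketch only: CLI exposed through an entry point instead of raw files
# under scripts/. "stacker.cli:main" is a placeholder module path.
from setuptools import setup, find_packages

setup(
    name='stacker',
    version='0.1.0',
    packages=find_packages(),
    entry_points={
        'console_scripts': [
            'stacker = stacker.cli:main',
        ],
    },
)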
f545e1a6d95c22da7f941887d9b8823f3cdd5822
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='idalink',
description='An interface to the insides of IDA!',
long_description=open('README.md').read(),
version='0.11',
url='https://github.com/zardus/idalink',
license='GNU General Public License v3',
packages=find_packages(),
install_requires=[
'rpyc',
],
)
|
from setuptools import setup, find_packages
setup(
name='idalink',
description='An interface to the insides of IDA!',
long_description=open('README.md').read(),
version='0.11',
url='https://github.com/zardus/idalink',
license='GNU General Public License v3',
author='Zardus',
author_email='zardus@gmail.com',
maintainer='rhelmot',
maintainer_email='audrey@rhelmot.io',
packages=find_packages(),
install_requires=[
'rpyc',
],
)
|
Add author/maintainer information to keep pypi happy
|
Add author/maintainer information to keep pypi happy
|
Python
|
bsd-2-clause
|
zardus/idalink
|
from setuptools import setup, find_packages
setup(
name='idalink',
description='An interface to the insides of IDA!',
long_description=open('README.md').read(),
version='0.11',
url='https://github.com/zardus/idalink',
license='GNU General Public License v3',
packages=find_packages(),
install_requires=[
'rpyc',
],
)
Add author/maintainer information to keep pypi happy
|
from setuptools import setup, find_packages
setup(
name='idalink',
description='An interface to the insides of IDA!',
long_description=open('README.md').read(),
version='0.11',
url='https://github.com/zardus/idalink',
license='GNU General Public License v3',
author='Zardus',
author_email='zardus@gmail.com',
maintainer='rhelmot',
maintainer_email='audrey@rhelmot.io',
packages=find_packages(),
install_requires=[
'rpyc',
],
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='idalink',
description='An interface to the insides of IDA!',
long_description=open('README.md').read(),
version='0.11',
url='https://github.com/zardus/idalink',
license='GNU General Public License v3',
packages=find_packages(),
install_requires=[
'rpyc',
],
)
<commit_msg>Add author/maintainer information to keep pypi happy<commit_after>
|
from setuptools import setup, find_packages
setup(
name='idalink',
description='An interface to the insides of IDA!',
long_description=open('README.md').read(),
version='0.11',
url='https://github.com/zardus/idalink',
license='GNU General Public License v3',
author='Zardus',
author_email='zardus@gmail.com',
maintainer='rhelmot',
maintainer_email='audrey@rhelmot.io',
packages=find_packages(),
install_requires=[
'rpyc',
],
)
|
from setuptools import setup, find_packages
setup(
name='idalink',
description='An interface to the insides of IDA!',
long_description=open('README.md').read(),
version='0.11',
url='https://github.com/zardus/idalink',
license='GNU General Public License v3',
packages=find_packages(),
install_requires=[
'rpyc',
],
)
Add author/maintainer information to keep pypi happyfrom setuptools import setup, find_packages
setup(
name='idalink',
description='An interface to the insides of IDA!',
long_description=open('README.md').read(),
version='0.11',
url='https://github.com/zardus/idalink',
license='GNU General Public License v3',
author='Zardus',
author_email='zardus@gmail.com',
maintainer='rhelmot',
maintainer_email='audrey@rhelmot.io',
packages=find_packages(),
install_requires=[
'rpyc',
],
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='idalink',
description='An interface to the insides of IDA!',
long_description=open('README.md').read(),
version='0.11',
url='https://github.com/zardus/idalink',
license='GNU General Public License v3',
packages=find_packages(),
install_requires=[
'rpyc',
],
)
<commit_msg>Add author/maintainer information to keep pypi happy<commit_after>from setuptools import setup, find_packages
setup(
name='idalink',
description='An interface to the insides of IDA!',
long_description=open('README.md').read(),
version='0.11',
url='https://github.com/zardus/idalink',
license='GNU General Public License v3',
author='Zardus',
author_email='zardus@gmail.com',
maintainer='rhelmot',
maintainer_email='audrey@rhelmot.io',
packages=find_packages(),
install_requires=[
'rpyc',
],
)
|
6f4508102dff2db380434d7cf2a00a2cc3141731
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from dotfiles.core import __version__
if sys.argv[-1] == "publish":
os.system("python setup.py sdist upload")
sys.exit()
if sys.argv[-1] == "test":
os.system("python test_dotfiles.py")
sys.exit()
setup(name='dotfiles',
version=__version__,
description='Easily manage your dotfiles',
long_description=open('README.rst').read() + '\n\n' +
open('LICENSE.rst').read() + '\n\n' +
open('HISTORY.rst').read(),
author='Jon Bernard',
author_email='jbernard@tuxion.com',
url='https://github.com/jbernard/dotfiles',
packages=['dotfiles'],
license='ISC',
classifiers=(
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)'
'Natural Language :: English',
'Programming Language :: Python',
),
scripts=['bin/dotfiles'],
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from dotfiles.core import __version__
if sys.argv[-1] == "publish":
os.system("python setup.py sdist upload")
sys.exit()
if sys.argv[-1] == "test":
os.system("python test_dotfiles.py")
sys.exit()
setup(name='dotfiles',
version=__version__,
description='Easily manage your dotfiles',
long_description=open('README.rst').read() + '\n\n' +
open('LICENSE.rst').read() + '\n\n' +
open('HISTORY.rst').read(),
author='Jon Bernard',
author_email='jbernard@tuxion.com',
url='https://github.com/jbernard/dotfiles',
packages=['dotfiles'],
license='ISC',
classifiers=(
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)'
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
),
scripts=['bin/dotfiles'],
)
|
Add specific Python version trove classifiers
|
Add specific Python version trove classifiers
|
Python
|
isc
|
Bklyn/dotfiles,aparente/Dotfiles,aparente/Dotfiles,aparente/Dotfiles,nilehmann/dotfiles-1,aparente/Dotfiles
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from dotfiles.core import __version__
if sys.argv[-1] == "publish":
os.system("python setup.py sdist upload")
sys.exit()
if sys.argv[-1] == "test":
os.system("python test_dotfiles.py")
sys.exit()
setup(name='dotfiles',
version=__version__,
description='Easily manage your dotfiles',
long_description=open('README.rst').read() + '\n\n' +
open('LICENSE.rst').read() + '\n\n' +
open('HISTORY.rst').read(),
author='Jon Bernard',
author_email='jbernard@tuxion.com',
url='https://github.com/jbernard/dotfiles',
packages=['dotfiles'],
license='ISC',
classifiers=(
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)'
'Natural Language :: English',
'Programming Language :: Python',
),
scripts=['bin/dotfiles'],
)
Add specific Python version trove classifiers
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from dotfiles.core import __version__
if sys.argv[-1] == "publish":
os.system("python setup.py sdist upload")
sys.exit()
if sys.argv[-1] == "test":
os.system("python test_dotfiles.py")
sys.exit()
setup(name='dotfiles',
version=__version__,
description='Easily manage your dotfiles',
long_description=open('README.rst').read() + '\n\n' +
open('LICENSE.rst').read() + '\n\n' +
open('HISTORY.rst').read(),
author='Jon Bernard',
author_email='jbernard@tuxion.com',
url='https://github.com/jbernard/dotfiles',
packages=['dotfiles'],
license='ISC',
classifiers=(
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)'
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
),
scripts=['bin/dotfiles'],
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from dotfiles.core import __version__
if sys.argv[-1] == "publish":
os.system("python setup.py sdist upload")
sys.exit()
if sys.argv[-1] == "test":
os.system("python test_dotfiles.py")
sys.exit()
setup(name='dotfiles',
version=__version__,
description='Easily manage your dotfiles',
long_description=open('README.rst').read() + '\n\n' +
open('LICENSE.rst').read() + '\n\n' +
open('HISTORY.rst').read(),
author='Jon Bernard',
author_email='jbernard@tuxion.com',
url='https://github.com/jbernard/dotfiles',
packages=['dotfiles'],
license='ISC',
classifiers=(
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)'
'Natural Language :: English',
'Programming Language :: Python',
),
scripts=['bin/dotfiles'],
)
<commit_msg>Add specific Python version trove classifiers<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from dotfiles.core import __version__
if sys.argv[-1] == "publish":
os.system("python setup.py sdist upload")
sys.exit()
if sys.argv[-1] == "test":
os.system("python test_dotfiles.py")
sys.exit()
setup(name='dotfiles',
version=__version__,
description='Easily manage your dotfiles',
long_description=open('README.rst').read() + '\n\n' +
open('LICENSE.rst').read() + '\n\n' +
open('HISTORY.rst').read(),
author='Jon Bernard',
author_email='jbernard@tuxion.com',
url='https://github.com/jbernard/dotfiles',
packages=['dotfiles'],
license='ISC',
classifiers=(
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)'
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
),
scripts=['bin/dotfiles'],
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from dotfiles.core import __version__
if sys.argv[-1] == "publish":
os.system("python setup.py sdist upload")
sys.exit()
if sys.argv[-1] == "test":
os.system("python test_dotfiles.py")
sys.exit()
setup(name='dotfiles',
version=__version__,
description='Easily manage your dotfiles',
long_description=open('README.rst').read() + '\n\n' +
open('LICENSE.rst').read() + '\n\n' +
open('HISTORY.rst').read(),
author='Jon Bernard',
author_email='jbernard@tuxion.com',
url='https://github.com/jbernard/dotfiles',
packages=['dotfiles'],
license='ISC',
classifiers=(
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)'
'Natural Language :: English',
'Programming Language :: Python',
),
scripts=['bin/dotfiles'],
)
Add specific Python version trove classifiers#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from dotfiles.core import __version__
if sys.argv[-1] == "publish":
os.system("python setup.py sdist upload")
sys.exit()
if sys.argv[-1] == "test":
os.system("python test_dotfiles.py")
sys.exit()
setup(name='dotfiles',
version=__version__,
description='Easily manage your dotfiles',
long_description=open('README.rst').read() + '\n\n' +
open('LICENSE.rst').read() + '\n\n' +
open('HISTORY.rst').read(),
author='Jon Bernard',
author_email='jbernard@tuxion.com',
url='https://github.com/jbernard/dotfiles',
packages=['dotfiles'],
license='ISC',
classifiers=(
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)'
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
),
scripts=['bin/dotfiles'],
)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from dotfiles.core import __version__
if sys.argv[-1] == "publish":
os.system("python setup.py sdist upload")
sys.exit()
if sys.argv[-1] == "test":
os.system("python test_dotfiles.py")
sys.exit()
setup(name='dotfiles',
version=__version__,
description='Easily manage your dotfiles',
long_description=open('README.rst').read() + '\n\n' +
open('LICENSE.rst').read() + '\n\n' +
open('HISTORY.rst').read(),
author='Jon Bernard',
author_email='jbernard@tuxion.com',
url='https://github.com/jbernard/dotfiles',
packages=['dotfiles'],
license='ISC',
classifiers=(
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)'
'Natural Language :: English',
'Programming Language :: Python',
),
scripts=['bin/dotfiles'],
)
<commit_msg>Add specific Python version trove classifiers<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from dotfiles.core import __version__
if sys.argv[-1] == "publish":
os.system("python setup.py sdist upload")
sys.exit()
if sys.argv[-1] == "test":
os.system("python test_dotfiles.py")
sys.exit()
setup(name='dotfiles',
version=__version__,
description='Easily manage your dotfiles',
long_description=open('README.rst').read() + '\n\n' +
open('LICENSE.rst').read() + '\n\n' +
open('HISTORY.rst').read(),
author='Jon Bernard',
author_email='jbernard@tuxion.com',
url='https://github.com/jbernard/dotfiles',
packages=['dotfiles'],
license='ISC',
classifiers=(
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: ISC License (ISCL)'
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
),
scripts=['bin/dotfiles'],
)
|
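Editor's note on the record above: in both the before and after versions, the classifiers tuple is missing a comma after 'License :: OSI Approved :: ISC License (ISCL)', so implicit string concatenation silently fuses it with 'Natural Language :: English' into one invalid classifier. The record is reproduced as committed; a corrected sketch of just that tuple follows.

# Sketch only: the classifiers tuple with the missing comma restored so
# the license and natural-language entries remain separate strings.
classifiers = (
    'Development Status :: 4 - Beta',
    'Environment :: Console',
    'Intended Audience :: Developers',
    'License :: OSI Approved :: ISC License (ISCL)',
    'Natural Language :: English',
    'Programming Language :: Python',
    'Programming Language :: Python :: 2',
    'Programming Language :: Python :: 2.5',
    'Programming Language :: Python :: 2.6',
    'Programming Language :: Python :: 2.7',
    'Programming Language :: Python :: 3',
    'Programming Language :: Python :: 3.0',
    'Programming Language :: Python :: 3.1',
    'Programming Language :: Python :: 3.2',
)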
7a7e824b63c4498ee12c59a6af459e6fe8639003
|
server.py
|
server.py
|
import bottle
import waitress
import controller
import breathe
from pytz import timezone
from apscheduler.schedulers.background import BackgroundScheduler
bottle_app = bottle.app()
scheduler = BackgroundScheduler()
scheduler.configure(timezone=timezone('US/Pacific'))
breather = breathe.Breathe()
my_controller = controller.Controller(bottle_app, breather)
@scheduler.scheduled_job(trigger='cron', hour=17, minute=30)
def on_job():
"""Start at 7:00pm PT"""
print('STARTING BREATHER')
breather.restart()
@scheduler.scheduled_job(trigger='cron', hour=19, minute=30)
def off_job():
"""End at 9:00pm PT"""
print("STOPPING BREATHER")
breather.stop()
if __name__ == '__main__':
scheduler.start()
waitress.serve(bottle_app, host='0.0.0.0', port=7000)
|
import bottle
from cso_parser import CsoParser
import waitress
from pytz import timezone
from apscheduler.schedulers.background import BackgroundScheduler
from breathe import Breathe
from controller import Controller
bottle_app = bottle.app()
scheduler = BackgroundScheduler()
scheduler.configure(timezone=timezone('US/Pacific'))
breather = Breathe()
cso_parser = CsoParser()
my_controller = Controller(bottle_app, breather)
@scheduler.scheduled_job(trigger='cron', hour=17, minute=30)
def on_job():
"""Start at 7:00pm PT"""
print('STARTING BREATHER')
breather.restart()
@scheduler.scheduled_job(trigger='cron', hour=19, minute=30)
def off_job():
"""End at 9:00pm PT"""
print("STOPPING BREATHER")
breather.stop()
@scheduler.scheduled_job(trigger='cron', hour=1, minute=30)
def cso_job():
"""Get CSO data at 1:30am and update the breather with the current status."""
print("Fetch CSO status and update breather.")
cso_parser.update()
if cso_parser.now_count or cso_parser.recent_count:
breather.erratic()
else:
breather.calm()
if __name__ == '__main__':
scheduler.start()
waitress.serve(bottle_app, host='0.0.0.0', port=7000)
|
Add scheduled cso_job method - Retrieves the CSO status and updates the breathe rate
|
Add scheduled cso_job method
- Retrieves the CSO status and updates the breathe rate
|
Python
|
mit
|
tipsqueal/duwamish-lighthouse,tipsqueal/duwamish-lighthouse,illumenati/duwamish-lighthouse,illumenati/duwamish-lighthouse
|
import bottle
import waitress
import controller
import breathe
from pytz import timezone
from apscheduler.schedulers.background import BackgroundScheduler
bottle_app = bottle.app()
scheduler = BackgroundScheduler()
scheduler.configure(timezone=timezone('US/Pacific'))
breather = breathe.Breathe()
my_controller = controller.Controller(bottle_app, breather)
@scheduler.scheduled_job(trigger='cron', hour=17, minute=30)
def on_job():
"""Start at 7:00pm PT"""
print('STARTING BREATHER')
breather.restart()
@scheduler.scheduled_job(trigger='cron', hour=19, minute=30)
def off_job():
"""End at 9:00pm PT"""
print("STOPPING BREATHER")
breather.stop()
if __name__ == '__main__':
scheduler.start()
waitress.serve(bottle_app, host='0.0.0.0', port=7000)
Add scheduled cso_job method
- Retrieves the CSO status and updates the breathe rate
|
import bottle
from cso_parser import CsoParser
import waitress
from pytz import timezone
from apscheduler.schedulers.background import BackgroundScheduler
from breathe import Breathe
from controller import Controller
bottle_app = bottle.app()
scheduler = BackgroundScheduler()
scheduler.configure(timezone=timezone('US/Pacific'))
breather = Breathe()
cso_parser = CsoParser()
my_controller = Controller(bottle_app, breather)
@scheduler.scheduled_job(trigger='cron', hour=17, minute=30)
def on_job():
"""Start at 7:00pm PT"""
print('STARTING BREATHER')
breather.restart()
@scheduler.scheduled_job(trigger='cron', hour=19, minute=30)
def off_job():
"""End at 9:00pm PT"""
print("STOPPING BREATHER")
breather.stop()
@scheduler.scheduled_job(trigger='cron', hour=1, minute=30)
def cso_job():
"""Get CSO data at 1:30am and update the breather with the current status."""
print("Fetch CSO status and update breather.")
cso_parser.update()
if cso_parser.now_count or cso_parser.recent_count:
breather.erratic()
else:
breather.calm()
if __name__ == '__main__':
scheduler.start()
waitress.serve(bottle_app, host='0.0.0.0', port=7000)
|
<commit_before>import bottle
import waitress
import controller
import breathe
from pytz import timezone
from apscheduler.schedulers.background import BackgroundScheduler
bottle_app = bottle.app()
scheduler = BackgroundScheduler()
scheduler.configure(timezone=timezone('US/Pacific'))
breather = breathe.Breathe()
my_controller = controller.Controller(bottle_app, breather)
@scheduler.scheduled_job(trigger='cron', hour=17, minute=30)
def on_job():
"""Start at 7:00pm PT"""
print('STARTING BREATHER')
breather.restart()
@scheduler.scheduled_job(trigger='cron', hour=19, minute=30)
def off_job():
"""End at 9:00pm PT"""
print("STOPPING BREATHER")
breather.stop()
if __name__ == '__main__':
scheduler.start()
waitress.serve(bottle_app, host='0.0.0.0', port=7000)
<commit_msg>Add scheduled cso_job method
- Retrieves the CSO status and updates the breathe rate<commit_after>
|
import bottle
from cso_parser import CsoParser
import waitress
from pytz import timezone
from apscheduler.schedulers.background import BackgroundScheduler
from breathe import Breathe
from controller import Controller
bottle_app = bottle.app()
scheduler = BackgroundScheduler()
scheduler.configure(timezone=timezone('US/Pacific'))
breather = Breathe()
cso_parser = CsoParser()
my_controller = Controller(bottle_app, breather)
@scheduler.scheduled_job(trigger='cron', hour=17, minute=30)
def on_job():
"""Start at 7:00pm PT"""
print('STARTING BREATHER')
breather.restart()
@scheduler.scheduled_job(trigger='cron', hour=19, minute=30)
def off_job():
"""End at 9:00pm PT"""
print("STOPPING BREATHER")
breather.stop()
@scheduler.scheduled_job(trigger='cron', hour=1, minute=30)
def cso_job():
"""Get CSO data at 1:30am and update the breather with the current status."""
print("Fetch CSO status and update breather.")
cso_parser.update()
if cso_parser.now_count or cso_parser.recent_count:
breather.erratic()
else:
breather.calm()
if __name__ == '__main__':
scheduler.start()
waitress.serve(bottle_app, host='0.0.0.0', port=7000)
|
import bottle
import waitress
import controller
import breathe
from pytz import timezone
from apscheduler.schedulers.background import BackgroundScheduler
bottle_app = bottle.app()
scheduler = BackgroundScheduler()
scheduler.configure(timezone=timezone('US/Pacific'))
breather = breathe.Breathe()
my_controller = controller.Controller(bottle_app, breather)
@scheduler.scheduled_job(trigger='cron', hour=17, minute=30)
def on_job():
"""Start at 7:00pm PT"""
print('STARTING BREATHER')
breather.restart()
@scheduler.scheduled_job(trigger='cron', hour=19, minute=30)
def off_job():
"""End at 9:00pm PT"""
print("STOPPING BREATHER")
breather.stop()
if __name__ == '__main__':
scheduler.start()
waitress.serve(bottle_app, host='0.0.0.0', port=7000)
Add scheduled cso_job method
- Retrieves the CSO status and updates the breathe rateimport bottle
from cso_parser import CsoParser
import waitress
from pytz import timezone
from apscheduler.schedulers.background import BackgroundScheduler
from breathe import Breathe
from controller import Controller
bottle_app = bottle.app()
scheduler = BackgroundScheduler()
scheduler.configure(timezone=timezone('US/Pacific'))
breather = Breathe()
cso_parser = CsoParser()
my_controller = Controller(bottle_app, breather)
@scheduler.scheduled_job(trigger='cron', hour=17, minute=30)
def on_job():
"""Start at 7:00pm PT"""
print('STARTING BREATHER')
breather.restart()
@scheduler.scheduled_job(trigger='cron', hour=19, minute=30)
def off_job():
"""End at 9:00pm PT"""
print("STOPPING BREATHER")
breather.stop()
@scheduler.scheduled_job(trigger='cron', hour=1, minute=30)
def cso_job():
"""Get CSO data at 1:30am and update the breather with the current status."""
print("Fetch CSO status and update breather.")
cso_parser.update()
if cso_parser.now_count or cso_parser.recent_count:
breather.erratic()
else:
breather.calm()
if __name__ == '__main__':
scheduler.start()
waitress.serve(bottle_app, host='0.0.0.0', port=7000)
|
<commit_before>import bottle
import waitress
import controller
import breathe
from pytz import timezone
from apscheduler.schedulers.background import BackgroundScheduler
bottle_app = bottle.app()
scheduler = BackgroundScheduler()
scheduler.configure(timezone=timezone('US/Pacific'))
breather = breathe.Breathe()
my_controller = controller.Controller(bottle_app, breather)
@scheduler.scheduled_job(trigger='cron', hour=17, minute=30)
def on_job():
"""Start at 7:00pm PT"""
print('STARTING BREATHER')
breather.restart()
@scheduler.scheduled_job(trigger='cron', hour=19, minute=30)
def off_job():
"""End at 9:00pm PT"""
print("STOPPING BREATHER")
breather.stop()
if __name__ == '__main__':
scheduler.start()
waitress.serve(bottle_app, host='0.0.0.0', port=7000)
<commit_msg>Add scheduled cso_job method
- Retrieves the CSO status and updates the breathe rate<commit_after>import bottle
from cso_parser import CsoParser
import waitress
from pytz import timezone
from apscheduler.schedulers.background import BackgroundScheduler
from breathe import Breathe
from controller import Controller
bottle_app = bottle.app()
scheduler = BackgroundScheduler()
scheduler.configure(timezone=timezone('US/Pacific'))
breather = Breathe()
cso_parser = CsoParser()
my_controller = Controller(bottle_app, breather)
@scheduler.scheduled_job(trigger='cron', hour=17, minute=30)
def on_job():
"""Start at 7:00pm PT"""
print('STARTING BREATHER')
breather.restart()
@scheduler.scheduled_job(trigger='cron', hour=19, minute=30)
def off_job():
"""End at 9:00pm PT"""
print("STOPPING BREATHER")
breather.stop()
@scheduler.scheduled_job(trigger='cron', hour=1, minute=30)
def cso_job():
"""Get CSO data at 1:30am and update the breather with the current status."""
print("Fetch CSO status and update breather.")
cso_parser.update()
if cso_parser.now_count or cso_parser.recent_count:
breather.erratic()
else:
breather.calm()
if __name__ == '__main__':
scheduler.start()
waitress.serve(bottle_app, host='0.0.0.0', port=7000)
|
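Editor's note on the record above: the jobs are registered with the @scheduled_job decorator at import time. An equivalent, illustrative formulation uses add_job() with the same cron trigger, which can be convenient when jobs must be registered conditionally; the job body here is a trimmed placeholder for the update logic shown in the record.

# Sketch only: the 1:30am CSO job registered imperatively via add_job().
from apscheduler.schedulers.background import BackgroundScheduler
from pytz import timezone

scheduler = BackgroundScheduler(timezone=timezone('US/Pacific'))

def cso_job():
    # Placeholder body; the real parsing/breather logic is in the record.
    print("Fetch CSO status and update breather.")

scheduler.add_job(cso_job, trigger='cron', hour=1, minute=30)
scheduler.start()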
6382a5d47f720d62c596a9f7dd24f6d0aa9dff55
|
plugins/invitejoiner/invitejoiner.py
|
plugins/invitejoiner/invitejoiner.py
|
import plugin
from twisted.python import log
class Invitejoiner(plugin.Plugin):
def __init__(self):
plugin.Plugin.__init__(self, "Invitejoiner")
def invited(self, server_id, channel):
log.msg("Invited to: ", channel)
self.join(server_id, channel)
Invitejoiner.run()
|
import plugin
from twisted.python import log
class Invitejoiner(plugin.Plugin):
def __init__(self):
plugin.Plugin.__init__(self, "Invitejoiner")
def invited(self, server_id, channel):
log.msg("Invited to: ", channel)
self.join(server_id, channel)
if __name__ == "__main__":
sys.exit(Invitejoiner.run())
|
Fix starting the Invitejoiner plugin
|
Fix starting the Invitejoiner plugin
|
Python
|
mit
|
Tigge/platinumshrimp
|
import plugin
from twisted.python import log
class Invitejoiner(plugin.Plugin):
def __init__(self):
plugin.Plugin.__init__(self, "Invitejoiner")
def invited(self, server_id, channel):
log.msg("Invited to: ", channel)
self.join(server_id, channel)
Invitejoiner.run()
Fix starting the Invitejoiner plugin
|
import plugin
from twisted.python import log
class Invitejoiner(plugin.Plugin):
def __init__(self):
plugin.Plugin.__init__(self, "Invitejoiner")
def invited(self, server_id, channel):
log.msg("Invited to: ", channel)
self.join(server_id, channel)
if __name__ == "__main__":
sys.exit(Invitejoiner.run())
|
<commit_before>
import plugin
from twisted.python import log
class Invitejoiner(plugin.Plugin):
def __init__(self):
plugin.Plugin.__init__(self, "Invitejoiner")
def invited(self, server_id, channel):
log.msg("Invited to: ", channel)
self.join(server_id, channel)
Invitejoiner.run()
<commit_msg>Fix starting the Invitejoiner plugin<commit_after>
|
import plugin
from twisted.python import log
class Invitejoiner(plugin.Plugin):
def __init__(self):
plugin.Plugin.__init__(self, "Invitejoiner")
def invited(self, server_id, channel):
log.msg("Invited to: ", channel)
self.join(server_id, channel)
if __name__ == "__main__":
sys.exit(Invitejoiner.run())
|
import plugin
from twisted.python import log
class Invitejoiner(plugin.Plugin):
def __init__(self):
plugin.Plugin.__init__(self, "Invitejoiner")
def invited(self, server_id, channel):
log.msg("Invited to: ", channel)
self.join(server_id, channel)
Invitejoiner.run()
Fix starting the Invitejoiner plugin
import plugin
from twisted.python import log
class Invitejoiner(plugin.Plugin):
def __init__(self):
plugin.Plugin.__init__(self, "Invitejoiner")
def invited(self, server_id, channel):
log.msg("Invited to: ", channel)
self.join(server_id, channel)
if __name__ == "__main__":
sys.exit(Invitejoiner.run())
|
<commit_before>
import plugin
from twisted.python import log
class Invitejoiner(plugin.Plugin):
def __init__(self):
plugin.Plugin.__init__(self, "Invitejoiner")
def invited(self, server_id, channel):
log.msg("Invited to: ", channel)
self.join(server_id, channel)
Invitejoiner.run()
<commit_msg>Fix starting the Invitejoiner plugin<commit_after>
import plugin
from twisted.python import log
class Invitejoiner(plugin.Plugin):
def __init__(self):
plugin.Plugin.__init__(self, "Invitejoiner")
def invited(self, server_id, channel):
log.msg("Invited to: ", channel)
self.join(server_id, channel)
if __name__ == "__main__":
sys.exit(Invitejoiner.run())
|
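Editor's note on the record above: the fixed version calls sys.exit() inside the __main__ guard but never imports sys, so it would raise NameError as committed. The record is kept verbatim; a corrected sketch follows.

# Sketch only: the committed fix with the missing "import sys" added so
# sys.exit() resolves at runtime.
import sys

import plugin
from twisted.python import log

class Invitejoiner(plugin.Plugin):
    def __init__(self):
        plugin.Plugin.__init__(self, "Invitejoiner")

    def invited(self, server_id, channel):
        log.msg("Invited to: ", channel)
        self.join(server_id, channel)

if __name__ == "__main__":
    sys.exit(Invitejoiner.run())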
e4b37ddc8802386572aa496e8b37ca647839aac7
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
from distutils.core import setup
from setuptools import find_packages
setup(
name='tango-shared-core',
version='0.6.4',
author=u'Tim Baxter',
author_email='mail.baxter@gmail.com',
url='https://github.com/tBaxter/tango-shared-core',
license='LICENSE',
description='Tango shared/core functionality.',
long_description=open('README.md').read(),
packages=find_packages(),
zip_safe=False,
include_package_data=True,
dependency_links = [
'http://github.com/tBaxter/django-voting/tarball/master#egg=tango-voting',
]
)
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open('docs/requirements.txt') as f:
required = f.read().splitlines()
setup(
name='tango-shared-core',
version='0.6.5',
author=u'Tim Baxter',
author_email='mail.baxter@gmail.com',
description='Tango shared/core functionality.',
long_description=open('README.md').read(),
url='https://github.com/tBaxter/tango-shared-core',
license='LICENSE',
packages=find_packages(),
zip_safe=False,
include_package_data=True,
dependency_links = [
'http://github.com/tBaxter/django-voting/tarball/master#egg=tango-voting',
]
)
|
Fix for requirements, and increment version
|
Fix for requirements, and increment version
|
Python
|
mit
|
tBaxter/tango-shared-core,tBaxter/tango-shared-core,tBaxter/tango-shared-core
|
# -*- coding: utf-8 -*-
from distutils.core import setup
from setuptools import find_packages
setup(
name='tango-shared-core',
version='0.6.4',
author=u'Tim Baxter',
author_email='mail.baxter@gmail.com',
url='https://github.com/tBaxter/tango-shared-core',
license='LICENSE',
description='Tango shared/core functionality.',
long_description=open('README.md').read(),
packages=find_packages(),
zip_safe=False,
include_package_data=True,
dependency_links = [
'http://github.com/tBaxter/django-voting/tarball/master#egg=tango-voting',
]
)
Fix for requirements, and increment version
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open('docs/requirements.txt') as f:
required = f.read().splitlines()
setup(
name='tango-shared-core',
version='0.6.5',
author=u'Tim Baxter',
author_email='mail.baxter@gmail.com',
description='Tango shared/core functionality.',
long_description=open('README.md').read(),
url='https://github.com/tBaxter/tango-shared-core',
license='LICENSE',
packages=find_packages(),
zip_safe=False,
include_package_data=True,
dependency_links = [
'http://github.com/tBaxter/django-voting/tarball/master#egg=tango-voting',
]
)
|
<commit_before># -*- coding: utf-8 -*-
from distutils.core import setup
from setuptools import find_packages
setup(
name='tango-shared-core',
version='0.6.4',
author=u'Tim Baxter',
author_email='mail.baxter@gmail.com',
url='https://github.com/tBaxter/tango-shared-core',
license='LICENSE',
description='Tango shared/core functionality.',
long_description=open('README.md').read(),
packages=find_packages(),
zip_safe=False,
include_package_data=True,
dependency_links = [
'http://github.com/tBaxter/django-voting/tarball/master#egg=tango-voting',
]
)
<commit_msg>Fix for requirements, and increment version<commit_after>
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open('docs/requirements.txt') as f:
required = f.read().splitlines()
setup(
name='tango-shared-core',
version='0.6.5',
author=u'Tim Baxter',
author_email='mail.baxter@gmail.com',
description='Tango shared/core functionality.',
long_description=open('README.md').read(),
url='https://github.com/tBaxter/tango-shared-core',
license='LICENSE',
packages=find_packages(),
zip_safe=False,
include_package_data=True,
dependency_links = [
'http://github.com/tBaxter/django-voting/tarball/master#egg=tango-voting',
]
)
|
# -*- coding: utf-8 -*-
from distutils.core import setup
from setuptools import find_packages
setup(
name='tango-shared-core',
version='0.6.4',
author=u'Tim Baxter',
author_email='mail.baxter@gmail.com',
url='https://github.com/tBaxter/tango-shared-core',
license='LICENSE',
description='Tango shared/core functionality.',
long_description=open('README.md').read(),
packages=find_packages(),
zip_safe=False,
include_package_data=True,
dependency_links = [
'http://github.com/tBaxter/django-voting/tarball/master#egg=tango-voting',
]
)
Fix for requirements, and increment version# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open('docs/requirements.txt') as f:
required = f.read().splitlines()
setup(
name='tango-shared-core',
version='0.6.5',
author=u'Tim Baxter',
author_email='mail.baxter@gmail.com',
description='Tango shared/core functionality.',
long_description=open('README.md').read(),
url='https://github.com/tBaxter/tango-shared-core',
license='LICENSE',
packages=find_packages(),
zip_safe=False,
include_package_data=True,
dependency_links = [
'http://github.com/tBaxter/django-voting/tarball/master#egg=tango-voting',
]
)
|
<commit_before># -*- coding: utf-8 -*-
from distutils.core import setup
from setuptools import find_packages
setup(
name='tango-shared-core',
version='0.6.4',
author=u'Tim Baxter',
author_email='mail.baxter@gmail.com',
url='https://github.com/tBaxter/tango-shared-core',
license='LICENSE',
description='Tango shared/core functionality.',
long_description=open('README.md').read(),
packages=find_packages(),
zip_safe=False,
include_package_data=True,
dependency_links = [
'http://github.com/tBaxter/django-voting/tarball/master#egg=tango-voting',
]
)
<commit_msg>Fix for requirements, and increment version<commit_after># -*- coding: utf-8 -*-
from setuptools import setup, find_packages
with open('docs/requirements.txt') as f:
required = f.read().splitlines()
setup(
name='tango-shared-core',
version='0.6.5',
author=u'Tim Baxter',
author_email='mail.baxter@gmail.com',
description='Tango shared/core functionality.',
long_description=open('README.md').read(),
url='https://github.com/tBaxter/tango-shared-core',
license='LICENSE',
packages=find_packages(),
zip_safe=False,
include_package_data=True,
dependency_links = [
'http://github.com/tBaxter/django-voting/tarball/master#egg=tango-voting',
]
)
|
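The tango-shared-core row reads a pinned requirements file into a list named required; the usual way to make that list take effect is to pass it to install_requires. A minimal sketch of the pattern, with a placeholder file name and placeholder package metadata:

# Sketch: feeding a requirements file into setuptools' install_requires.
# 'requirements.txt' and the metadata below are placeholders, not taken
# from the tango-shared-core project.
from setuptools import setup, find_packages

with open('requirements.txt') as f:
    # Keep one specifier per line, skipping blanks and comments.
    required = [line.strip() for line in f
                if line.strip() and not line.startswith('#')]

setup(
    name='example-package',
    version='0.1.0',
    packages=find_packages(),
    install_requires=required,
)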
3970a8e84c8a541dc69687c36388a829eb36b29f
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
import os
from subprocess import call
from setuptools import Command
from distutils.command.build_ext import build_ext as _build_ext
from setuptools.command.bdist_egg import bdist_egg as _bdist_egg
class build_frontend(Command):
""" A command class to run `frontendbuild.sh` """
description = 'build front-end JavaScript and CSS'
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
print __file__
call(['./frontendbuild.sh'],
cwd=os.path.dirname(os.path.abspath(__file__)))
class build_ext(_build_ext):
""" A build_ext subclass that adds build_frontend """
def run(self):
self.run_command('build_frontend')
_build_ext.run(self)
class bdist_egg(_bdist_egg):
""" A bdist_egg subclass that runs build_frontend """
def run(self):
self.run_command('build_frontend')
_bdist_egg.run(self)
setup(
name="regulations",
version="2.0.0",
license="public domain",
packages=find_packages(),
cmdclass={
'build_frontend': build_frontend,
'build_ext': build_ext,
'bdist_egg': bdist_egg,
},
install_requires=[
'django==1.8',
'lxml',
'requests'
]
)
|
from setuptools import setup, find_packages
import os
from subprocess import call
from setuptools import Command
from distutils.command.build_ext import build_ext as _build_ext
from setuptools.command.bdist_egg import bdist_egg as _bdist_egg
class build_frontend(Command):
""" A command class to run `frontendbuild.sh` """
description = 'build front-end JavaScript and CSS'
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
print __file__
call(['./frontendbuild.sh'],
cwd=os.path.dirname(os.path.abspath(__file__)))
class build_ext(_build_ext):
""" A build_ext subclass that adds build_frontend """
def run(self):
self.run_command('build_frontend')
_build_ext.run(self)
class bdist_egg(_bdist_egg):
""" A bdist_egg subclass that runs build_frontend """
def run(self):
self.run_command('build_frontend')
_bdist_egg.run(self)
setup(
name="regulations",
version="2.0.0",
packages=find_packages(),
cmdclass={
'build_frontend': build_frontend,
'build_ext': build_ext,
'bdist_egg': bdist_egg,
},
install_requires=[
'django==1.8',
'lxml',
'requests'
],
classifiers=[
'License :: Public Domain',
'License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication'
]
)
|
Use CC0 and Public Domain for license
|
Use CC0 and Public Domain for license
|
Python
|
cc0-1.0
|
grapesmoker/regulations-site,ascott1/regulations-site,ascott1/regulations-site,willbarton/regulations-site,willbarton/regulations-site,willbarton/regulations-site,willbarton/regulations-site,grapesmoker/regulations-site,grapesmoker/regulations-site,ascott1/regulations-site,grapesmoker/regulations-site,ascott1/regulations-site
|
from setuptools import setup, find_packages
import os
from subprocess import call
from setuptools import Command
from distutils.command.build_ext import build_ext as _build_ext
from setuptools.command.bdist_egg import bdist_egg as _bdist_egg
class build_frontend(Command):
""" A command class to run `frontendbuild.sh` """
description = 'build front-end JavaScript and CSS'
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
print __file__
call(['./frontendbuild.sh'],
cwd=os.path.dirname(os.path.abspath(__file__)))
class build_ext(_build_ext):
""" A build_ext subclass that adds build_frontend """
def run(self):
self.run_command('build_frontend')
_build_ext.run(self)
class bdist_egg(_bdist_egg):
""" A bdist_egg subclass that runs build_frontend """
def run(self):
self.run_command('build_frontend')
_bdist_egg.run(self)
setup(
name="regulations",
version="2.0.0",
license="public domain",
packages=find_packages(),
cmdclass={
'build_frontend': build_frontend,
'build_ext': build_ext,
'bdist_egg': bdist_egg,
},
install_requires=[
'django==1.8',
'lxml',
'requests'
]
)
Use CC0 and Public Domain for license
|
from setuptools import setup, find_packages
import os
from subprocess import call
from setuptools import Command
from distutils.command.build_ext import build_ext as _build_ext
from setuptools.command.bdist_egg import bdist_egg as _bdist_egg
class build_frontend(Command):
""" A command class to run `frontendbuild.sh` """
description = 'build front-end JavaScript and CSS'
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
print __file__
call(['./frontendbuild.sh'],
cwd=os.path.dirname(os.path.abspath(__file__)))
class build_ext(_build_ext):
""" A build_ext subclass that adds build_frontend """
def run(self):
self.run_command('build_frontend')
_build_ext.run(self)
class bdist_egg(_bdist_egg):
""" A bdist_egg subclass that runs build_frontend """
def run(self):
self.run_command('build_frontend')
_bdist_egg.run(self)
setup(
name="regulations",
version="2.0.0",
packages=find_packages(),
cmdclass={
'build_frontend': build_frontend,
'build_ext': build_ext,
'bdist_egg': bdist_egg,
},
install_requires=[
'django==1.8',
'lxml',
'requests'
],
classifiers=[
'License :: Public Domain',
'License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication'
]
)
|
<commit_before>from setuptools import setup, find_packages
import os
from subprocess import call
from setuptools import Command
from distutils.command.build_ext import build_ext as _build_ext
from setuptools.command.bdist_egg import bdist_egg as _bdist_egg
class build_frontend(Command):
""" A command class to run `frontendbuild.sh` """
description = 'build front-end JavaScript and CSS'
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
print __file__
call(['./frontendbuild.sh'],
cwd=os.path.dirname(os.path.abspath(__file__)))
class build_ext(_build_ext):
""" A build_ext subclass that adds build_frontend """
def run(self):
self.run_command('build_frontend')
_build_ext.run(self)
class bdist_egg(_bdist_egg):
""" A bdist_egg subclass that runs build_frontend """
def run(self):
self.run_command('build_frontend')
_bdist_egg.run(self)
setup(
name="regulations",
version="2.0.0",
license="public domain",
packages=find_packages(),
cmdclass={
'build_frontend': build_frontend,
'build_ext': build_ext,
'bdist_egg': bdist_egg,
},
install_requires=[
'django==1.8',
'lxml',
'requests'
]
)
<commit_msg>Use CC0 and Public Domain for license<commit_after>
|
from setuptools import setup, find_packages
import os
from subprocess import call
from setuptools import Command
from distutils.command.build_ext import build_ext as _build_ext
from setuptools.command.bdist_egg import bdist_egg as _bdist_egg
class build_frontend(Command):
""" A command class to run `frontendbuild.sh` """
description = 'build front-end JavaScript and CSS'
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
print __file__
call(['./frontendbuild.sh'],
cwd=os.path.dirname(os.path.abspath(__file__)))
class build_ext(_build_ext):
""" A build_ext subclass that adds build_frontend """
def run(self):
self.run_command('build_frontend')
_build_ext.run(self)
class bdist_egg(_bdist_egg):
""" A bdist_egg subclass that runs build_frontend """
def run(self):
self.run_command('build_frontend')
_bdist_egg.run(self)
setup(
name="regulations",
version="2.0.0",
packages=find_packages(),
cmdclass={
'build_frontend': build_frontend,
'build_ext': build_ext,
'bdist_egg': bdist_egg,
},
install_requires=[
'django==1.8',
'lxml',
'requests'
],
classifiers=[
'License :: Public Domain',
'License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication'
]
)
|
from setuptools import setup, find_packages
import os
from subprocess import call
from setuptools import Command
from distutils.command.build_ext import build_ext as _build_ext
from setuptools.command.bdist_egg import bdist_egg as _bdist_egg
class build_frontend(Command):
""" A command class to run `frontendbuild.sh` """
description = 'build front-end JavaScript and CSS'
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
print __file__
call(['./frontendbuild.sh'],
cwd=os.path.dirname(os.path.abspath(__file__)))
class build_ext(_build_ext):
""" A build_ext subclass that adds build_frontend """
def run(self):
self.run_command('build_frontend')
_build_ext.run(self)
class bdist_egg(_bdist_egg):
""" A bdist_egg subclass that runs build_frontend """
def run(self):
self.run_command('build_frontend')
_bdist_egg.run(self)
setup(
name="regulations",
version="2.0.0",
license="public domain",
packages=find_packages(),
cmdclass={
'build_frontend': build_frontend,
'build_ext': build_ext,
'bdist_egg': bdist_egg,
},
install_requires=[
'django==1.8',
'lxml',
'requests'
]
)
Use CC0 and Public Domain for licensefrom setuptools import setup, find_packages
import os
from subprocess import call
from setuptools import Command
from distutils.command.build_ext import build_ext as _build_ext
from setuptools.command.bdist_egg import bdist_egg as _bdist_egg
class build_frontend(Command):
""" A command class to run `frontendbuild.sh` """
description = 'build front-end JavaScript and CSS'
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
print __file__
call(['./frontendbuild.sh'],
cwd=os.path.dirname(os.path.abspath(__file__)))
class build_ext(_build_ext):
""" A build_ext subclass that adds build_frontend """
def run(self):
self.run_command('build_frontend')
_build_ext.run(self)
class bdist_egg(_bdist_egg):
""" A bdist_egg subclass that runs build_frontend """
def run(self):
self.run_command('build_frontend')
_bdist_egg.run(self)
setup(
name="regulations",
version="2.0.0",
packages=find_packages(),
cmdclass={
'build_frontend': build_frontend,
'build_ext': build_ext,
'bdist_egg': bdist_egg,
},
install_requires=[
'django==1.8',
'lxml',
'requests'
],
classifiers=[
'License :: Public Domain',
'License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication'
]
)
|
<commit_before>from setuptools import setup, find_packages
import os
from subprocess import call
from setuptools import Command
from distutils.command.build_ext import build_ext as _build_ext
from setuptools.command.bdist_egg import bdist_egg as _bdist_egg
class build_frontend(Command):
""" A command class to run `frontendbuild.sh` """
description = 'build front-end JavaScript and CSS'
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
print __file__
call(['./frontendbuild.sh'],
cwd=os.path.dirname(os.path.abspath(__file__)))
class build_ext(_build_ext):
""" A build_ext subclass that adds build_frontend """
def run(self):
self.run_command('build_frontend')
_build_ext.run(self)
class bdist_egg(_bdist_egg):
""" A bdist_egg subclass that runs build_frontend """
def run(self):
self.run_command('build_frontend')
_bdist_egg.run(self)
setup(
name="regulations",
version="2.0.0",
license="public domain",
packages=find_packages(),
cmdclass={
'build_frontend': build_frontend,
'build_ext': build_ext,
'bdist_egg': bdist_egg,
},
install_requires=[
'django==1.8',
'lxml',
'requests'
]
)
<commit_msg>Use CC0 and Public Domain for license<commit_after>from setuptools import setup, find_packages
import os
from subprocess import call
from setuptools import Command
from distutils.command.build_ext import build_ext as _build_ext
from setuptools.command.bdist_egg import bdist_egg as _bdist_egg
class build_frontend(Command):
""" A command class to run `frontendbuild.sh` """
description = 'build front-end JavaScript and CSS'
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
print __file__
call(['./frontendbuild.sh'],
cwd=os.path.dirname(os.path.abspath(__file__)))
class build_ext(_build_ext):
""" A build_ext subclass that adds build_frontend """
def run(self):
self.run_command('build_frontend')
_build_ext.run(self)
class bdist_egg(_bdist_egg):
""" A bdist_egg subclass that runs build_frontend """
def run(self):
self.run_command('build_frontend')
_bdist_egg.run(self)
setup(
name="regulations",
version="2.0.0",
packages=find_packages(),
cmdclass={
'build_frontend': build_frontend,
'build_ext': build_ext,
'bdist_egg': bdist_egg,
},
install_requires=[
'django==1.8',
'lxml',
'requests'
],
classifiers=[
'License :: Public Domain',
'License :: CC0 1.0 Universal (CC0 1.0) Public Domain Dedication'
]
)
|
23334c4598e112d66ebab7702854a40eb4115c5c
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='hamms',
packages=['hamms'],
version='0.6',
description='Malformed servers to test your HTTP client',
author='Kevin Burke',
author_email='kev@inburke.com',
url='https://github.com/kevinburke/hamms',
keywords=['testing', 'server', 'http',],
# XXX, pin these down
install_requires=['flask', 'httpbin', 'twisted'],
)
|
from setuptools import setup
setup(
name='hamms',
packages=['hamms'],
version='0.6',
description='Malformed servers to test your HTTP client',
author='Kevin Burke',
author_email='kev@inburke.com',
url='https://github.com/kevinburke/hamms',
keywords=['testing', 'server', 'http',],
# XXX, pin these down
install_requires=['flask', 'httpbin', 'twisted'],
)
|
Allow to install in development mode
|
Allow to install in development mode
User can now install with python setup.py develop
|
Python
|
mit
|
kevinburke/hamms
|
from distutils.core import setup
setup(
name='hamms',
packages=['hamms'],
version='0.6',
description='Malformed servers to test your HTTP client',
author='Kevin Burke',
author_email='kev@inburke.com',
url='https://github.com/kevinburke/hamms',
keywords=['testing', 'server', 'http',],
# XXX, pin these down
install_requires=['flask', 'httpbin', 'twisted'],
)
Allow to install in development mode
User can now install with python setup.py develop
|
from setuptools import setup
setup(
name='hamms',
packages=['hamms'],
version='0.6',
description='Malformed servers to test your HTTP client',
author='Kevin Burke',
author_email='kev@inburke.com',
url='https://github.com/kevinburke/hamms',
keywords=['testing', 'server', 'http',],
# XXX, pin these down
install_requires=['flask', 'httpbin', 'twisted'],
)
|
<commit_before>from distutils.core import setup
setup(
name='hamms',
packages=['hamms'],
version='0.6',
description='Malformed servers to test your HTTP client',
author='Kevin Burke',
author_email='kev@inburke.com',
url='https://github.com/kevinburke/hamms',
keywords=['testing', 'server', 'http',],
# XXX, pin these down
install_requires=['flask', 'httpbin', 'twisted'],
)
<commit_msg>Allow to install in development mode
User can now install with python setup.py develop<commit_after>
|
from setuptools import setup
setup(
name='hamms',
packages=['hamms'],
version='0.6',
description='Malformed servers to test your HTTP client',
author='Kevin Burke',
author_email='kev@inburke.com',
url='https://github.com/kevinburke/hamms',
keywords=['testing', 'server', 'http',],
# XXX, pin these down
install_requires=['flask', 'httpbin', 'twisted'],
)
|
from distutils.core import setup
setup(
name='hamms',
packages=['hamms'],
version='0.6',
description='Malformed servers to test your HTTP client',
author='Kevin Burke',
author_email='kev@inburke.com',
url='https://github.com/kevinburke/hamms',
keywords=['testing', 'server', 'http',],
# XXX, pin these down
install_requires=['flask', 'httpbin', 'twisted'],
)
Allow to install in development mode
User can now install with python setup.py developfrom setuptools import setup
setup(
name='hamms',
packages=['hamms'],
version='0.6',
description='Malformed servers to test your HTTP client',
author='Kevin Burke',
author_email='kev@inburke.com',
url='https://github.com/kevinburke/hamms',
keywords=['testing', 'server', 'http',],
# XXX, pin these down
install_requires=['flask', 'httpbin', 'twisted'],
)
|
<commit_before>from distutils.core import setup
setup(
name='hamms',
packages=['hamms'],
version='0.6',
description='Malformed servers to test your HTTP client',
author='Kevin Burke',
author_email='kev@inburke.com',
url='https://github.com/kevinburke/hamms',
keywords=['testing', 'server', 'http',],
# XXX, pin these down
install_requires=['flask', 'httpbin', 'twisted'],
)
<commit_msg>Allow to install in development mode
User can now install with python setup.py develop<commit_after>from setuptools import setup
setup(
name='hamms',
packages=['hamms'],
version='0.6',
description='Malformed servers to test your HTTP client',
author='Kevin Burke',
author_email='kev@inburke.com',
url='https://github.com/kevinburke/hamms',
keywords=['testing', 'server', 'http',],
# XXX, pin these down
install_requires=['flask', 'httpbin', 'twisted'],
)
|
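The hamms row shows why the import line matters: distutils.core.setup does not provide the develop command, so an editable install only becomes available once setup() comes from setuptools. A minimal sketch of a setup.py that supports it, with placeholder metadata, followed by the commands it unlocks:

# Sketch: smallest setup.py that supports an editable ("develop") install.
# Requires setuptools; distutils alone has no develop command.
from setuptools import setup, find_packages

setup(
    name='example-package',   # placeholder metadata
    version='0.1.0',
    packages=find_packages(),
)

# Typical usage from the project root:
#   python setup.py develop   # legacy editable install
#   pip install -e .          # the pip equivalent, generally preferred now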
c39dc041c4ef68808cc7df965321d904960863b8
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import find_packages, Command
setup_params = dict(
name='bugimporters',
version=0.1,
author='Various contributers to the OpenHatch project, Berry Phillips',
author_email='all@openhatch.org, berryphillips@gmail.com',
packages=find_packages(),
description='Bug importers for the OpenHatch project',
install_requires=[
'gdata',
'lxml',
'cssselect',
'pyopenssl',
'unicodecsv',
'feedparser',
'twisted',
'python-dateutil',
'decorator',
'scrapy>0.9',
'argparse',
'mock',
'PyYAML',
'autoresponse>=0.3.1',
],
)
### Python 2.7 already has importlib. Because of that,
### we can't put it in install_requires. We test for
### that here; if needed, we add it.
try:
import importlib
except ImportError:
setup_params['install_requires'].append('importlib')
if __name__ == '__main__':
from setuptools import setup
setup(**setup_params)
|
#!/usr/bin/env python
from setuptools import find_packages, Command
setup_params = dict(
name='bugimporters',
version=0.1,
author='Various contributers to the OpenHatch project, Berry Phillips',
author_email='all@openhatch.org, berryphillips@gmail.com',
packages=find_packages(),
description='Bug importers for the OpenHatch project',
install_requires=[
'lxml',
'cssselect',
'pyopenssl',
'unicodecsv',
'feedparser',
'twisted',
'python-dateutil',
'decorator',
'scrapy>0.9',
'argparse',
'mock',
'PyYAML',
'autoresponse>=0.3.1',
],
)
### Python 2.7 already has importlib. Because of that,
### we can't put it in install_requires. We test for
### that here; if needed, we add it.
try:
import importlib
except ImportError:
setup_params['install_requires'].append('importlib')
if __name__ == '__main__':
from setuptools import setup
setup(**setup_params)
|
Remove gdata dependency from install reqs
|
Remove gdata dependency from install reqs
|
Python
|
agpl-3.0
|
openhatch/oh-bugimporters,openhatch/oh-bugimporters,openhatch/oh-bugimporters
|
#!/usr/bin/env python
from setuptools import find_packages, Command
setup_params = dict(
name='bugimporters',
version=0.1,
author='Various contributers to the OpenHatch project, Berry Phillips',
author_email='all@openhatch.org, berryphillips@gmail.com',
packages=find_packages(),
description='Bug importers for the OpenHatch project',
install_requires=[
'gdata',
'lxml',
'cssselect',
'pyopenssl',
'unicodecsv',
'feedparser',
'twisted',
'python-dateutil',
'decorator',
'scrapy>0.9',
'argparse',
'mock',
'PyYAML',
'autoresponse>=0.3.1',
],
)
### Python 2.7 already has importlib. Because of that,
### we can't put it in install_requires. We test for
### that here; if needed, we add it.
try:
import importlib
except ImportError:
setup_params['install_requires'].append('importlib')
if __name__ == '__main__':
from setuptools import setup
setup(**setup_params)
Remove gdata dependency from install reqs
|
#!/usr/bin/env python
from setuptools import find_packages, Command
setup_params = dict(
name='bugimporters',
version=0.1,
author='Various contributers to the OpenHatch project, Berry Phillips',
author_email='all@openhatch.org, berryphillips@gmail.com',
packages=find_packages(),
description='Bug importers for the OpenHatch project',
install_requires=[
'lxml',
'cssselect',
'pyopenssl',
'unicodecsv',
'feedparser',
'twisted',
'python-dateutil',
'decorator',
'scrapy>0.9',
'argparse',
'mock',
'PyYAML',
'autoresponse>=0.3.1',
],
)
### Python 2.7 already has importlib. Because of that,
### we can't put it in install_requires. We test for
### that here; if needed, we add it.
try:
import importlib
except ImportError:
setup_params['install_requires'].append('importlib')
if __name__ == '__main__':
from setuptools import setup
setup(**setup_params)
|
<commit_before>#!/usr/bin/env python
from setuptools import find_packages, Command
setup_params = dict(
name='bugimporters',
version=0.1,
author='Various contributers to the OpenHatch project, Berry Phillips',
author_email='all@openhatch.org, berryphillips@gmail.com',
packages=find_packages(),
description='Bug importers for the OpenHatch project',
install_requires=[
'gdata',
'lxml',
'cssselect',
'pyopenssl',
'unicodecsv',
'feedparser',
'twisted',
'python-dateutil',
'decorator',
'scrapy>0.9',
'argparse',
'mock',
'PyYAML',
'autoresponse>=0.3.1',
],
)
### Python 2.7 already has importlib. Because of that,
### we can't put it in install_requires. We test for
### that here; if needed, we add it.
try:
import importlib
except ImportError:
setup_params['install_requires'].append('importlib')
if __name__ == '__main__':
from setuptools import setup
setup(**setup_params)
<commit_msg>Remove gdata dependency from install reqs<commit_after>
|
#!/usr/bin/env python
from setuptools import find_packages, Command
setup_params = dict(
name='bugimporters',
version=0.1,
author='Various contributers to the OpenHatch project, Berry Phillips',
author_email='all@openhatch.org, berryphillips@gmail.com',
packages=find_packages(),
description='Bug importers for the OpenHatch project',
install_requires=[
'lxml',
'cssselect',
'pyopenssl',
'unicodecsv',
'feedparser',
'twisted',
'python-dateutil',
'decorator',
'scrapy>0.9',
'argparse',
'mock',
'PyYAML',
'autoresponse>=0.3.1',
],
)
### Python 2.7 already has importlib. Because of that,
### we can't put it in install_requires. We test for
### that here; if needed, we add it.
try:
import importlib
except ImportError:
setup_params['install_requires'].append('importlib')
if __name__ == '__main__':
from setuptools import setup
setup(**setup_params)
|
#!/usr/bin/env python
from setuptools import find_packages, Command
setup_params = dict(
name='bugimporters',
version=0.1,
author='Various contributers to the OpenHatch project, Berry Phillips',
author_email='all@openhatch.org, berryphillips@gmail.com',
packages=find_packages(),
description='Bug importers for the OpenHatch project',
install_requires=[
'gdata',
'lxml',
'cssselect',
'pyopenssl',
'unicodecsv',
'feedparser',
'twisted',
'python-dateutil',
'decorator',
'scrapy>0.9',
'argparse',
'mock',
'PyYAML',
'autoresponse>=0.3.1',
],
)
### Python 2.7 already has importlib. Because of that,
### we can't put it in install_requires. We test for
### that here; if needed, we add it.
try:
import importlib
except ImportError:
setup_params['install_requires'].append('importlib')
if __name__ == '__main__':
from setuptools import setup
setup(**setup_params)
Remove gdata dependency from install reqs#!/usr/bin/env python
from setuptools import find_packages, Command
setup_params = dict(
name='bugimporters',
version=0.1,
author='Various contributers to the OpenHatch project, Berry Phillips',
author_email='all@openhatch.org, berryphillips@gmail.com',
packages=find_packages(),
description='Bug importers for the OpenHatch project',
install_requires=[
'lxml',
'cssselect',
'pyopenssl',
'unicodecsv',
'feedparser',
'twisted',
'python-dateutil',
'decorator',
'scrapy>0.9',
'argparse',
'mock',
'PyYAML',
'autoresponse>=0.3.1',
],
)
### Python 2.7 already has importlib. Because of that,
### we can't put it in install_requires. We test for
### that here; if needed, we add it.
try:
import importlib
except ImportError:
setup_params['install_requires'].append('importlib')
if __name__ == '__main__':
from setuptools import setup
setup(**setup_params)
|
<commit_before>#!/usr/bin/env python
from setuptools import find_packages, Command
setup_params = dict(
name='bugimporters',
version=0.1,
author='Various contributers to the OpenHatch project, Berry Phillips',
author_email='all@openhatch.org, berryphillips@gmail.com',
packages=find_packages(),
description='Bug importers for the OpenHatch project',
install_requires=[
'gdata',
'lxml',
'cssselect',
'pyopenssl',
'unicodecsv',
'feedparser',
'twisted',
'python-dateutil',
'decorator',
'scrapy>0.9',
'argparse',
'mock',
'PyYAML',
'autoresponse>=0.3.1',
],
)
### Python 2.7 already has importlib. Because of that,
### we can't put it in install_requires. We test for
### that here; if needed, we add it.
try:
import importlib
except ImportError:
setup_params['install_requires'].append('importlib')
if __name__ == '__main__':
from setuptools import setup
setup(**setup_params)
<commit_msg>Remove gdata dependency from install reqs<commit_after>#!/usr/bin/env python
from setuptools import find_packages, Command
setup_params = dict(
name='bugimporters',
version=0.1,
author='Various contributers to the OpenHatch project, Berry Phillips',
author_email='all@openhatch.org, berryphillips@gmail.com',
packages=find_packages(),
description='Bug importers for the OpenHatch project',
install_requires=[
'lxml',
'cssselect',
'pyopenssl',
'unicodecsv',
'feedparser',
'twisted',
'python-dateutil',
'decorator',
'scrapy>0.9',
'argparse',
'mock',
'PyYAML',
'autoresponse>=0.3.1',
],
)
### Python 2.7 already has importlib. Because of that,
### we can't put it in install_requires. We test for
### that here; if needed, we add it.
try:
import importlib
except ImportError:
setup_params['install_requires'].append('importlib')
if __name__ == '__main__':
from setuptools import setup
setup(**setup_params)
|
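The bugimporters row guards the importlib requirement with a runtime ImportError probe; setuptools releases that understand PEP 508 environment markers can express the same condition declaratively. A hedged sketch with placeholder metadata:

# Sketch: declaring a Python-version-conditional dependency with a PEP 508
# environment marker instead of probing the import inside setup.py.
# Assumes a setuptools release new enough to parse markers in install_requires.
from setuptools import setup, find_packages

setup(
    name='example-package',   # placeholder metadata
    version='0.1.0',
    packages=find_packages(),
    install_requires=[
        'lxml',
        # Installed only on interpreters without a bundled importlib.
        'importlib; python_version < "2.7"',
    ],
)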
7d20d17229f11ddba104d4e5d4fcfc65d56e0102
|
setup.py
|
setup.py
|
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name = "UCLDC Deep Harvester",
version = "0.0.3",
description = ("deep harvester code for the UCLDC project"),
long_description=read('README.md'),
author='Barbara Hui',
author_email='barbara.hui@ucop.edu',
dependency_links=[
'https://github.com/ucldc/pynux/archive/master.zip#egg=pynux',
'https://github.com/mredar/jsonpath/archive/master.zip#egg=jsonpath',
'https://github.com/mredar/ucldc-iiif/archive/master.zip#egg=ucldc-iiif'
],
install_requires=[
'argparse',
'boto',
'pynux',
'python-magic',
'couchdb',
'jsonpath',
'akara',
'ucldc-iiif'
],
packages=['deepharvest', 's3stash'],
test_suite='tests'
)
### note: dpla-ingestion code is a dependency
###pip_main(['install',
### 'git+ssh://git@bitbucket.org/mredar/dpla-ingestion.git@ucldc'])
|
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name = "UCLDC Deep Harvester",
version = "0.0.3",
description = ("deep harvester code for the UCLDC project"),
long_description=read('README.md'),
author='Barbara Hui',
author_email='barbara.hui@ucop.edu',
dependency_links=[
'https://github.com/ucldc/pynux/archive/master.zip#egg=pynux',
'https://github.com/mredar/jsonpath/archive/master.zip#egg=jsonpath',
'https://github.com/barbarahui/ucldc-iiif/archive/master.zip#egg=ucldc-iiif'
],
install_requires=[
'argparse',
'boto',
'pynux',
'python-magic',
'couchdb',
'jsonpath',
'akara',
'ucldc-iiif'
],
packages=['deepharvest', 's3stash'],
test_suite='tests'
)
### note: dpla-ingestion code is a dependency
###pip_main(['install',
### 'git+ssh://git@bitbucket.org/mredar/dpla-ingestion.git@ucldc'])
|
Change ucldc-iiif back to barbara's repo
|
Change ucldc-iiif back to barbara's repo
|
Python
|
bsd-3-clause
|
barbarahui/nuxeo-calisphere,barbarahui/nuxeo-calisphere
|
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name = "UCLDC Deep Harvester",
version = "0.0.3",
description = ("deep harvester code for the UCLDC project"),
long_description=read('README.md'),
author='Barbara Hui',
author_email='barbara.hui@ucop.edu',
dependency_links=[
'https://github.com/ucldc/pynux/archive/master.zip#egg=pynux',
'https://github.com/mredar/jsonpath/archive/master.zip#egg=jsonpath',
'https://github.com/mredar/ucldc-iiif/archive/master.zip#egg=ucldc-iiif'
],
install_requires=[
'argparse',
'boto',
'pynux',
'python-magic',
'couchdb',
'jsonpath',
'akara',
'ucldc-iiif'
],
packages=['deepharvest', 's3stash'],
test_suite='tests'
)
### note: dpla-ingestion code is a dependency
###pip_main(['install',
### 'git+ssh://git@bitbucket.org/mredar/dpla-ingestion.git@ucldc'])
Change ucldc-iiif back to barbara's repo
|
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name = "UCLDC Deep Harvester",
version = "0.0.3",
description = ("deep harvester code for the UCLDC project"),
long_description=read('README.md'),
author='Barbara Hui',
author_email='barbara.hui@ucop.edu',
dependency_links=[
'https://github.com/ucldc/pynux/archive/master.zip#egg=pynux',
'https://github.com/mredar/jsonpath/archive/master.zip#egg=jsonpath',
'https://github.com/barbarahui/ucldc-iiif/archive/master.zip#egg=ucldc-iiif'
],
install_requires=[
'argparse',
'boto',
'pynux',
'python-magic',
'couchdb',
'jsonpath',
'akara',
'ucldc-iiif'
],
packages=['deepharvest', 's3stash'],
test_suite='tests'
)
### note: dpla-ingestion code is a dependency
###pip_main(['install',
### 'git+ssh://git@bitbucket.org/mredar/dpla-ingestion.git@ucldc'])
|
<commit_before>import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name = "UCLDC Deep Harvester",
version = "0.0.3",
description = ("deep harvester code for the UCLDC project"),
long_description=read('README.md'),
author='Barbara Hui',
author_email='barbara.hui@ucop.edu',
dependency_links=[
'https://github.com/ucldc/pynux/archive/master.zip#egg=pynux',
'https://github.com/mredar/jsonpath/archive/master.zip#egg=jsonpath',
'https://github.com/mredar/ucldc-iiif/archive/master.zip#egg=ucldc-iiif'
],
install_requires=[
'argparse',
'boto',
'pynux',
'python-magic',
'couchdb',
'jsonpath',
'akara',
'ucldc-iiif'
],
packages=['deepharvest', 's3stash'],
test_suite='tests'
)
### note: dpla-ingestion code is a dependency
###pip_main(['install',
### 'git+ssh://git@bitbucket.org/mredar/dpla-ingestion.git@ucldc'])
<commit_msg>Change ucldc-iiif back to barbara's repo<commit_after>
|
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name = "UCLDC Deep Harvester",
version = "0.0.3",
description = ("deep harvester code for the UCLDC project"),
long_description=read('README.md'),
author='Barbara Hui',
author_email='barbara.hui@ucop.edu',
dependency_links=[
'https://github.com/ucldc/pynux/archive/master.zip#egg=pynux',
'https://github.com/mredar/jsonpath/archive/master.zip#egg=jsonpath',
'https://github.com/barbarahui/ucldc-iiif/archive/master.zip#egg=ucldc-iiif'
],
install_requires=[
'argparse',
'boto',
'pynux',
'python-magic',
'couchdb',
'jsonpath',
'akara',
'ucldc-iiif'
],
packages=['deepharvest', 's3stash'],
test_suite='tests'
)
### note: dpla-ingestion code is a dependency
###pip_main(['install',
### 'git+ssh://git@bitbucket.org/mredar/dpla-ingestion.git@ucldc'])
|
import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name = "UCLDC Deep Harvester",
version = "0.0.3",
description = ("deep harvester code for the UCLDC project"),
long_description=read('README.md'),
author='Barbara Hui',
author_email='barbara.hui@ucop.edu',
dependency_links=[
'https://github.com/ucldc/pynux/archive/master.zip#egg=pynux',
'https://github.com/mredar/jsonpath/archive/master.zip#egg=jsonpath',
'https://github.com/mredar/ucldc-iiif/archive/master.zip#egg=ucldc-iiif'
],
install_requires=[
'argparse',
'boto',
'pynux',
'python-magic',
'couchdb',
'jsonpath',
'akara',
'ucldc-iiif'
],
packages=['deepharvest', 's3stash'],
test_suite='tests'
)
### note: dpla-ingestion code is a dependency
###pip_main(['install',
### 'git+ssh://git@bitbucket.org/mredar/dpla-ingestion.git@ucldc'])
Change ucldc-iiif back to barbara's repoimport os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name = "UCLDC Deep Harvester",
version = "0.0.3",
description = ("deep harvester code for the UCLDC project"),
long_description=read('README.md'),
author='Barbara Hui',
author_email='barbara.hui@ucop.edu',
dependency_links=[
'https://github.com/ucldc/pynux/archive/master.zip#egg=pynux',
'https://github.com/mredar/jsonpath/archive/master.zip#egg=jsonpath',
'https://github.com/barbarahui/ucldc-iiif/archive/master.zip#egg=ucldc-iiif'
],
install_requires=[
'argparse',
'boto',
'pynux',
'python-magic',
'couchdb',
'jsonpath',
'akara',
'ucldc-iiif'
],
packages=['deepharvest', 's3stash'],
test_suite='tests'
)
### note: dpla-ingestion code is a dependency
###pip_main(['install',
### 'git+ssh://git@bitbucket.org/mredar/dpla-ingestion.git@ucldc'])
|
<commit_before>import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name = "UCLDC Deep Harvester",
version = "0.0.3",
description = ("deep harvester code for the UCLDC project"),
long_description=read('README.md'),
author='Barbara Hui',
author_email='barbara.hui@ucop.edu',
dependency_links=[
'https://github.com/ucldc/pynux/archive/master.zip#egg=pynux',
'https://github.com/mredar/jsonpath/archive/master.zip#egg=jsonpath',
'https://github.com/mredar/ucldc-iiif/archive/master.zip#egg=ucldc-iiif'
],
install_requires=[
'argparse',
'boto',
'pynux',
'python-magic',
'couchdb',
'jsonpath',
'akara',
'ucldc-iiif'
],
packages=['deepharvest', 's3stash'],
test_suite='tests'
)
### note: dpla-ingestion code is a dependency
###pip_main(['install',
### 'git+ssh://git@bitbucket.org/mredar/dpla-ingestion.git@ucldc'])
<commit_msg>Change ucldc-iiif back to barbara's repo<commit_after>import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name = "UCLDC Deep Harvester",
version = "0.0.3",
description = ("deep harvester code for the UCLDC project"),
long_description=read('README.md'),
author='Barbara Hui',
author_email='barbara.hui@ucop.edu',
dependency_links=[
'https://github.com/ucldc/pynux/archive/master.zip#egg=pynux',
'https://github.com/mredar/jsonpath/archive/master.zip#egg=jsonpath',
'https://github.com/barbarahui/ucldc-iiif/archive/master.zip#egg=ucldc-iiif'
],
install_requires=[
'argparse',
'boto',
'pynux',
'python-magic',
'couchdb',
'jsonpath',
'akara',
'ucldc-iiif'
],
packages=['deepharvest', 's3stash'],
test_suite='tests'
)
### note: dpla-ingestion code is a dependency
###pip_main(['install',
### 'git+ssh://git@bitbucket.org/mredar/dpla-ingestion.git@ucldc'])
|
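The deep harvester row pairs dependency_links with bare names in install_requires; recent pip releases no longer honor dependency_links, and the usual replacement is a PEP 508 direct URL reference. A hedged sketch with a placeholder package name and archive URL:

# Sketch: pointing a requirement straight at an archive URL via a PEP 508
# direct reference ("name @ URL"). Assumes pip/setuptools versions that
# accept direct references; the package name and URL are placeholders.
from setuptools import setup, find_packages

setup(
    name='example-package',   # placeholder metadata
    version='0.1.0',
    packages=find_packages(),
    install_requires=[
        'examplelib @ https://example.com/examplelib/archive/master.zip',
    ],
)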
7c2867be212d912e6d49995665d155474dbb2e48
|
setup.py
|
setup.py
|
"""pyledgertools setup file"""
from setuptools import setup
def readme():
with open('README.md') as f:
return f.read()
setup(name='pyledgertools',
version='0.1',
description='Python based tools for ledger accounting.',
long_description=readme(),
classifiers=[
'Development Status :: 3 - Alpha',
'License :: Public Domain',
'Programming Language :: Python :: 3.5',
'Topic :: Office/Business :: Financial :: Accounting',
],
keywords='ledger-cli plaintextaccounting ofx',
url='http://github.com/cgiacofei/pyledgertools',
author='Chris Giacofei',
author_email='c.giacofei@gmail.com',
license='Public Domain',
packages=['pyledgertools'],
install_requires=[
'hashlib',
'ofx_tools',
'optparse',
'PyYaml',
],
include_package_data=True,
zip_safe=False
)
|
"""pyledgertools setup file"""
from setuptools import setup
def readme():
with open('README.md') as f:
return f.read()
setup(name='pyledgertools',
version='0.1',
description='Python based tools for ledger accounting.',
long_description=readme(),
classifiers=[
'Development Status :: 3 - Alpha',
'License :: Public Domain',
'Programming Language :: Python :: 3.5',
'Topic :: Office/Business :: Financial :: Accounting',
],
keywords='ledger-cli plaintextaccounting ofx',
url='http://github.com/cgiacofei/pyledgertools',
author='Chris Giacofei',
author_email='c.giacofei@gmail.com',
license='Public Domain',
packages=['pyledgertools'],
install_requires=[
'ofxtools',
'PyYaml',
],
include_package_data=True,
zip_safe=False
)
|
Remove stuff that doesn't need to be here.
|
Remove stuff that doesn't need to be here.
|
Python
|
unlicense
|
cgiacofei/pyledgertools,cgiacofei/pyledgertools
|
"""pyledgertools setup file"""
from setuptools import setup
def readme():
with open('README.md') as f:
return f.read()
setup(name='pyledgertools',
version='0.1',
description='Python based tools for ledger accounting.',
long_description=readme(),
classifiers=[
'Development Status :: 3 - Alpha',
'License :: Public Domain',
'Programming Language :: Python :: 3.5',
'Topic :: Office/Business :: Financial :: Accounting',
],
keywords='ledger-cli plaintextaccounting ofx',
url='http://github.com/cgiacofei/pyledgertools',
author='Chris Giacofei',
author_email='c.giacofei@gmail.com',
license='Public Domain',
packages=['pyledgertools'],
install_requires=[
'hashlib',
'ofx_tools',
'optparse',
'PyYaml',
],
include_package_data=True,
zip_safe=False
)
Remove stuff that doesn't need to be here.
|
"""pyledgertools setup file"""
from setuptools import setup
def readme():
with open('README.md') as f:
return f.read()
setup(name='pyledgertools',
version='0.1',
description='Python based tools for ledger accounting.',
long_description=readme(),
classifiers=[
'Development Status :: 3 - Alpha',
'License :: Public Domain',
'Programming Language :: Python :: 3.5',
'Topic :: Office/Business :: Financial :: Accounting',
],
keywords='ledger-cli plaintextaccounting ofx',
url='http://github.com/cgiacofei/pyledgertools',
author='Chris Giacofei',
author_email='c.giacofei@gmail.com',
license='Public Domain',
packages=['pyledgertools'],
install_requires=[
'ofxtools',
'PyYaml',
],
include_package_data=True,
zip_safe=False
)
|
<commit_before>"""pyledgertools setup file"""
from setuptools import setup
def readme():
with open('README.md') as f:
return f.read()
setup(name='pyledgertools',
version='0.1',
description='Python based tools for ledger accounting.',
long_description=readme(),
classifiers=[
'Development Status :: 3 - Alpha',
'License :: Public Domain',
'Programming Language :: Python :: 3.5',
'Topic :: Office/Business :: Financial :: Accounting',
],
keywords='ledger-cli plaintextaccounting ofx',
url='http://github.com/cgiacofei/pyledgertools',
author='Chris Giacofei',
author_email='c.giacofei@gmail.com',
license='Public Domain',
packages=['pyledgertools'],
install_requires=[
'hashlib',
'ofx_tools',
'optparse',
'PyYaml',
],
include_package_data=True,
zip_safe=False
)
<commit_msg>Remove stuff that doesn't need to be here.<commit_after>
|
"""pyledgertools setup file"""
from setuptools import setup
def readme():
with open('README.md') as f:
return f.read()
setup(name='pyledgertools',
version='0.1',
description='Python based tools for ledger accounting.',
long_description=readme(),
classifiers=[
'Development Status :: 3 - Alpha',
'License :: Public Domain',
'Programming Language :: Python :: 3.5',
'Topic :: Office/Business :: Financial :: Accounting',
],
keywords='ledger-cli plaintextaccounting ofx',
url='http://github.com/cgiacofei/pyledgertools',
author='Chris Giacofei',
author_email='c.giacofei@gmail.com',
license='Public Domain',
packages=['pyledgertools'],
install_requires=[
'ofxtools',
'PyYaml',
],
include_package_data=True,
zip_safe=False
)
|
"""pyledgertools setup file"""
from setuptools import setup
def readme():
with open('README.md') as f:
return f.read()
setup(name='pyledgertools',
version='0.1',
description='Python based tools for ledger accounting.',
long_description=readme(),
classifiers=[
'Development Status :: 3 - Alpha',
'License :: Public Domain',
'Programming Language :: Python :: 3.5',
'Topic :: Office/Business :: Financial :: Accounting',
],
keywords='ledger-cli plaintextaccounting ofx',
url='http://github.com/cgiacofei/pyledgertools',
author='Chris Giacofei',
author_email='c.giacofei@gmail.com',
license='Public Domain',
packages=['pyledgertools'],
install_requires=[
'hashlib',
'ofx_tools',
'optparse',
'PyYaml',
],
include_package_data=True,
zip_safe=False
)
Remove stuff that doesn't need to be here."""pyledgertools setup file"""
from setuptools import setup
def readme():
with open('README.md') as f:
return f.read()
setup(name='pyledgertools',
version='0.1',
description='Python based tools for ledger accounting.',
long_description=readme(),
classifiers=[
'Development Status :: 3 - Alpha',
'License :: Public Domain',
'Programming Language :: Python :: 3.5',
'Topic :: Office/Business :: Financial :: Accounting',
],
keywords='ledger-cli plaintextaccounting ofx',
url='http://github.com/cgiacofei/pyledgertools',
author='Chris Giacofei',
author_email='c.giacofei@gmail.com',
license='Public Domain',
packages=['pyledgertools'],
install_requires=[
'ofxtools',
'PyYaml',
],
include_package_data=True,
zip_safe=False
)
|
<commit_before>"""pyledgertools setup file"""
from setuptools import setup
def readme():
with open('README.md') as f:
return f.read()
setup(name='pyledgertools',
version='0.1',
description='Python based tools for ledger accounting.',
long_description=readme(),
classifiers=[
'Development Status :: 3 - Alpha',
'License :: Public Domain',
'Programming Language :: Python :: 3.5',
'Topic :: Office/Business :: Financial :: Accounting',
],
keywords='ledger-cli plaintextaccounting ofx',
url='http://github.com/cgiacofei/pyledgertools',
author='Chris Giacofei',
author_email='c.giacofei@gmail.com',
license='Public Domain',
packages=['pyledgertools'],
install_requires=[
'hashlib',
'ofx_tools',
'optparse',
'PyYaml',
],
include_package_data=True,
zip_safe=False
)
<commit_msg>Remove stuff that doesn't need to be here.<commit_after>"""pyledgertools setup file"""
from setuptools import setup
def readme():
with open('README.md') as f:
return f.read()
setup(name='pyledgertools',
version='0.1',
description='Python based tools for ledger accounting.',
long_description=readme(),
classifiers=[
'Development Status :: 3 - Alpha',
'License :: Public Domain',
'Programming Language :: Python :: 3.5',
'Topic :: Office/Business :: Financial :: Accounting',
],
keywords='ledger-cli plaintextaccounting ofx',
url='http://github.com/cgiacofei/pyledgertools',
author='Chris Giacofei',
author_email='c.giacofei@gmail.com',
license='Public Domain',
packages=['pyledgertools'],
install_requires=[
'ofxtools',
'PyYaml',
],
include_package_data=True,
zip_safe=False
)
|
0c58182ef8e82bb3f7a6c26e03eec0de9cbd35d4
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
from distutils.core import setup
from setuptools import find_packages
setup(
name = 'djiki',
version = '0.2',
description = 'Django Wiki Application',
url = 'https://github.com/emesik/djiki/',
long_description = open('README.rst').read(),
author = 'Michał Sałaban',
author_email = 'michal@salaban.info',
requires = [
'creole',
'diff_match_patch',
'sorl_thumbnail',
],
packages = find_packages(),
include_package_data = True,
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
zip_safe = False,
)
|
# -*- coding: utf-8 -*-
from distutils.core import setup
from setuptools import find_packages
setup(
name = 'djiki',
version = '0.3',
description = 'Django Wiki Application',
url = 'https://github.com/emesik/djiki/',
long_description = open('README.rst').read(),
author = 'Michał Sałaban',
author_email = 'michal@salaban.info',
requires = [
'creole',
'diff_match_patch',
'sorl_thumbnail',
],
packages = find_packages(),
include_package_data = True,
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
zip_safe = False,
)
|
Bump up version to 0.3
|
Bump up version to 0.3
|
Python
|
bsd-2-clause
|
hivelocity/djiki,hivelocity/djiki
|
# -*- coding: utf-8 -*-
from distutils.core import setup
from setuptools import find_packages
setup(
name = 'djiki',
version = '0.2',
description = 'Django Wiki Application',
url = 'https://github.com/emesik/djiki/',
long_description = open('README.rst').read(),
author = 'Michał Sałaban',
author_email = 'michal@salaban.info',
requires = [
'creole',
'diff_match_patch',
'sorl_thumbnail',
],
packages = find_packages(),
include_package_data = True,
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
zip_safe = False,
)
Bump up version to 0.3
|
# -*- coding: utf-8 -*-
from distutils.core import setup
from setuptools import find_packages
setup(
name = 'djiki',
version = '0.3',
description = 'Django Wiki Application',
url = 'https://github.com/emesik/djiki/',
long_description = open('README.rst').read(),
author = 'Michał Sałaban',
author_email = 'michal@salaban.info',
requires = [
'creole',
'diff_match_patch',
'sorl_thumbnail',
],
packages = find_packages(),
include_package_data = True,
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
zip_safe = False,
)
|
<commit_before># -*- coding: utf-8 -*-
from distutils.core import setup
from setuptools import find_packages
setup(
name = 'djiki',
version = '0.2',
description = 'Django Wiki Application',
url = 'https://github.com/emesik/djiki/',
long_description = open('README.rst').read(),
author = 'Michał Sałaban',
author_email = 'michal@salaban.info',
requires = [
'creole',
'diff_match_patch',
'sorl_thumbnail',
],
packages = find_packages(),
include_package_data = True,
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
zip_safe = False,
)
<commit_msg>Bump up version to 0.3<commit_after>
|
# -*- coding: utf-8 -*-
from distutils.core import setup
from setuptools import find_packages
setup(
name = 'djiki',
version = '0.3',
description = 'Django Wiki Application',
url = 'https://github.com/emesik/djiki/',
long_description = open('README.rst').read(),
author = 'Michał Sałaban',
author_email = 'michal@salaban.info',
requires = [
'creole',
'diff_match_patch',
'sorl_thumbnail',
],
packages = find_packages(),
include_package_data = True,
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
zip_safe = False,
)
|
# -*- coding: utf-8 -*-
from distutils.core import setup
from setuptools import find_packages
setup(
name = 'djiki',
version = '0.2',
description = 'Django Wiki Application',
url = 'https://github.com/emesik/djiki/',
long_description = open('README.rst').read(),
author = 'Michał Sałaban',
author_email = 'michal@salaban.info',
requires = [
'creole',
'diff_match_patch',
'sorl_thumbnail',
],
packages = find_packages(),
include_package_data = True,
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
zip_safe = False,
)
Bump up version to 0.3# -*- coding: utf-8 -*-
from distutils.core import setup
from setuptools import find_packages
setup(
name = 'djiki',
version = '0.3',
description = 'Django Wiki Application',
url = 'https://github.com/emesik/djiki/',
long_description = open('README.rst').read(),
author = 'Michał Sałaban',
author_email = 'michal@salaban.info',
requires = [
'creole',
'diff_match_patch',
'sorl_thumbnail',
],
packages = find_packages(),
include_package_data = True,
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
zip_safe = False,
)
|
<commit_before># -*- coding: utf-8 -*-
from distutils.core import setup
from setuptools import find_packages
setup(
name = 'djiki',
version = '0.2',
description = 'Django Wiki Application',
url = 'https://github.com/emesik/djiki/',
long_description = open('README.rst').read(),
author = 'Michał Sałaban',
author_email = 'michal@salaban.info',
requires = [
'creole',
'diff_match_patch',
'sorl_thumbnail',
],
packages = find_packages(),
include_package_data = True,
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
zip_safe = False,
)
<commit_msg>Bump up version to 0.3<commit_after># -*- coding: utf-8 -*-
from distutils.core import setup
from setuptools import find_packages
setup(
name = 'djiki',
version = '0.3',
description = 'Django Wiki Application',
url = 'https://github.com/emesik/djiki/',
long_description = open('README.rst').read(),
author = 'Michał Sałaban',
author_email = 'michal@salaban.info',
requires = [
'creole',
'diff_match_patch',
'sorl_thumbnail',
],
packages = find_packages(),
include_package_data = True,
classifiers = [
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
zip_safe = False,
)
|
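The djiki record above is a one-line change to the version string in setup.py. A minimal sketch of automating such a bump, assuming the version is written as version = 'MAJOR.MINOR' in a setup.py next to the script (the bump_minor helper below is illustrative and not part of djiki):

import re
from pathlib import Path

def bump_minor(setup_path="setup.py"):
    # Rewrite version = 'MAJOR.MINOR' with the minor part incremented.
    text = Path(setup_path).read_text(encoding="utf-8")
    match = re.search(r"version\s*=\s*'(\d+)\.(\d+)'", text)
    if match is None:
        raise ValueError("no version = 'MAJOR.MINOR' string found")
    new_version = "%d.%d" % (int(match.group(1)), int(match.group(2)) + 1)
    Path(setup_path).write_text(
        text[:match.start(1)] + new_version + text[match.end(2):], encoding="utf-8")
    return new_version

Keeping the version in a single place (for example a __version__ attribute that setup.py reads) avoids this kind of single-line commit entirely.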
f2eb6913787bc933c70fca1c1d98058be88d739f
|
setup.py
|
setup.py
|
#!/usr/bin/env python3
# coding=utf-8
from setuptools import setup
# Get long description (used on PyPI project page)
def get_long_description():
with open('README.md', 'r') as readme_file:
return readme_file.read()
setup(
name='alfred-workflow-packager',
version='2.0.0b1',
description='A CLI utility for packaging and exporting Alfred workflows',
long_description=get_long_description(),
long_description_content_type='text/markdown',
url='https://github.com/caleb531/alfred-workflow-packager',
author='Caleb Evans',
author_email='caleb@calebevans.me',
license='MIT',
keywords='alfred workflow package export',
packages=['awp'],
package_data={
'awp': ['data/config-schema.json']
},
install_requires=[
'jsonschema >= 4, < 5'
],
entry_points={
'console_scripts': [
'awp=awp.main:main'
]
}
)
|
#!/usr/bin/env python3
# coding=utf-8
from setuptools import setup
# Get long description (used on PyPI project page)
def get_long_description():
with open('README.md', 'r') as readme_file:
return readme_file.read()
setup(
name='alfred-workflow-packager',
version='2.0.0b2',
description='A CLI utility for packaging and exporting Alfred workflows',
long_description=get_long_description(),
long_description_content_type='text/markdown',
url='https://github.com/caleb531/alfred-workflow-packager',
author='Caleb Evans',
author_email='caleb@calebevans.me',
license='MIT',
keywords='alfred workflow package export',
packages=['awp'],
package_data={
'awp': ['data/config-schema.json']
},
install_requires=[
'jsonschema >= 4, < 5'
],
entry_points={
'console_scripts': [
'awp=awp.main:main'
]
}
)
|
Update package version to 2.0.0-beta.2
|
Update package version to 2.0.0-beta.2
|
Python
|
mit
|
caleb531/alfred-workflow-packager
|
#!/usr/bin/env python3
# coding=utf-8
from setuptools import setup
# Get long description (used on PyPI project page)
def get_long_description():
with open('README.md', 'r') as readme_file:
return readme_file.read()
setup(
name='alfred-workflow-packager',
version='2.0.0b1',
description='A CLI utility for packaging and exporting Alfred workflows',
long_description=get_long_description(),
long_description_content_type='text/markdown',
url='https://github.com/caleb531/alfred-workflow-packager',
author='Caleb Evans',
author_email='caleb@calebevans.me',
license='MIT',
keywords='alfred workflow package export',
packages=['awp'],
package_data={
'awp': ['data/config-schema.json']
},
install_requires=[
'jsonschema >= 4, < 5'
],
entry_points={
'console_scripts': [
'awp=awp.main:main'
]
}
)
Update package version to 2.0.0-beta.2
|
#!/usr/bin/env python3
# coding=utf-8
from setuptools import setup
# Get long description (used on PyPI project page)
def get_long_description():
with open('README.md', 'r') as readme_file:
return readme_file.read()
setup(
name='alfred-workflow-packager',
version='2.0.0b2',
description='A CLI utility for packaging and exporting Alfred workflows',
long_description=get_long_description(),
long_description_content_type='text/markdown',
url='https://github.com/caleb531/alfred-workflow-packager',
author='Caleb Evans',
author_email='caleb@calebevans.me',
license='MIT',
keywords='alfred workflow package export',
packages=['awp'],
package_data={
'awp': ['data/config-schema.json']
},
install_requires=[
'jsonschema >= 4, < 5'
],
entry_points={
'console_scripts': [
'awp=awp.main:main'
]
}
)
|
<commit_before>#!/usr/bin/env python3
# coding=utf-8
from setuptools import setup
# Get long description (used on PyPI project page)
def get_long_description():
with open('README.md', 'r') as readme_file:
return readme_file.read()
setup(
name='alfred-workflow-packager',
version='2.0.0b1',
description='A CLI utility for packaging and exporting Alfred workflows',
long_description=get_long_description(),
long_description_content_type='text/markdown',
url='https://github.com/caleb531/alfred-workflow-packager',
author='Caleb Evans',
author_email='caleb@calebevans.me',
license='MIT',
keywords='alfred workflow package export',
packages=['awp'],
package_data={
'awp': ['data/config-schema.json']
},
install_requires=[
'jsonschema >= 4, < 5'
],
entry_points={
'console_scripts': [
'awp=awp.main:main'
]
}
)
<commit_msg>Update package version to 2.0.0-beta.2<commit_after>
|
#!/usr/bin/env python3
# coding=utf-8
from setuptools import setup
# Get long description (used on PyPI project page)
def get_long_description():
with open('README.md', 'r') as readme_file:
return readme_file.read()
setup(
name='alfred-workflow-packager',
version='2.0.0b2',
description='A CLI utility for packaging and exporting Alfred workflows',
long_description=get_long_description(),
long_description_content_type='text/markdown',
url='https://github.com/caleb531/alfred-workflow-packager',
author='Caleb Evans',
author_email='caleb@calebevans.me',
license='MIT',
keywords='alfred workflow package export',
packages=['awp'],
package_data={
'awp': ['data/config-schema.json']
},
install_requires=[
'jsonschema >= 4, < 5'
],
entry_points={
'console_scripts': [
'awp=awp.main:main'
]
}
)
|
#!/usr/bin/env python3
# coding=utf-8
from setuptools import setup
# Get long description (used on PyPI project page)
def get_long_description():
with open('README.md', 'r') as readme_file:
return readme_file.read()
setup(
name='alfred-workflow-packager',
version='2.0.0b1',
description='A CLI utility for packaging and exporting Alfred workflows',
long_description=get_long_description(),
long_description_content_type='text/markdown',
url='https://github.com/caleb531/alfred-workflow-packager',
author='Caleb Evans',
author_email='caleb@calebevans.me',
license='MIT',
keywords='alfred workflow package export',
packages=['awp'],
package_data={
'awp': ['data/config-schema.json']
},
install_requires=[
'jsonschema >= 4, < 5'
],
entry_points={
'console_scripts': [
'awp=awp.main:main'
]
}
)
Update package version to 2.0.0-beta.2
#!/usr/bin/env python3
# coding=utf-8
from setuptools import setup
# Get long description (used on PyPI project page)
def get_long_description():
with open('README.md', 'r') as readme_file:
return readme_file.read()
setup(
name='alfred-workflow-packager',
version='2.0.0b2',
description='A CLI utility for packaging and exporting Alfred workflows',
long_description=get_long_description(),
long_description_content_type='text/markdown',
url='https://github.com/caleb531/alfred-workflow-packager',
author='Caleb Evans',
author_email='caleb@calebevans.me',
license='MIT',
keywords='alfred workflow package export',
packages=['awp'],
package_data={
'awp': ['data/config-schema.json']
},
install_requires=[
'jsonschema >= 4, < 5'
],
entry_points={
'console_scripts': [
'awp=awp.main:main'
]
}
)
|
<commit_before>#!/usr/bin/env python3
# coding=utf-8
from setuptools import setup
# Get long description (used on PyPI project page)
def get_long_description():
with open('README.md', 'r') as readme_file:
return readme_file.read()
setup(
name='alfred-workflow-packager',
version='2.0.0b1',
description='A CLI utility for packaging and exporting Alfred workflows',
long_description=get_long_description(),
long_description_content_type='text/markdown',
url='https://github.com/caleb531/alfred-workflow-packager',
author='Caleb Evans',
author_email='caleb@calebevans.me',
license='MIT',
keywords='alfred workflow package export',
packages=['awp'],
package_data={
'awp': ['data/config-schema.json']
},
install_requires=[
'jsonschema >= 4, < 5'
],
entry_points={
'console_scripts': [
'awp=awp.main:main'
]
}
)
<commit_msg>Update package version to 2.0.0-beta.2<commit_after>#!/usr/bin/env python3
# coding=utf-8
from setuptools import setup
# Get long description (used on PyPI project page)
def get_long_description():
with open('README.md', 'r') as readme_file:
return readme_file.read()
setup(
name='alfred-workflow-packager',
version='2.0.0b2',
description='A CLI utility for packaging and exporting Alfred workflows',
long_description=get_long_description(),
long_description_content_type='text/markdown',
url='https://github.com/caleb531/alfred-workflow-packager',
author='Caleb Evans',
author_email='caleb@calebevans.me',
license='MIT',
keywords='alfred workflow package export',
packages=['awp'],
package_data={
'awp': ['data/config-schema.json']
},
install_requires=[
'jsonschema >= 4, < 5'
],
entry_points={
'console_scripts': [
'awp=awp.main:main'
]
}
)
|
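Version strings such as '2.0.0b1' and '2.0.0b2' in the record above are PEP 440 pre-releases, so plain string comparison is not a reliable way to order them. A small check with the third-party packaging library (Version is its documented class; the assertions are only illustrative):

from packaging.version import Version

old, new = Version("2.0.0b1"), Version("2.0.0b2")
# Pre-releases sort before the final release: 2.0.0b1 < 2.0.0b2 < 2.0.0
assert old < new < Version("2.0.0")
assert new.is_prerelease and new.pre == ("b", 2)
print(old, "->", new)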
0f4c1b69753450802c1e8a438544b41cf705e92a
|
backend/api_access/main.py
|
backend/api_access/main.py
|
import pandas as pd
import geonames as gn
#Enter csv path
path = "/Users/larshelin/Documents/PycharmProjects/CEP/nyc-taxi/backend/parser/trips_shortend.csv"
#Open Dataframe
df = pd.read_csv(path)
for i in range(0,10):
# Get Latitude and Longitude
lat = df.ix[i]['pickup_latitude']
long = df.ix[i]['pickup_longitude']
print(gn.getNeighborhood(lat,long))
print(gn.getPOI(lat, long))
|
import pandas as pd
import geonames as gn
#Enter csv path
path = "/Users/larshelin/Documents/PycharmProjects/CEP/nyc-taxi/backend/parser/trips_shortend.csv"
#Open Dataframe
df = pd.read_csv(path)
for i in range(0,10):
# Get Latitude and Longitude
lat = df.ix[i]['pickup_latitude']
long = df.ix[i]['pickup_longitude']
print("District: " + gn.getNeighborhood(lat,long)+ "; POI: " + gn.getPOI(lat, long) )
|
Change the way data is printed
|
Change the way data is printed
|
Python
|
mit
|
nikha1/nyc-taxi,nikha1/nyc-taxi,nikha1/nyc-taxi
|
import pandas as pd
import geonames as gn
#Enter csv path
path = "/Users/larshelin/Documents/PycharmProjects/CEP/nyc-taxi/backend/parser/trips_shortend.csv"
#Open Dataframe
df = pd.read_csv(path)
for i in range(0,10):
# Get Latitude and Longitude
lat = df.ix[i]['pickup_latitude']
long = df.ix[i]['pickup_longitude']
print(gn.getNeighborhood(lat,long))
    print(gn.getPOI(lat, long))
Change the way data is printed
|
import pandas as pd
import geonames as gn
#Enter csv path
path = "/Users/larshelin/Documents/PycharmProjects/CEP/nyc-taxi/backend/parser/trips_shortend.csv"
#Open Dataframe
df = pd.read_csv(path)
for i in range(0,10):
# Get Latitude and Longitude
lat = df.ix[i]['pickup_latitude']
long = df.ix[i]['pickup_longitude']
print("District: " + gn.getNeighborhood(lat,long)+ "; POI: " + gn.getPOI(lat, long) )
|
<commit_before>import pandas as pd
import geonames as gn
#Enter csv path
path = "/Users/larshelin/Documents/PycharmProjects/CEP/nyc-taxi/backend/parser/trips_shortend.csv"
#Open Dataframe
df = pd.read_csv(path)
for i in range(0,10):
# Get Latitude and Longitude
lat = df.ix[i]['pickup_latitude']
long = df.ix[i]['pickup_longitude']
print(gn.getNeighborhood(lat,long))
print(gn.getPOI(lat, long))<commit_msg>Change the way data is printed<commit_after>
|
import pandas as pd
import geonames as gn
#Enter csv path
path = "/Users/larshelin/Documents/PycharmProjects/CEP/nyc-taxi/backend/parser/trips_shortend.csv"
#Open Dataframe
df = pd.read_csv(path)
for i in range(0,10):
# Get Latitude and Longitude
lat = df.ix[i]['pickup_latitude']
long = df.ix[i]['pickup_longitude']
print("District: " + gn.getNeighborhood(lat,long)+ "; POI: " + gn.getPOI(lat, long) )
|
import pandas as pd
import geonames as gn
#Enter csv path
path = "/Users/larshelin/Documents/PycharmProjects/CEP/nyc-taxi/backend/parser/trips_shortend.csv"
#Open Dataframe
df = pd.read_csv(path)
for i in range(0,10):
# Get Latitude and Longitude
lat = df.ix[i]['pickup_latitude']
long = df.ix[i]['pickup_longitude']
print(gn.getNeighborhood(lat,long))
    print(gn.getPOI(lat, long))
Change the way data is printed
import pandas as pd
import geonames as gn
#Enter csv path
path = "/Users/larshelin/Documents/PycharmProjects/CEP/nyc-taxi/backend/parser/trips_shortend.csv"
#Open Dataframe
df = pd.read_csv(path)
for i in range(0,10):
# Get Latitude and Longitude
lat = df.ix[i]['pickup_latitude']
long = df.ix[i]['pickup_longitude']
print("District: " + gn.getNeighborhood(lat,long)+ "; POI: " + gn.getPOI(lat, long) )
|
<commit_before>import pandas as pd
import geonames as gn
#Enter csv path
path = "/Users/larshelin/Documents/PycharmProjects/CEP/nyc-taxi/backend/parser/trips_shortend.csv"
#Open Dataframe
df = pd.read_csv(path)
for i in range(0,10):
# Get Latitude and Longitude
lat = df.ix[i]['pickup_latitude']
long = df.ix[i]['pickup_longitude']
print(gn.getNeighborhood(lat,long))
print(gn.getPOI(lat, long))<commit_msg>Change the way data is printed<commit_after>import pandas as pd
import geonames as gn
#Enter csv path
path = "/Users/larshelin/Documents/PycharmProjects/CEP/nyc-taxi/backend/parser/trips_shortend.csv"
#Open Dataframe
df = pd.read_csv(path)
for i in range(0,10):
# Get Latitude and Longitude
lat = df.ix[i]['pickup_latitude']
long = df.ix[i]['pickup_longitude']
print("District: " + gn.getNeighborhood(lat,long)+ "; POI: " + gn.getPOI(lat, long) )
|
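The nyc-taxi change above only joins the two lookups into one formatted print. A hedged rewrite of the same loop using positional .iloc indexing and an f-string (DataFrame.ix has since been removed from pandas; the geonames helpers are assumed to behave exactly as in the record, and the CSV path is shortened for the sketch):

import pandas as pd
import geonames as gn

df = pd.read_csv("trips_shortend.csv")  # full absolute path shortened here
for i in range(10):
    row = df.iloc[i]
    lat, lon = row["pickup_latitude"], row["pickup_longitude"]
    print(f"District: {gn.getNeighborhood(lat, lon)}; POI: {gn.getPOI(lat, lon)}")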
91e8878764fd9914d56b01da7b8bbbbb37258a20
|
tests.py
|
tests.py
|
#!/usr/bin/python -O
import sqlite3
from parser import SQLITE3_DB_NAME
from random import randint, randrange
def main():
""" Ouputs a random clue (with game ID) from 10 random games for checking. """
sql = sqlite3.connect(SQLITE3_DB_NAME)
# list of random game id numbers
gids = [randint(1, 3790) for i in xrange(10)]
# output format
print "GID".rjust(5), "R -> Clue text -> Answer"
for gid in gids:
rows = sql.execute("select * from clues where game = ?", (gid, ))
rows = rows.fetchall()
# some games were skipped over
if len(rows) > 0:
meta = "#%d" % gid
print meta.rjust(5),
row = randrange(0, len(rows))
print rows[row][2], "->", rows[row][5], "->", rows[row][6]
if __name__ == "__main__":
main()
|
#!/usr/bin/python -O
# -*- coding: utf-8 -*-
import sqlite3
from parser import SQLITE3_DB_NAME
from random import randint, randrange
def main():
""" Ouputs a random clue (with game ID) from 10 random games for checking. """
sql = sqlite3.connect(SQLITE3_DB_NAME)
# list of random game id numbers
gids = [randint(1, 3790) for i in xrange(10)]
# output format
print "GID".rjust(5), "R -> Category -> Clue text -> Answer"
for gid in gids:
rows = sql.execute("select * from clues where game = ?", (gid, ))
rows = rows.fetchall()
# some games were skipped over
if len(rows) > 0:
meta = "#%d" % gid
print meta.rjust(5),
row = randrange(0, len(rows))
print rows[row][2], "->", rows[row][3], "->", rows[row][5], "->", rows[row][6]
if __name__ == "__main__":
main()
|
Test script to UTF-8 and output category.
|
Test script to UTF-8 and output category.
|
Python
|
mit
|
dangoldin/jeopardy-parser,dangoldin/jeopardy-parser,whymarrh/jeopardy-parser
|
#!/usr/bin/python -O
import sqlite3
from parser import SQLITE3_DB_NAME
from random import randint, randrange
def main():
""" Ouputs a random clue (with game ID) from 10 random games for checking. """
sql = sqlite3.connect(SQLITE3_DB_NAME)
# list of random game id numbers
gids = [randint(1, 3790) for i in xrange(10)]
# output format
print "GID".rjust(5), "R -> Clue text -> Answer"
for gid in gids:
rows = sql.execute("select * from clues where game = ?", (gid, ))
rows = rows.fetchall()
# some games were skipped over
if len(rows) > 0:
meta = "#%d" % gid
print meta.rjust(5),
row = randrange(0, len(rows))
print rows[row][2], "->", rows[row][5], "->", rows[row][6]
if __name__ == "__main__":
main()
Test script to UTF-8 and output category.
|
#!/usr/bin/python -O
# -*- coding: utf-8 -*-
import sqlite3
from parser import SQLITE3_DB_NAME
from random import randint, randrange
def main():
""" Ouputs a random clue (with game ID) from 10 random games for checking. """
sql = sqlite3.connect(SQLITE3_DB_NAME)
# list of random game id numbers
gids = [randint(1, 3790) for i in xrange(10)]
# output format
print "GID".rjust(5), "R -> Category -> Clue text -> Answer"
for gid in gids:
rows = sql.execute("select * from clues where game = ?", (gid, ))
rows = rows.fetchall()
# some games were skipped over
if len(rows) > 0:
meta = "#%d" % gid
print meta.rjust(5),
row = randrange(0, len(rows))
print rows[row][2], "->", rows[row][3], "->", rows[row][5], "->", rows[row][6]
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/python -O
import sqlite3
from parser import SQLITE3_DB_NAME
from random import randint, randrange
def main():
""" Ouputs a random clue (with game ID) from 10 random games for checking. """
sql = sqlite3.connect(SQLITE3_DB_NAME)
# list of random game id numbers
gids = [randint(1, 3790) for i in xrange(10)]
# output format
print "GID".rjust(5), "R -> Clue text -> Answer"
for gid in gids:
rows = sql.execute("select * from clues where game = ?", (gid, ))
rows = rows.fetchall()
# some games were skipped over
if len(rows) > 0:
meta = "#%d" % gid
print meta.rjust(5),
row = randrange(0, len(rows))
print rows[row][2], "->", rows[row][5], "->", rows[row][6]
if __name__ == "__main__":
main()
<commit_msg>Test script to UTF-8 and output category.<commit_after>
|
#!/usr/bin/python -O
# -*- coding: utf-8 -*-
import sqlite3
from parser import SQLITE3_DB_NAME
from random import randint, randrange
def main():
""" Ouputs a random clue (with game ID) from 10 random games for checking. """
sql = sqlite3.connect(SQLITE3_DB_NAME)
# list of random game id numbers
gids = [randint(1, 3790) for i in xrange(10)]
# output format
print "GID".rjust(5), "R -> Category -> Clue text -> Answer"
for gid in gids:
rows = sql.execute("select * from clues where game = ?", (gid, ))
rows = rows.fetchall()
# some games were skipped over
if len(rows) > 0:
meta = "#%d" % gid
print meta.rjust(5),
row = randrange(0, len(rows))
print rows[row][2], "->", rows[row][3], "->", rows[row][5], "->", rows[row][6]
if __name__ == "__main__":
main()
|
#!/usr/bin/python -O
import sqlite3
from parser import SQLITE3_DB_NAME
from random import randint, randrange
def main():
""" Ouputs a random clue (with game ID) from 10 random games for checking. """
sql = sqlite3.connect(SQLITE3_DB_NAME)
# list of random game id numbers
gids = [randint(1, 3790) for i in xrange(10)]
# output format
print "GID".rjust(5), "R -> Clue text -> Answer"
for gid in gids:
rows = sql.execute("select * from clues where game = ?", (gid, ))
rows = rows.fetchall()
# some games were skipped over
if len(rows) > 0:
meta = "#%d" % gid
print meta.rjust(5),
row = randrange(0, len(rows))
print rows[row][2], "->", rows[row][5], "->", rows[row][6]
if __name__ == "__main__":
main()
Test script to UTF-8 and output category.
#!/usr/bin/python -O
# -*- coding: utf-8 -*-
import sqlite3
from parser import SQLITE3_DB_NAME
from random import randint, randrange
def main():
""" Ouputs a random clue (with game ID) from 10 random games for checking. """
sql = sqlite3.connect(SQLITE3_DB_NAME)
# list of random game id numbers
gids = [randint(1, 3790) for i in xrange(10)]
# output format
print "GID".rjust(5), "R -> Category -> Clue text -> Answer"
for gid in gids:
rows = sql.execute("select * from clues where game = ?", (gid, ))
rows = rows.fetchall()
# some games were skipped over
if len(rows) > 0:
meta = "#%d" % gid
print meta.rjust(5),
row = randrange(0, len(rows))
print rows[row][2], "->", rows[row][3], "->", rows[row][5], "->", rows[row][6]
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/python -O
import sqlite3
from parser import SQLITE3_DB_NAME
from random import randint, randrange
def main():
""" Ouputs a random clue (with game ID) from 10 random games for checking. """
sql = sqlite3.connect(SQLITE3_DB_NAME)
# list of random game id numbers
gids = [randint(1, 3790) for i in xrange(10)]
# output format
print "GID".rjust(5), "R -> Clue text -> Answer"
for gid in gids:
rows = sql.execute("select * from clues where game = ?", (gid, ))
rows = rows.fetchall()
# some games were skipped over
if len(rows) > 0:
meta = "#%d" % gid
print meta.rjust(5),
row = randrange(0, len(rows))
print rows[row][2], "->", rows[row][5], "->", rows[row][6]
if __name__ == "__main__":
main()
<commit_msg>Test script to UTF-8 and output category.<commit_after>#!/usr/bin/python -O
# -*- coding: utf-8 -*-
import sqlite3
from parser import SQLITE3_DB_NAME
from random import randint, randrange
def main():
""" Ouputs a random clue (with game ID) from 10 random games for checking. """
sql = sqlite3.connect(SQLITE3_DB_NAME)
# list of random game id numbers
gids = [randint(1, 3790) for i in xrange(10)]
# output format
print "GID".rjust(5), "R -> Category -> Clue text -> Answer"
for gid in gids:
rows = sql.execute("select * from clues where game = ?", (gid, ))
rows = rows.fetchall()
# some games were skipped over
if len(rows) > 0:
meta = "#%d" % gid
print meta.rjust(5),
row = randrange(0, len(rows))
print rows[row][2], "->", rows[row][3], "->", rows[row][5], "->", rows[row][6]
if __name__ == "__main__":
main()
|
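The jeopardy-parser commit adds a UTF-8 coding line and prints the category column. As a sketch, SQLite can also pick the random clue itself instead of fetchall() plus randrange(); the column positions follow the record and ORDER BY RANDOM() LIMIT 1 is standard SQLite, while the function name is made up for the example:

# -*- coding: utf-8 -*-
import sqlite3
from random import randint
from parser import SQLITE3_DB_NAME  # same module-level constant as in the record

def print_random_clue(sql, gid):
    # Let SQLite choose one random clue for the game.
    row = sql.execute(
        "select * from clues where game = ? order by random() limit 1",
        (gid,)).fetchone()
    if row is not None:  # some games were skipped over
        # positions as in the record: 2 = R, 3 = category, 5 = clue, 6 = answer
        print("#%d %s -> %s -> %s -> %s" % (gid, row[2], row[3], row[5], row[6]))

if __name__ == "__main__":
    sql = sqlite3.connect(SQLITE3_DB_NAME)
    for gid in [randint(1, 3790) for _ in range(10)]:
        print_random_clue(sql, gid)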
fcffabef406cd0d983e4754c58c76760f0204357
|
pywikibot/families/commons_family.py
|
pywikibot/families/commons_family.py
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikimedia Commons family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = 'commons'
self.langs = {
'commons': 'commons.wikimedia.org',
}
self.interwiki_forward = 'wikipedia'
self.category_redirect_templates = {
'commons': (u'Category redirect',
u'Categoryredirect',
u'Synonym taxon category redirect',
u'Invalid taxon category redirect',
u'Monotypic taxon category redirect',
u'See cat',
u'Seecat',
u'See category',
u'Catredirect',
u'Cat redirect',
u'Cat-red',
u'Catredir',
u'Redirect category'),
}
self.disambcatname = {
'commons': u'Disambiguation'
}
def ssl_pathprefix(self, code):
return "/wikipedia/commons"
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikimedia Commons family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = 'commons'
self.langs = {
'commons': 'commons.wikimedia.org',
}
self.interwiki_forward = 'wikipedia'
self.category_redirect_templates = {
'commons': (u'Category redirect',
u'Categoryredirect',
u'Synonym taxon category redirect',
u'Invalid taxon category redirect',
u'Monotypic taxon category redirect',
u'See cat',
u'Seecat',
u'See category',
u'Catredirect',
u'Cat redirect',
u'Cat-red',
u'Catredir',
u'Redirect category'),
}
self.disambcatname = {
'commons': u'Disambiguation'
}
def ssl_pathprefix(self, code):
return "/wikipedia/commons"
def shared_data_repository(self, code, transcluded=False):
return ('wikidata', 'wikidata')
|
Enable Wikidata for Wikimedia Commons
|
Enable Wikidata for Wikimedia Commons
Change-Id: Ibc8734f65dcd97dc7af9674efe8655fe01dc61d3
|
Python
|
mit
|
smalyshev/pywikibot-core,Darkdadaah/pywikibot-core,npdoty/pywikibot,jayvdb/pywikibot-core,magul/pywikibot-core,VcamX/pywikibot-core,h4ck3rm1k3/pywikibot-core,wikimedia/pywikibot-core,hasteur/g13bot_tools_new,darthbhyrava/pywikibot-local,wikimedia/pywikibot-core,hasteur/g13bot_tools_new,Darkdadaah/pywikibot-core,emijrp/pywikibot-core,valhallasw/pywikibot-core,PersianWikipedia/pywikibot-core,icyflame/batman,hasteur/g13bot_tools_new,xZise/pywikibot-core,trishnaguha/pywikibot-core,jayvdb/pywikibot-core,h4ck3rm1k3/pywikibot-core,happy5214/pywikibot-core,TridevGuha/pywikibot-core,magul/pywikibot-core,npdoty/pywikibot,happy5214/pywikibot-core
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikimedia Commons family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = 'commons'
self.langs = {
'commons': 'commons.wikimedia.org',
}
self.interwiki_forward = 'wikipedia'
self.category_redirect_templates = {
'commons': (u'Category redirect',
u'Categoryredirect',
u'Synonym taxon category redirect',
u'Invalid taxon category redirect',
u'Monotypic taxon category redirect',
u'See cat',
u'Seecat',
u'See category',
u'Catredirect',
u'Cat redirect',
u'Cat-red',
u'Catredir',
u'Redirect category'),
}
self.disambcatname = {
'commons': u'Disambiguation'
}
def ssl_pathprefix(self, code):
return "/wikipedia/commons"
Enable Wikidata for Wikimedia Commons
Change-Id: Ibc8734f65dcd97dc7af9674efe8655fe01dc61d3
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikimedia Commons family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = 'commons'
self.langs = {
'commons': 'commons.wikimedia.org',
}
self.interwiki_forward = 'wikipedia'
self.category_redirect_templates = {
'commons': (u'Category redirect',
u'Categoryredirect',
u'Synonym taxon category redirect',
u'Invalid taxon category redirect',
u'Monotypic taxon category redirect',
u'See cat',
u'Seecat',
u'See category',
u'Catredirect',
u'Cat redirect',
u'Cat-red',
u'Catredir',
u'Redirect category'),
}
self.disambcatname = {
'commons': u'Disambiguation'
}
def ssl_pathprefix(self, code):
return "/wikipedia/commons"
def shared_data_repository(self, code, transcluded=False):
return ('wikidata', 'wikidata')
|
<commit_before># -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikimedia Commons family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = 'commons'
self.langs = {
'commons': 'commons.wikimedia.org',
}
self.interwiki_forward = 'wikipedia'
self.category_redirect_templates = {
'commons': (u'Category redirect',
u'Categoryredirect',
u'Synonym taxon category redirect',
u'Invalid taxon category redirect',
u'Monotypic taxon category redirect',
u'See cat',
u'Seecat',
u'See category',
u'Catredirect',
u'Cat redirect',
u'Cat-red',
u'Catredir',
u'Redirect category'),
}
self.disambcatname = {
'commons': u'Disambiguation'
}
def ssl_pathprefix(self, code):
return "/wikipedia/commons"
<commit_msg>Enable Wikidata for Wikimedia Commons
Change-Id: Ibc8734f65dcd97dc7af9674efe8655fe01dc61d3<commit_after>
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikimedia Commons family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = 'commons'
self.langs = {
'commons': 'commons.wikimedia.org',
}
self.interwiki_forward = 'wikipedia'
self.category_redirect_templates = {
'commons': (u'Category redirect',
u'Categoryredirect',
u'Synonym taxon category redirect',
u'Invalid taxon category redirect',
u'Monotypic taxon category redirect',
u'See cat',
u'Seecat',
u'See category',
u'Catredirect',
u'Cat redirect',
u'Cat-red',
u'Catredir',
u'Redirect category'),
}
self.disambcatname = {
'commons': u'Disambiguation'
}
def ssl_pathprefix(self, code):
return "/wikipedia/commons"
def shared_data_repository(self, code, transcluded=False):
return ('wikidata', 'wikidata')
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikimedia Commons family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = 'commons'
self.langs = {
'commons': 'commons.wikimedia.org',
}
self.interwiki_forward = 'wikipedia'
self.category_redirect_templates = {
'commons': (u'Category redirect',
u'Categoryredirect',
u'Synonym taxon category redirect',
u'Invalid taxon category redirect',
u'Monotypic taxon category redirect',
u'See cat',
u'Seecat',
u'See category',
u'Catredirect',
u'Cat redirect',
u'Cat-red',
u'Catredir',
u'Redirect category'),
}
self.disambcatname = {
'commons': u'Disambiguation'
}
def ssl_pathprefix(self, code):
return "/wikipedia/commons"
Enable Wikidata for Wikimedia Commons
Change-Id: Ibc8734f65dcd97dc7af9674efe8655fe01dc61d3
# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikimedia Commons family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = 'commons'
self.langs = {
'commons': 'commons.wikimedia.org',
}
self.interwiki_forward = 'wikipedia'
self.category_redirect_templates = {
'commons': (u'Category redirect',
u'Categoryredirect',
u'Synonym taxon category redirect',
u'Invalid taxon category redirect',
u'Monotypic taxon category redirect',
u'See cat',
u'Seecat',
u'See category',
u'Catredirect',
u'Cat redirect',
u'Cat-red',
u'Catredir',
u'Redirect category'),
}
self.disambcatname = {
'commons': u'Disambiguation'
}
def ssl_pathprefix(self, code):
return "/wikipedia/commons"
def shared_data_repository(self, code, transcluded=False):
return ('wikidata', 'wikidata')
|
<commit_before># -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikimedia Commons family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = 'commons'
self.langs = {
'commons': 'commons.wikimedia.org',
}
self.interwiki_forward = 'wikipedia'
self.category_redirect_templates = {
'commons': (u'Category redirect',
u'Categoryredirect',
u'Synonym taxon category redirect',
u'Invalid taxon category redirect',
u'Monotypic taxon category redirect',
u'See cat',
u'Seecat',
u'See category',
u'Catredirect',
u'Cat redirect',
u'Cat-red',
u'Catredir',
u'Redirect category'),
}
self.disambcatname = {
'commons': u'Disambiguation'
}
def ssl_pathprefix(self, code):
return "/wikipedia/commons"
<commit_msg>Enable Wikidata for Wikimedia Commons
Change-Id: Ibc8734f65dcd97dc7af9674efe8655fe01dc61d3<commit_after># -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# The Wikimedia Commons family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = 'commons'
self.langs = {
'commons': 'commons.wikimedia.org',
}
self.interwiki_forward = 'wikipedia'
self.category_redirect_templates = {
'commons': (u'Category redirect',
u'Categoryredirect',
u'Synonym taxon category redirect',
u'Invalid taxon category redirect',
u'Monotypic taxon category redirect',
u'See cat',
u'Seecat',
u'See category',
u'Catredirect',
u'Cat redirect',
u'Cat-red',
u'Catredir',
u'Redirect category'),
}
self.disambcatname = {
'commons': u'Disambiguation'
}
def ssl_pathprefix(self, code):
return "/wikipedia/commons"
def shared_data_repository(self, code, transcluded=False):
return ('wikidata', 'wikidata')
|
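The pywikibot commit wires Commons to Wikidata by overriding a single hook on the family class. The shape of that pattern with stand-in classes (the real Family/WikimediaFamily API is deliberately not reproduced here):

class BaseFamily(object):
    """Stand-in for a framework base class exposing an overridable hook."""
    def shared_data_repository(self, code, transcluded=False):
        return (None, None)  # default: no shared structured-data repository

class CommonsFamily(BaseFamily):
    def shared_data_repository(self, code, transcluded=False):
        # Every code of this family points at Wikidata.
        return ('wikidata', 'wikidata')

assert CommonsFamily().shared_data_repository('commons') == ('wikidata', 'wikidata')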
9fa7d9dfe4218f919133efae8495e4bcb16ccc5e
|
pastas/recharge/recharge_func.py
|
pastas/recharge/recharge_func.py
|
"""recharge_func module
Author: R.A. Collenteur
Contains the classes for the different models that are available to calculate
the recharge from evaporation and precipitation data.
Each Recharge class contains at least the following:
Attributes
----------
nparam: int
Number of parameters needed for this model.
Functions
---------
set_parameters(self, name)
A function that returns a Pandas DataFrame of the parameters of the
recharge function. Columns of the dataframe need to be ['value', 'pmin',
'pmax', 'vary']. Rows of the DataFrame have names of the parameters. Input
name is used as a prefix. This function is called by a Tseries object.
simulate(self, evap, prec, p=None)
A function that returns an array of the simulated recharge series.
"""
import pandas as pd
class Linear:
"""Linear recharge model
The recharge to the groundwater is calculated as:
R = P - f * E
"""
def __init__(self):
self.nparam = 1
def set_parameters(self, name):
parameters = pd.DataFrame(
columns=['initial', 'pmin', 'pmax', 'vary', 'name'])
parameters.loc[name + '_f'] = (-1.0, -5.0, 0.0, 1, name)
return parameters
def simulate(self, precip, evap, p=None):
recharge = precip + p * evap
return recharge
|
"""recharge_func module
Author: R.A. Collenteur
Contains the classes for the different models that are available to calculate
the recharge from evaporation and precipitation data.
Each Recharge class contains at least the following:
Attributes
----------
nparam: int
Number of parameters needed for this model.
Functions
---------
set_parameters(self, name)
A function that returns a Pandas DataFrame of the parameters of the
recharge function. Columns of the dataframe need to be ['value', 'pmin',
'pmax', 'vary']. Rows of the DataFrame have names of the parameters. Input
name is used as a prefix. This function is called by a Tseries object.
simulate(self, evap, prec, p=None)
A function that returns an array of the simulated recharge series.
"""
import pandas as pd
class Linear:
"""Linear recharge model
The recharge to the groundwater is calculated as:
R = P - f * E
"""
def __init__(self):
self.nparam = 1
def set_parameters(self, name):
parameters = pd.DataFrame(
columns=['initial', 'pmin', 'pmax', 'vary', 'name'])
parameters.loc[name + '_f'] = (-1.0, -2.0, 0.0, 1, name)
return parameters
def simulate(self, precip, evap, p=None):
recharge = precip + p * evap
return recharge
|
Change parameters to match StressModel2
|
Change parameters to match StressModel2
and so the results.
|
Python
|
mit
|
gwtsa/gwtsa,pastas/pasta,pastas/pastas
|
"""recharge_func module
Author: R.A. Collenteur
Contains the classes for the different models that are available to calculate
the recharge from evaporation and precipitation data.
Each Recharge class contains at least the following:
Attributes
----------
nparam: int
Number of parameters needed for this model.
Functions
---------
set_parameters(self, name)
A function that returns a Pandas DataFrame of the parameters of the
recharge function. Columns of the dataframe need to be ['value', 'pmin',
'pmax', 'vary']. Rows of the DataFrame have names of the parameters. Input
name is used as a prefix. This function is called by a Tseries object.
simulate(self, evap, prec, p=None)
A function that returns an array of the simulated recharge series.
"""
import pandas as pd
class Linear:
"""Linear recharge model
The recharge to the groundwater is calculated as:
R = P - f * E
"""
def __init__(self):
self.nparam = 1
def set_parameters(self, name):
parameters = pd.DataFrame(
columns=['initial', 'pmin', 'pmax', 'vary', 'name'])
parameters.loc[name + '_f'] = (-1.0, -5.0, 0.0, 1, name)
return parameters
def simulate(self, precip, evap, p=None):
recharge = precip + p * evap
return recharge
Change parameters to match StressModel2
and so the results.
|
"""recharge_func module
Author: R.A. Collenteur
Contains the classes for the different models that are available to calculate
the recharge from evaporation and precipitation data.
Each Recharge class contains at least the following:
Attributes
----------
nparam: int
Number of parameters needed for this model.
Functions
---------
set_parameters(self, name)
A function that returns a Pandas DataFrame of the parameters of the
recharge function. Columns of the dataframe need to be ['value', 'pmin',
'pmax', 'vary']. Rows of the DataFrame have names of the parameters. Input
name is used as a prefix. This function is called by a Tseries object.
simulate(self, evap, prec, p=None)
A function that returns an array of the simulated recharge series.
"""
import pandas as pd
class Linear:
"""Linear recharge model
The recharge to the groundwater is calculated as:
R = P - f * E
"""
def __init__(self):
self.nparam = 1
def set_parameters(self, name):
parameters = pd.DataFrame(
columns=['initial', 'pmin', 'pmax', 'vary', 'name'])
parameters.loc[name + '_f'] = (-1.0, -2.0, 0.0, 1, name)
return parameters
def simulate(self, precip, evap, p=None):
recharge = precip + p * evap
return recharge
|
<commit_before>"""recharge_func module
Author: R.A. Collenteur
Contains the classes for the different models that are available to calculate
the recharge from evaporation and precipitation data.
Each Recharge class contains at least the following:
Attributes
----------
nparam: int
Number of parameters needed for this model.
Functions
---------
set_parameters(self, name)
A function that returns a Pandas DataFrame of the parameters of the
recharge function. Columns of the dataframe need to be ['value', 'pmin',
'pmax', 'vary']. Rows of the DataFrame have names of the parameters. Input
name is used as a prefix. This function is called by a Tseries object.
simulate(self, evap, prec, p=None)
A function that returns an array of the simulated recharge series.
"""
import pandas as pd
class Linear:
"""Linear recharge model
The recharge to the groundwater is calculated as:
R = P - f * E
"""
def __init__(self):
self.nparam = 1
def set_parameters(self, name):
parameters = pd.DataFrame(
columns=['initial', 'pmin', 'pmax', 'vary', 'name'])
parameters.loc[name + '_f'] = (-1.0, -5.0, 0.0, 1, name)
return parameters
def simulate(self, precip, evap, p=None):
recharge = precip + p * evap
return recharge
<commit_msg>Change parameters to match StressModel2
and so the results.<commit_after>
|
"""recharge_func module
Author: R.A. Collenteur
Contains the classes for the different models that are available to calculate
the recharge from evaporation and precipitation data.
Each Recharge class contains at least the following:
Attributes
----------
nparam: int
Number of parameters needed for this model.
Functions
---------
set_parameters(self, name)
A function that returns a Pandas DataFrame of the parameters of the
recharge function. Columns of the dataframe need to be ['value', 'pmin',
'pmax', 'vary']. Rows of the DataFrame have names of the parameters. Input
name is used as a prefix. This function is called by a Tseries object.
simulate(self, evap, prec, p=None)
A function that returns an array of the simulated recharge series.
"""
import pandas as pd
class Linear:
"""Linear recharge model
The recharge to the groundwater is calculated as:
R = P - f * E
"""
def __init__(self):
self.nparam = 1
def set_parameters(self, name):
parameters = pd.DataFrame(
columns=['initial', 'pmin', 'pmax', 'vary', 'name'])
parameters.loc[name + '_f'] = (-1.0, -2.0, 0.0, 1, name)
return parameters
def simulate(self, precip, evap, p=None):
recharge = precip + p * evap
return recharge
|
"""recharge_func module
Author: R.A. Collenteur
Contains the classes for the different models that are available to calculate
the recharge from evaporation and precipitation data.
Each Recharge class contains at least the following:
Attributes
----------
nparam: int
Number of parameters needed for this model.
Functions
---------
set_parameters(self, name)
A function that returns a Pandas DataFrame of the parameters of the
recharge function. Columns of the dataframe need to be ['value', 'pmin',
'pmax', 'vary']. Rows of the DataFrame have names of the parameters. Input
name is used as a prefix. This function is called by a Tseries object.
simulate(self, evap, prec, p=None)
A function that returns an array of the simulated recharge series.
"""
import pandas as pd
class Linear:
"""Linear recharge model
The recharge to the groundwater is calculated as:
R = P - f * E
"""
def __init__(self):
self.nparam = 1
def set_parameters(self, name):
parameters = pd.DataFrame(
columns=['initial', 'pmin', 'pmax', 'vary', 'name'])
parameters.loc[name + '_f'] = (-1.0, -5.0, 0.0, 1, name)
return parameters
def simulate(self, precip, evap, p=None):
recharge = precip + p * evap
return recharge
Change parameters to match StressModel2
and so the results."""recharge_func module
Author: R.A. Collenteur
Contains the classes for the different models that are available to calculate
the recharge from evaporation and precipitation data.
Each Recharge class contains at least the following:
Attributes
----------
nparam: int
Number of parameters needed for this model.
Functions
---------
set_parameters(self, name)
A function that returns a Pandas DataFrame of the parameters of the
recharge function. Columns of the dataframe need to be ['value', 'pmin',
'pmax', 'vary']. Rows of the DataFrame have names of the parameters. Input
name is used as a prefix. This function is called by a Tseries object.
simulate(self, evap, prec, p=None)
A function that returns an array of the simulated recharge series.
"""
import pandas as pd
class Linear:
"""Linear recharge model
The recharge to the groundwater is calculated as:
R = P - f * E
"""
def __init__(self):
self.nparam = 1
def set_parameters(self, name):
parameters = pd.DataFrame(
columns=['initial', 'pmin', 'pmax', 'vary', 'name'])
parameters.loc[name + '_f'] = (-1.0, -2.0, 0.0, 1, name)
return parameters
def simulate(self, precip, evap, p=None):
recharge = precip + p * evap
return recharge
|
<commit_before>"""recharge_func module
Author: R.A. Collenteur
Contains the classes for the different models that are available to calculate
the recharge from evaporation and precipitation data.
Each Recharge class contains at least the following:
Attributes
----------
nparam: int
Number of parameters needed for this model.
Functions
---------
set_parameters(self, name)
A function that returns a Pandas DataFrame of the parameters of the
recharge function. Columns of the dataframe need to be ['value', 'pmin',
'pmax', 'vary']. Rows of the DataFrame have names of the parameters. Input
name is used as a prefix. This function is called by a Tseries object.
simulate(self, evap, prec, p=None)
A function that returns an array of the simulated recharge series.
"""
import pandas as pd
class Linear:
"""Linear recharge model
The recharge to the groundwater is calculated as:
R = P - f * E
"""
def __init__(self):
self.nparam = 1
def set_parameters(self, name):
parameters = pd.DataFrame(
columns=['initial', 'pmin', 'pmax', 'vary', 'name'])
parameters.loc[name + '_f'] = (-1.0, -5.0, 0.0, 1, name)
return parameters
def simulate(self, precip, evap, p=None):
recharge = precip + p * evap
return recharge
<commit_msg>Change parameters to match StressModel2
and so the results.<commit_after>"""recharge_func module
Author: R.A. Collenteur
Contains the classes for the different models that are available to calculate
the recharge from evaporation and precipitation data.
Each Recharge class contains at least the following:
Attributes
----------
nparam: int
Number of parameters needed for this model.
Functions
---------
set_parameters(self, name)
A function that returns a Pandas DataFrame of the parameters of the
recharge function. Columns of the dataframe need to be ['value', 'pmin',
'pmax', 'vary']. Rows of the DataFrame have names of the parameters. Input
name is used as a prefix. This function is called by a Tseries object.
simulate(self, evap, prec, p=None)
A function that returns an array of the simulated recharge series.
"""
import pandas as pd
class Linear:
"""Linear recharge model
The recharge to the groundwater is calculated as:
R = P - f * E
"""
def __init__(self):
self.nparam = 1
def set_parameters(self, name):
parameters = pd.DataFrame(
columns=['initial', 'pmin', 'pmax', 'vary', 'name'])
parameters.loc[name + '_f'] = (-1.0, -2.0, 0.0, 1, name)
return parameters
def simulate(self, precip, evap, p=None):
recharge = precip + p * evap
return recharge
|
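The pastas change narrows the lower bound of the evaporation factor from -5.0 to -2.0; simulate() itself still computes R = P + f * E with f expected to be negative. A quick numeric check of that formula on made-up series:

import pandas as pd

precip = pd.Series([2.0, 0.0, 5.0, 1.0])
evap = pd.Series([1.0, 1.5, 0.5, 1.0])
f = -1.0  # initial value from the parameters table; now bounded to (-2.0, 0.0)
recharge = precip + f * evap  # R = P + f * E, i.e. precipitation minus scaled evaporation
print(recharge.tolist())  # [1.0, -1.5, 4.5, 0.0]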
73d444c234ddb734ac14b688f6542750ea09de78
|
api/init/graphqlapi/routes.py
|
api/init/graphqlapi/routes.py
|
from graphqlapi.proxy import proxy_request
from graphqlapi.interceptor import RequestException
from flask_restplus import Resource, fields, Namespace, Api
from docker.errors import APIError
from flask import request, jsonify, make_response
def register_graphql(namespace: Namespace, api: Api):
"""Method used to register the GraphQL namespace and endpoint."""
# Create expected headers and payload
headers = api.parser()
payload = api.model('Payload', {'query': fields.String(
required=True,
description='GraphQL query or mutation',
example='{allIndicatorTypes{nodes{id,name}}}')})
@namespace.route('/graphql', endpoint='with-parser')
@namespace.doc()
class GraphQL(Resource):
@namespace.expect(headers, payload, validate=True)
def post(self):
"""
Execute GraphQL queries and mutations
Use this endpoint to send http request to the GraphQL API.
"""
payload = request.json
try:
status, response = proxy_request(payload)
return make_response(jsonify(response), status)
except RequestException as ex:
return ex.to_response()
except APIError as apiError:
return make_response(jsonify({'message': apiError.explanation}), apiError.status_code)
|
from docker.errors import APIError
from flask import request, jsonify, make_response
from flask_restplus import Resource, fields, Namespace, Api
from graphqlapi.exceptions import RequestException
from graphqlapi.proxy import proxy_request
def register_graphql(namespace: Namespace, api: Api):
"""Method used to register the GraphQL namespace and endpoint."""
# Create expected headers and payload
headers = api.parser()
payload = api.model('Payload', {'query': fields.String(
required=True,
description='GraphQL query or mutation',
example='{allIndicatorTypes{nodes{id,name}}}')})
@namespace.route('/graphql', endpoint='with-parser')
@namespace.doc()
class GraphQL(Resource):
@namespace.expect(headers, payload, validate=True)
def post(self):
"""
Execute GraphQL queries and mutations
Use this endpoint to send http request to the GraphQL API.
"""
payload = request.json
try:
status, response = proxy_request(payload)
return make_response(jsonify(response), status)
except RequestException as ex:
return ex.to_response()
except APIError as apiError:
return make_response(jsonify({'message': apiError.explanation}), apiError.status_code)
|
Reorder imports in alphabetical order
|
Reorder imports in alphabetical order
|
Python
|
apache-2.0
|
alexisrolland/data-quality,alexisrolland/data-quality,alexisrolland/data-quality,alexisrolland/data-quality
|
from graphqlapi.proxy import proxy_request
from graphqlapi.interceptor import RequestException
from flask_restplus import Resource, fields, Namespace, Api
from docker.errors import APIError
from flask import request, jsonify, make_response
def register_graphql(namespace: Namespace, api: Api):
"""Method used to register the GraphQL namespace and endpoint."""
# Create expected headers and payload
headers = api.parser()
payload = api.model('Payload', {'query': fields.String(
required=True,
description='GraphQL query or mutation',
example='{allIndicatorTypes{nodes{id,name}}}')})
@namespace.route('/graphql', endpoint='with-parser')
@namespace.doc()
class GraphQL(Resource):
@namespace.expect(headers, payload, validate=True)
def post(self):
"""
Execute GraphQL queries and mutations
Use this endpoint to send http request to the GraphQL API.
"""
payload = request.json
try:
status, response = proxy_request(payload)
return make_response(jsonify(response), status)
except RequestException as ex:
return ex.to_response()
except APIError as apiError:
return make_response(jsonify({'message': apiError.explanation}), apiError.status_code)
Reorder imports in alphabetical order
|
from docker.errors import APIError
from flask import request, jsonify, make_response
from flask_restplus import Resource, fields, Namespace, Api
from graphqlapi.exceptions import RequestException
from graphqlapi.proxy import proxy_request
def register_graphql(namespace: Namespace, api: Api):
"""Method used to register the GraphQL namespace and endpoint."""
# Create expected headers and payload
headers = api.parser()
payload = api.model('Payload', {'query': fields.String(
required=True,
description='GraphQL query or mutation',
example='{allIndicatorTypes{nodes{id,name}}}')})
@namespace.route('/graphql', endpoint='with-parser')
@namespace.doc()
class GraphQL(Resource):
@namespace.expect(headers, payload, validate=True)
def post(self):
"""
Execute GraphQL queries and mutations
Use this endpoint to send http request to the GraphQL API.
"""
payload = request.json
try:
status, response = proxy_request(payload)
return make_response(jsonify(response), status)
except RequestException as ex:
return ex.to_response()
except APIError as apiError:
return make_response(jsonify({'message': apiError.explanation}), apiError.status_code)
|
<commit_before>from graphqlapi.proxy import proxy_request
from graphqlapi.interceptor import RequestException
from flask_restplus import Resource, fields, Namespace, Api
from docker.errors import APIError
from flask import request, jsonify, make_response
def register_graphql(namespace: Namespace, api: Api):
"""Method used to register the GraphQL namespace and endpoint."""
# Create expected headers and payload
headers = api.parser()
payload = api.model('Payload', {'query': fields.String(
required=True,
description='GraphQL query or mutation',
example='{allIndicatorTypes{nodes{id,name}}}')})
@namespace.route('/graphql', endpoint='with-parser')
@namespace.doc()
class GraphQL(Resource):
@namespace.expect(headers, payload, validate=True)
def post(self):
"""
Execute GraphQL queries and mutations
Use this endpoint to send http request to the GraphQL API.
"""
payload = request.json
try:
status, response = proxy_request(payload)
return make_response(jsonify(response), status)
except RequestException as ex:
return ex.to_response()
except APIError as apiError:
return make_response(jsonify({'message': apiError.explanation}), apiError.status_code)
<commit_msg>Reorder imports in alphabetical order<commit_after>
|
from docker.errors import APIError
from flask import request, jsonify, make_response
from flask_restplus import Resource, fields, Namespace, Api
from graphqlapi.exceptions import RequestException
from graphqlapi.proxy import proxy_request
def register_graphql(namespace: Namespace, api: Api):
"""Method used to register the GraphQL namespace and endpoint."""
# Create expected headers and payload
headers = api.parser()
payload = api.model('Payload', {'query': fields.String(
required=True,
description='GraphQL query or mutation',
example='{allIndicatorTypes{nodes{id,name}}}')})
@namespace.route('/graphql', endpoint='with-parser')
@namespace.doc()
class GraphQL(Resource):
@namespace.expect(headers, payload, validate=True)
def post(self):
"""
Execute GraphQL queries and mutations
Use this endpoint to send http request to the GraphQL API.
"""
payload = request.json
try:
status, response = proxy_request(payload)
return make_response(jsonify(response), status)
except RequestException as ex:
return ex.to_response()
except APIError as apiError:
return make_response(jsonify({'message': apiError.explanation}), apiError.status_code)
|
from graphqlapi.proxy import proxy_request
from graphqlapi.interceptor import RequestException
from flask_restplus import Resource, fields, Namespace, Api
from docker.errors import APIError
from flask import request, jsonify, make_response
def register_graphql(namespace: Namespace, api: Api):
"""Method used to register the GraphQL namespace and endpoint."""
# Create expected headers and payload
headers = api.parser()
payload = api.model('Payload', {'query': fields.String(
required=True,
description='GraphQL query or mutation',
example='{allIndicatorTypes{nodes{id,name}}}')})
@namespace.route('/graphql', endpoint='with-parser')
@namespace.doc()
class GraphQL(Resource):
@namespace.expect(headers, payload, validate=True)
def post(self):
"""
Execute GraphQL queries and mutations
Use this endpoint to send http request to the GraphQL API.
"""
payload = request.json
try:
status, response = proxy_request(payload)
return make_response(jsonify(response), status)
except RequestException as ex:
return ex.to_response()
except APIError as apiError:
return make_response(jsonify({'message': apiError.explanation}), apiError.status_code)
Reorder imports in alphabetical order
from docker.errors import APIError
from flask import request, jsonify, make_response
from flask_restplus import Resource, fields, Namespace, Api
from graphqlapi.exceptions import RequestException
from graphqlapi.proxy import proxy_request
def register_graphql(namespace: Namespace, api: Api):
"""Method used to register the GraphQL namespace and endpoint."""
# Create expected headers and payload
headers = api.parser()
payload = api.model('Payload', {'query': fields.String(
required=True,
description='GraphQL query or mutation',
example='{allIndicatorTypes{nodes{id,name}}}')})
@namespace.route('/graphql', endpoint='with-parser')
@namespace.doc()
class GraphQL(Resource):
@namespace.expect(headers, payload, validate=True)
def post(self):
"""
Execute GraphQL queries and mutations
Use this endpoint to send http request to the GraphQL API.
"""
payload = request.json
try:
status, response = proxy_request(payload)
return make_response(jsonify(response), status)
except RequestException as ex:
return ex.to_response()
except APIError as apiError:
return make_response(jsonify({'message': apiError.explanation}), apiError.status_code)
|
<commit_before>from graphqlapi.proxy import proxy_request
from graphqlapi.interceptor import RequestException
from flask_restplus import Resource, fields, Namespace, Api
from docker.errors import APIError
from flask import request, jsonify, make_response
def register_graphql(namespace: Namespace, api: Api):
"""Method used to register the GraphQL namespace and endpoint."""
# Create expected headers and payload
headers = api.parser()
payload = api.model('Payload', {'query': fields.String(
required=True,
description='GraphQL query or mutation',
example='{allIndicatorTypes{nodes{id,name}}}')})
@namespace.route('/graphql', endpoint='with-parser')
@namespace.doc()
class GraphQL(Resource):
@namespace.expect(headers, payload, validate=True)
def post(self):
"""
Execute GraphQL queries and mutations
Use this endpoint to send http request to the GraphQL API.
"""
payload = request.json
try:
status, response = proxy_request(payload)
return make_response(jsonify(response), status)
except RequestException as ex:
return ex.to_response()
except APIError as apiError:
return make_response(jsonify({'message': apiError.explanation}), apiError.status_code)
<commit_msg>Reorder imports in alphabetical order<commit_after>from docker.errors import APIError
from flask import request, jsonify, make_response
from flask_restplus import Resource, fields, Namespace, Api
from graphqlapi.exceptions import RequestException
from graphqlapi.proxy import proxy_request
def register_graphql(namespace: Namespace, api: Api):
"""Method used to register the GraphQL namespace and endpoint."""
# Create expected headers and payload
headers = api.parser()
payload = api.model('Payload', {'query': fields.String(
required=True,
description='GraphQL query or mutation',
example='{allIndicatorTypes{nodes{id,name}}}')})
@namespace.route('/graphql', endpoint='with-parser')
@namespace.doc()
class GraphQL(Resource):
@namespace.expect(headers, payload, validate=True)
def post(self):
"""
Execute GraphQL queries and mutations
Use this endpoint to send http request to the GraphQL API.
"""
payload = request.json
try:
status, response = proxy_request(payload)
return make_response(jsonify(response), status)
except RequestException as ex:
return ex.to_response()
except APIError as apiError:
return make_response(jsonify({'message': apiError.explanation}), apiError.status_code)
|
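Import reordering like the data-quality commit above is usually automated. A sketch with the third-party isort library, whose code() helper returns the source with its imports sorted (whether graphqlapi lands in a separate first-party section depends on isort configuration):

import isort

source = (
    "from graphqlapi.proxy import proxy_request\n"
    "from graphqlapi.exceptions import RequestException\n"
    "from flask import request, jsonify, make_response\n"
    "from docker.errors import APIError\n"
)
print(isort.code(source))  # imports come back alphabetized, as in the hand-made reorder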
d88c2b8c99c57a434209741a65bdb2751415ec3f
|
setuptools/command/install_scripts.py
|
setuptools/command/install_scripts.py
|
from distutils.command.install_scripts import install_scripts \
as _install_scripts
from easy_install import get_script_args
from pkg_resources import Distribution, PathMetadata, ensure_directory
import os
from distutils import log
class install_scripts(_install_scripts):
"""Do normal script install, plus any egg_info wrapper scripts"""
def run(self):
self.run_command("egg_info")
_install_scripts.run(self) # run first to set up self.outfiles
ei_cmd = self.get_finalized_command("egg_info")
dist = Distribution(
ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
ei_cmd.egg_name, ei_cmd.egg_version,
)
for args in get_script_args(dist):
self.write_script(*args)
def write_script(self, script_name, contents, mode="t", *ignored):
"""Write an executable file to the scripts directory"""
log.info("Installing %s script to %s", script_name, self.install_dir)
target = os.path.join(self.install_dir, script_name)
self.outfiles.append(target)
if not self.dry_run:
ensure_directory(target)
f = open(target,"w"+mode)
f.write(contents)
f.close()
try:
os.chmod(target,0755)
except (AttributeError, os.error):
pass
|
from distutils.command.install_scripts import install_scripts \
as _install_scripts
from easy_install import get_script_args
from pkg_resources import Distribution, PathMetadata, ensure_directory
import os
from distutils import log
class install_scripts(_install_scripts):
"""Do normal script install, plus any egg_info wrapper scripts"""
def run(self):
self.run_command("egg_info")
if self.distribution.scripts:
_install_scripts.run(self) # run first to set up self.outfiles
else:
self.outfiles = []
ei_cmd = self.get_finalized_command("egg_info")
dist = Distribution(
ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
ei_cmd.egg_name, ei_cmd.egg_version,
)
for args in get_script_args(dist):
self.write_script(*args)
def write_script(self, script_name, contents, mode="t", *ignored):
"""Write an executable file to the scripts directory"""
log.info("Installing %s script to %s", script_name, self.install_dir)
target = os.path.join(self.install_dir, script_name)
self.outfiles.append(target)
if not self.dry_run:
ensure_directory(target)
f = open(target,"w"+mode)
f.write(contents)
f.close()
try:
os.chmod(target,0755)
except (AttributeError, os.error):
pass
|
Fix "legacy mode" trying to install scripts when there are none.
|
Fix "legacy mode" trying to install scripts when there are none.
--HG--
branch : setuptools
extra : convert_revision : svn%3A6015fed2-1504-0410-9fe1-9d1591cc4771/sandbox/trunk/setuptools%4041777
|
Python
|
mit
|
pypa/setuptools,pypa/setuptools,pypa/setuptools
|
from distutils.command.install_scripts import install_scripts \
as _install_scripts
from easy_install import get_script_args
from pkg_resources import Distribution, PathMetadata, ensure_directory
import os
from distutils import log
class install_scripts(_install_scripts):
"""Do normal script install, plus any egg_info wrapper scripts"""
def run(self):
self.run_command("egg_info")
_install_scripts.run(self) # run first to set up self.outfiles
ei_cmd = self.get_finalized_command("egg_info")
dist = Distribution(
ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
ei_cmd.egg_name, ei_cmd.egg_version,
)
for args in get_script_args(dist):
self.write_script(*args)
def write_script(self, script_name, contents, mode="t", *ignored):
"""Write an executable file to the scripts directory"""
log.info("Installing %s script to %s", script_name, self.install_dir)
target = os.path.join(self.install_dir, script_name)
self.outfiles.append(target)
if not self.dry_run:
ensure_directory(target)
f = open(target,"w"+mode)
f.write(contents)
f.close()
try:
os.chmod(target,0755)
except (AttributeError, os.error):
pass
Fix "legacy mode" trying to install scripts when there are none.
--HG--
branch : setuptools
extra : convert_revision : svn%3A6015fed2-1504-0410-9fe1-9d1591cc4771/sandbox/trunk/setuptools%4041777
|
from distutils.command.install_scripts import install_scripts \
as _install_scripts
from easy_install import get_script_args
from pkg_resources import Distribution, PathMetadata, ensure_directory
import os
from distutils import log
class install_scripts(_install_scripts):
"""Do normal script install, plus any egg_info wrapper scripts"""
def run(self):
self.run_command("egg_info")
if self.distribution.scripts:
_install_scripts.run(self) # run first to set up self.outfiles
else:
self.outfiles = []
ei_cmd = self.get_finalized_command("egg_info")
dist = Distribution(
ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
ei_cmd.egg_name, ei_cmd.egg_version,
)
for args in get_script_args(dist):
self.write_script(*args)
def write_script(self, script_name, contents, mode="t", *ignored):
"""Write an executable file to the scripts directory"""
log.info("Installing %s script to %s", script_name, self.install_dir)
target = os.path.join(self.install_dir, script_name)
self.outfiles.append(target)
if not self.dry_run:
ensure_directory(target)
f = open(target,"w"+mode)
f.write(contents)
f.close()
try:
os.chmod(target,0755)
except (AttributeError, os.error):
pass
|
<commit_before>from distutils.command.install_scripts import install_scripts \
as _install_scripts
from easy_install import get_script_args
from pkg_resources import Distribution, PathMetadata, ensure_directory
import os
from distutils import log
class install_scripts(_install_scripts):
"""Do normal script install, plus any egg_info wrapper scripts"""
def run(self):
self.run_command("egg_info")
_install_scripts.run(self) # run first to set up self.outfiles
ei_cmd = self.get_finalized_command("egg_info")
dist = Distribution(
ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
ei_cmd.egg_name, ei_cmd.egg_version,
)
for args in get_script_args(dist):
self.write_script(*args)
def write_script(self, script_name, contents, mode="t", *ignored):
"""Write an executable file to the scripts directory"""
log.info("Installing %s script to %s", script_name, self.install_dir)
target = os.path.join(self.install_dir, script_name)
self.outfiles.append(target)
if not self.dry_run:
ensure_directory(target)
f = open(target,"w"+mode)
f.write(contents)
f.close()
try:
os.chmod(target,0755)
except (AttributeError, os.error):
pass
<commit_msg>Fix "legacy mode" trying to install scripts when there are none.
--HG--
branch : setuptools
extra : convert_revision : svn%3A6015fed2-1504-0410-9fe1-9d1591cc4771/sandbox/trunk/setuptools%4041777<commit_after>
|
from distutils.command.install_scripts import install_scripts \
as _install_scripts
from easy_install import get_script_args
from pkg_resources import Distribution, PathMetadata, ensure_directory
import os
from distutils import log
class install_scripts(_install_scripts):
"""Do normal script install, plus any egg_info wrapper scripts"""
def run(self):
self.run_command("egg_info")
if self.distribution.scripts:
_install_scripts.run(self) # run first to set up self.outfiles
else:
self.outfiles = []
ei_cmd = self.get_finalized_command("egg_info")
dist = Distribution(
ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
ei_cmd.egg_name, ei_cmd.egg_version,
)
for args in get_script_args(dist):
self.write_script(*args)
def write_script(self, script_name, contents, mode="t", *ignored):
"""Write an executable file to the scripts directory"""
log.info("Installing %s script to %s", script_name, self.install_dir)
target = os.path.join(self.install_dir, script_name)
self.outfiles.append(target)
if not self.dry_run:
ensure_directory(target)
f = open(target,"w"+mode)
f.write(contents)
f.close()
try:
os.chmod(target,0755)
except (AttributeError, os.error):
pass
|
from distutils.command.install_scripts import install_scripts \
as _install_scripts
from easy_install import get_script_args
from pkg_resources import Distribution, PathMetadata, ensure_directory
import os
from distutils import log
class install_scripts(_install_scripts):
"""Do normal script install, plus any egg_info wrapper scripts"""
def run(self):
self.run_command("egg_info")
_install_scripts.run(self) # run first to set up self.outfiles
ei_cmd = self.get_finalized_command("egg_info")
dist = Distribution(
ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
ei_cmd.egg_name, ei_cmd.egg_version,
)
for args in get_script_args(dist):
self.write_script(*args)
def write_script(self, script_name, contents, mode="t", *ignored):
"""Write an executable file to the scripts directory"""
log.info("Installing %s script to %s", script_name, self.install_dir)
target = os.path.join(self.install_dir, script_name)
self.outfiles.append(target)
if not self.dry_run:
ensure_directory(target)
f = open(target,"w"+mode)
f.write(contents)
f.close()
try:
os.chmod(target,0755)
except (AttributeError, os.error):
pass
Fix "legacy mode" trying to install scripts when there are none.
--HG--
branch : setuptools
extra : convert_revision : svn%3A6015fed2-1504-0410-9fe1-9d1591cc4771/sandbox/trunk/setuptools%4041777
from distutils.command.install_scripts import install_scripts \
as _install_scripts
from easy_install import get_script_args
from pkg_resources import Distribution, PathMetadata, ensure_directory
import os
from distutils import log
class install_scripts(_install_scripts):
"""Do normal script install, plus any egg_info wrapper scripts"""
def run(self):
self.run_command("egg_info")
if self.distribution.scripts:
_install_scripts.run(self) # run first to set up self.outfiles
else:
self.outfiles = []
ei_cmd = self.get_finalized_command("egg_info")
dist = Distribution(
ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
ei_cmd.egg_name, ei_cmd.egg_version,
)
for args in get_script_args(dist):
self.write_script(*args)
def write_script(self, script_name, contents, mode="t", *ignored):
"""Write an executable file to the scripts directory"""
log.info("Installing %s script to %s", script_name, self.install_dir)
target = os.path.join(self.install_dir, script_name)
self.outfiles.append(target)
if not self.dry_run:
ensure_directory(target)
f = open(target,"w"+mode)
f.write(contents)
f.close()
try:
os.chmod(target,0755)
except (AttributeError, os.error):
pass
|
<commit_before>from distutils.command.install_scripts import install_scripts \
as _install_scripts
from easy_install import get_script_args
from pkg_resources import Distribution, PathMetadata, ensure_directory
import os
from distutils import log
class install_scripts(_install_scripts):
"""Do normal script install, plus any egg_info wrapper scripts"""
def run(self):
self.run_command("egg_info")
_install_scripts.run(self) # run first to set up self.outfiles
ei_cmd = self.get_finalized_command("egg_info")
dist = Distribution(
ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
ei_cmd.egg_name, ei_cmd.egg_version,
)
for args in get_script_args(dist):
self.write_script(*args)
def write_script(self, script_name, contents, mode="t", *ignored):
"""Write an executable file to the scripts directory"""
log.info("Installing %s script to %s", script_name, self.install_dir)
target = os.path.join(self.install_dir, script_name)
self.outfiles.append(target)
if not self.dry_run:
ensure_directory(target)
f = open(target,"w"+mode)
f.write(contents)
f.close()
try:
os.chmod(target,0755)
except (AttributeError, os.error):
pass
<commit_msg>Fix "legacy mode" trying to install scripts when there are none.
--HG--
branch : setuptools
extra : convert_revision : svn%3A6015fed2-1504-0410-9fe1-9d1591cc4771/sandbox/trunk/setuptools%4041777<commit_after>from distutils.command.install_scripts import install_scripts \
as _install_scripts
from easy_install import get_script_args
from pkg_resources import Distribution, PathMetadata, ensure_directory
import os
from distutils import log
class install_scripts(_install_scripts):
"""Do normal script install, plus any egg_info wrapper scripts"""
def run(self):
self.run_command("egg_info")
if self.distribution.scripts:
_install_scripts.run(self) # run first to set up self.outfiles
else:
self.outfiles = []
ei_cmd = self.get_finalized_command("egg_info")
dist = Distribution(
ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
ei_cmd.egg_name, ei_cmd.egg_version,
)
for args in get_script_args(dist):
self.write_script(*args)
def write_script(self, script_name, contents, mode="t", *ignored):
"""Write an executable file to the scripts directory"""
log.info("Installing %s script to %s", script_name, self.install_dir)
target = os.path.join(self.install_dir, script_name)
self.outfiles.append(target)
if not self.dry_run:
ensure_directory(target)
f = open(target,"w"+mode)
f.write(contents)
f.close()
try:
os.chmod(target,0755)
except (AttributeError, os.error):
pass
|
03bef06e48d513b882b417476e60544eade1cdc4
|
gitcms/simplecms/urls.py
|
gitcms/simplecms/urls.py
|
from django.conf.urls.defaults import *
import views
urlpatterns = patterns('',
(r'^(?P<url>.+)/?', views.article),
)
|
from django.conf.urls.defaults import *
import views
urlpatterns = patterns('',
(r'^(?P<url>.*)/?', views.article),
)
|
Work for the empty url
|
Work for the empty url
|
Python
|
agpl-3.0
|
luispedro/django-gitcms,luispedro/django-gitcms
|
from django.conf.urls.defaults import *
import views
urlpatterns = patterns('',
(r'^(?P<url>.+)/?', views.article),
)
Work for the empty url
|
from django.conf.urls.defaults import *
import views
urlpatterns = patterns('',
(r'^(?P<url>.*)/?', views.article),
)
|
<commit_before>from django.conf.urls.defaults import *
import views
urlpatterns = patterns('',
(r'^(?P<url>.+)/?', views.article),
)
<commit_msg>Work for the empty url<commit_after>
|
from django.conf.urls.defaults import *
import views
urlpatterns = patterns('',
(r'^(?P<url>.*)/?', views.article),
)
|
from django.conf.urls.defaults import *
import views
urlpatterns = patterns('',
(r'^(?P<url>.+)/?', views.article),
)
Work for the empty url
from django.conf.urls.defaults import *
import views
urlpatterns = patterns('',
(r'^(?P<url>.*)/?', views.article),
)
|
<commit_before>from django.conf.urls.defaults import *
import views
urlpatterns = patterns('',
(r'^(?P<url>.+)/?', views.article),
)
<commit_msg>Work for the empty url<commit_after>from django.conf.urls.defaults import *
import views
urlpatterns = patterns('',
(r'^(?P<url>.*)/?', views.article),
)
|
f9312ee3441d4d6e60b323e88afc4dc0284ed66b
|
derrida/__init__.py
|
derrida/__init__.py
|
__version_info__ = (1, 0, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
|
__version_info__ = (1, 0, 0, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
|
Set version to 1.0 final !
|
Set version to 1.0 final !
|
Python
|
apache-2.0
|
Princeton-CDH/derrida-django,Princeton-CDH/derrida-django,Princeton-CDH/derrida-django,Princeton-CDH/derrida-django
|
__version_info__ = (1, 0, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
Set version to 1.0 final !
|
__version_info__ = (1, 0, 0, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
|
<commit_before>__version_info__ = (1, 0, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
<commit_msg>Set version to 1.0 final !<commit_after>
|
__version_info__ = (1, 0, 0, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
|
__version_info__ = (1, 0, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
Set version to 1.0 final !
__version_info__ = (1, 0, 0, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
|
<commit_before>__version_info__ = (1, 0, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
<commit_msg>Set version to 1.0 final !<commit_after>__version_info__ = (1, 0, 0, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
|
56236454f252ab8feee461c49c26b9eee70a7e09
|
vpython/_vector_import_helper.py
|
vpython/_vector_import_helper.py
|
import platform
try:
if platform.python_implementation() == 'PyPy':
from .vector import * # use pure python vector for PyPy
else:
from .cyvector import *
v = vector(0,0,0)
except:
from .vector import *
# synonyms in GlowScript
vec = vector
|
import platform
try:
if platform.python_implementation() == 'PyPy':
from .vector import * # use pure python vector for PyPy
else:
from .cyvector import *
v = vector(0,0,0)
except:
from .vector import *
# Remove platform from the namespace now that we are done with it
del platform
# synonyms in GlowScript
vec = vector
|
Delete platform so that it doesn't end up in the user's namespace
|
Delete platform so that it doesn't end up in the user's namespace
|
Python
|
mit
|
BruceSherwood/vpython-jupyter,mwcraig/vpython-jupyter,BruceSherwood/vpython-jupyter,mwcraig/vpython-jupyter,mwcraig/vpython-jupyter,mwcraig/vpython-jupyter,BruceSherwood/vpython-jupyter,BruceSherwood/vpython-jupyter
|
import platform
try:
if platform.python_implementation() == 'PyPy':
from .vector import * # use pure python vector for PyPy
else:
from .cyvector import *
v = vector(0,0,0)
except:
from .vector import *
# synonyms in GlowScript
vec = vector
Delete platform so that it doesn't end up in the user's namespace
|
import platform
try:
if platform.python_implementation() == 'PyPy':
from .vector import * # use pure python vector for PyPy
else:
from .cyvector import *
v = vector(0,0,0)
except:
from .vector import *
# Remove platform from the namespace now that we are done with it
del platform
# synonyms in GlowScript
vec = vector
|
<commit_before>import platform
try:
if platform.python_implementation() == 'PyPy':
from .vector import * # use pure python vector for PyPy
else:
from .cyvector import *
v = vector(0,0,0)
except:
from .vector import *
# synonyms in GlowScript
vec = vector
<commit_msg>Delete platform so that it doesn't end up in the user's namespace<commit_after>
|
import platform
try:
if platform.python_implementation() == 'PyPy':
from .vector import * # use pure python vector for PyPy
else:
from .cyvector import *
v = vector(0,0,0)
except:
from .vector import *
# Remove platform from the namespace now that we are done with it
del platform
# synonyms in GlowScript
vec = vector
|
import platform
try:
if platform.python_implementation() == 'PyPy':
from .vector import * # use pure python vector for PyPy
else:
from .cyvector import *
v = vector(0,0,0)
except:
from .vector import *
# synonyms in GlowScript
vec = vector
Delete platform so that it doesn't end up in the user's namespace
import platform
try:
if platform.python_implementation() == 'PyPy':
from .vector import * # use pure python vector for PyPy
else:
from .cyvector import *
v = vector(0,0,0)
except:
from .vector import *
# Remove platform from the namespace now that we are done with it
del platform
# synonyms in GlowScript
vec = vector
|
<commit_before>import platform
try:
if platform.python_implementation() == 'PyPy':
from .vector import * # use pure python vector for PyPy
else:
from .cyvector import *
v = vector(0,0,0)
except:
from .vector import *
# synonyms in GlowScript
vec = vector
<commit_msg>Delete platform so that it doesn't end up in the user's namespace<commit_after>import platform
try:
if platform.python_implementation() == 'PyPy':
from .vector import * # use pure python vector for PyPy
else:
from .cyvector import *
v = vector(0,0,0)
except:
from .vector import *
# Remove platform from the namespace now that we are done with it
del platform
# synonyms in GlowScript
vec = vector
|
8c4059b9467b586ba54e387c9cf7f134a71aaba8
|
utils.py
|
utils.py
|
import os
from google.appengine.api import apiproxy_stub_map
from google.appengine.api.app_identity import get_application_id
have_appserver = bool(apiproxy_stub_map.apiproxy.GetStub('datastore_v3'))
def appid():
if have_appserver:
return get_application_id()
else:
try:
from .boot import PROJECT_DIR
from google.appengine.tools import dev_appserver
appconfig = dev_appserver.LoadAppConfig(PROJECT_DIR, {},
default_partition='dev')[0]
return appconfig.application.split('~', 1)[-1]
except ImportError, e:
raise Exception("Could not get appid. Is your app.yaml file missing? "
"Error was: %s" % e)
on_production_server = have_appserver and \
not os.environ.get('SERVER_SOFTWARE', '').lower().startswith('devel')
|
import os
from google.appengine.api import apiproxy_stub_map
from google.appengine.api.app_identity import get_application_id
have_appserver = bool(apiproxy_stub_map.apiproxy.GetStub('datastore_v3'))
if not have_appserver:
from .boot import PROJECT_DIR
from google.appengine.tools import dev_appserver
appconfig = dev_appserver.LoadAppConfig(PROJECT_DIR, {},
default_partition='dev')[0]
def appid():
if have_appserver:
return get_application_id()
else:
try:
return appconfig.application.split('~', 1)[-1]
except ImportError, e:
raise Exception("Could not get appid. Is your app.yaml file missing? "
"Error was: %s" % e)
on_production_server = have_appserver and \
not os.environ.get('SERVER_SOFTWARE', '').lower().startswith('devel')
|
Fix a bug introduced in the last commit
|
Fix a bug introduced in the last commit
|
Python
|
bsd-3-clause
|
potatolondon/djangoappengine-1-4,potatolondon/djangoappengine-1-4
|
import os
from google.appengine.api import apiproxy_stub_map
from google.appengine.api.app_identity import get_application_id
have_appserver = bool(apiproxy_stub_map.apiproxy.GetStub('datastore_v3'))
def appid():
if have_appserver:
return get_application_id()
else:
try:
from .boot import PROJECT_DIR
from google.appengine.tools import dev_appserver
appconfig = dev_appserver.LoadAppConfig(PROJECT_DIR, {},
default_partition='dev')[0]
return appconfig.application.split('~', 1)[-1]
except ImportError, e:
raise Exception("Could not get appid. Is your app.yaml file missing? "
"Error was: %s" % e)
on_production_server = have_appserver and \
not os.environ.get('SERVER_SOFTWARE', '').lower().startswith('devel')
Fix a bug introduced in the last commit
|
import os
from google.appengine.api import apiproxy_stub_map
from google.appengine.api.app_identity import get_application_id
have_appserver = bool(apiproxy_stub_map.apiproxy.GetStub('datastore_v3'))
if not have_appserver:
from .boot import PROJECT_DIR
from google.appengine.tools import dev_appserver
appconfig = dev_appserver.LoadAppConfig(PROJECT_DIR, {},
default_partition='dev')[0]
def appid():
if have_appserver:
return get_application_id()
else:
try:
return appconfig.application.split('~', 1)[-1]
except ImportError, e:
raise Exception("Could not get appid. Is your app.yaml file missing? "
"Error was: %s" % e)
on_production_server = have_appserver and \
not os.environ.get('SERVER_SOFTWARE', '').lower().startswith('devel')
|
<commit_before>import os
from google.appengine.api import apiproxy_stub_map
from google.appengine.api.app_identity import get_application_id
have_appserver = bool(apiproxy_stub_map.apiproxy.GetStub('datastore_v3'))
def appid():
if have_appserver:
return get_application_id()
else:
try:
from .boot import PROJECT_DIR
from google.appengine.tools import dev_appserver
appconfig = dev_appserver.LoadAppConfig(PROJECT_DIR, {},
default_partition='dev')[0]
return appconfig.application.split('~', 1)[-1]
except ImportError, e:
raise Exception("Could not get appid. Is your app.yaml file missing? "
"Error was: %s" % e)
on_production_server = have_appserver and \
not os.environ.get('SERVER_SOFTWARE', '').lower().startswith('devel')
<commit_msg>Fix a bug introduced in the last commit<commit_after>
|
import os
from google.appengine.api import apiproxy_stub_map
from google.appengine.api.app_identity import get_application_id
have_appserver = bool(apiproxy_stub_map.apiproxy.GetStub('datastore_v3'))
if not have_appserver:
from .boot import PROJECT_DIR
from google.appengine.tools import dev_appserver
appconfig = dev_appserver.LoadAppConfig(PROJECT_DIR, {},
default_partition='dev')[0]
def appid():
if have_appserver:
return get_application_id()
else:
try:
return appconfig.application.split('~', 1)[-1]
except ImportError, e:
raise Exception("Could not get appid. Is your app.yaml file missing? "
"Error was: %s" % e)
on_production_server = have_appserver and \
not os.environ.get('SERVER_SOFTWARE', '').lower().startswith('devel')
|
import os
from google.appengine.api import apiproxy_stub_map
from google.appengine.api.app_identity import get_application_id
have_appserver = bool(apiproxy_stub_map.apiproxy.GetStub('datastore_v3'))
def appid():
if have_appserver:
return get_application_id()
else:
try:
from .boot import PROJECT_DIR
from google.appengine.tools import dev_appserver
appconfig = dev_appserver.LoadAppConfig(PROJECT_DIR, {},
default_partition='dev')[0]
return appconfig.application.split('~', 1)[-1]
except ImportError, e:
raise Exception("Could not get appid. Is your app.yaml file missing? "
"Error was: %s" % e)
on_production_server = have_appserver and \
not os.environ.get('SERVER_SOFTWARE', '').lower().startswith('devel')
Fix a bug introduced in the last commit
import os
from google.appengine.api import apiproxy_stub_map
from google.appengine.api.app_identity import get_application_id
have_appserver = bool(apiproxy_stub_map.apiproxy.GetStub('datastore_v3'))
if not have_appserver:
from .boot import PROJECT_DIR
from google.appengine.tools import dev_appserver
appconfig = dev_appserver.LoadAppConfig(PROJECT_DIR, {},
default_partition='dev')[0]
def appid():
if have_appserver:
return get_application_id()
else:
try:
return appconfig.application.split('~', 1)[-1]
except ImportError, e:
raise Exception("Could not get appid. Is your app.yaml file missing? "
"Error was: %s" % e)
on_production_server = have_appserver and \
not os.environ.get('SERVER_SOFTWARE', '').lower().startswith('devel')
|
<commit_before>import os
from google.appengine.api import apiproxy_stub_map
from google.appengine.api.app_identity import get_application_id
have_appserver = bool(apiproxy_stub_map.apiproxy.GetStub('datastore_v3'))
def appid():
if have_appserver:
return get_application_id()
else:
try:
from .boot import PROJECT_DIR
from google.appengine.tools import dev_appserver
appconfig = dev_appserver.LoadAppConfig(PROJECT_DIR, {},
default_partition='dev')[0]
return appconfig.application.split('~', 1)[-1]
except ImportError, e:
raise Exception("Could not get appid. Is your app.yaml file missing? "
"Error was: %s" % e)
on_production_server = have_appserver and \
not os.environ.get('SERVER_SOFTWARE', '').lower().startswith('devel')
<commit_msg>Fix a bug introduced in the last commit<commit_after>import os
from google.appengine.api import apiproxy_stub_map
from google.appengine.api.app_identity import get_application_id
have_appserver = bool(apiproxy_stub_map.apiproxy.GetStub('datastore_v3'))
if not have_appserver:
from .boot import PROJECT_DIR
from google.appengine.tools import dev_appserver
appconfig = dev_appserver.LoadAppConfig(PROJECT_DIR, {},
default_partition='dev')[0]
def appid():
if have_appserver:
return get_application_id()
else:
try:
return appconfig.application.split('~', 1)[-1]
except ImportError, e:
raise Exception("Could not get appid. Is your app.yaml file missing? "
"Error was: %s" % e)
on_production_server = have_appserver and \
not os.environ.get('SERVER_SOFTWARE', '').lower().startswith('devel')
|
7558ffc73ebb6300e186fe508497a32acbc0c5ae
|
src/pythonic/test_primes.py
|
src/pythonic/test_primes.py
|
import pytest
import itertools
from main import Primes, Sieve
def test_sieve_limit():
limit = 10000
with Sieve(limit) as s:
assert s.upper_bound() >= limit
def test_upper_bound_exception():
limit = 10
with Sieve(limit) as s:
with pytest.raises(IndexError):
s.is_prime(101)
def test_zero_is_not_in_prime_list():
with Primes() as p:
n = 20
assert 0 not in list(itertools.islice(p, n))
def test_number_primes_asked_is_given():
with Primes() as p:
n = 20
assert len(list(itertools.islice(p, n))) == n
|
import pytest
import itertools
from main import Primes, Sieve
def test_sieve_limit():
limit = 10000
with Sieve(limit) as s:
assert s.upper_bound() >= limit
def test_checking_above_upper_bound_is_an_error():
limit = 10
with Sieve(limit) as s:
with pytest.raises(IndexError):
s.is_prime(101)
def test_zero_is_not_in_prime_list():
with Primes() as p:
n = 20
assert 0 not in list(itertools.islice(p, n))
def test_number_primes_asked_is_given():
with Primes() as p:
n = 20
assert len(list(itertools.islice(p, n))) == n
|
Reword guard test on upper bounds
|
Reword guard test on upper bounds
|
Python
|
cc0-1.0
|
Michael-F-Bryan/rust-ffi-guide,Michael-F-Bryan/rust-ffi-guide,Michael-F-Bryan/rust-ffi-guide
|
import pytest
import itertools
from main import Primes, Sieve
def test_sieve_limit():
limit = 10000
with Sieve(limit) as s:
assert s.upper_bound() >= limit
def test_upper_bound_exception():
limit = 10
with Sieve(limit) as s:
with pytest.raises(IndexError):
s.is_prime(101)
def test_zero_is_not_in_prime_list():
with Primes() as p:
n = 20
assert 0 not in list(itertools.islice(p, n))
def test_number_primes_asked_is_given():
with Primes() as p:
n = 20
assert len(list(itertools.islice(p, n))) == n
Reword guard test on upper bounds
|
import pytest
import itertools
from main import Primes, Sieve
def test_sieve_limit():
limit = 10000
with Sieve(limit) as s:
assert s.upper_bound() >= limit
def test_checking_above_upper_bound_is_an_error():
limit = 10
with Sieve(limit) as s:
with pytest.raises(IndexError):
s.is_prime(101)
def test_zero_is_not_in_prime_list():
with Primes() as p:
n = 20
assert 0 not in list(itertools.islice(p, n))
def test_number_primes_asked_is_given():
with Primes() as p:
n = 20
assert len(list(itertools.islice(p, n))) == n
|
<commit_before>import pytest
import itertools
from main import Primes, Sieve
def test_sieve_limit():
limit = 10000
with Sieve(limit) as s:
assert s.upper_bound() >= limit
def test_upper_bound_exception():
limit = 10
with Sieve(limit) as s:
with pytest.raises(IndexError):
s.is_prime(101)
def test_zero_is_not_in_prime_list():
with Primes() as p:
n = 20
assert 0 not in list(itertools.islice(p, n))
def test_number_primes_asked_is_given():
with Primes() as p:
n = 20
assert len(list(itertools.islice(p, n))) == n
<commit_msg>Reword guard test on upper bounds<commit_after>
|
import pytest
import itertools
from main import Primes, Sieve
def test_sieve_limit():
limit = 10000
with Sieve(limit) as s:
assert s.upper_bound() >= limit
def test_checking_above_upper_bound_is_an_error():
limit = 10
with Sieve(limit) as s:
with pytest.raises(IndexError):
s.is_prime(101)
def test_zero_is_not_in_prime_list():
with Primes() as p:
n = 20
assert 0 not in list(itertools.islice(p, n))
def test_number_primes_asked_is_given():
with Primes() as p:
n = 20
assert len(list(itertools.islice(p, n))) == n
|
import pytest
import itertools
from main import Primes, Sieve
def test_sieve_limit():
limit = 10000
with Sieve(limit) as s:
assert s.upper_bound() >= limit
def test_upper_bound_exception():
limit = 10
with Sieve(limit) as s:
with pytest.raises(IndexError):
s.is_prime(101)
def test_zero_is_not_in_prime_list():
with Primes() as p:
n = 20
assert 0 not in list(itertools.islice(p, n))
def test_number_primes_asked_is_given():
with Primes() as p:
n = 20
assert len(list(itertools.islice(p, n))) == n
Reword guard test on upper bounds
import pytest
import itertools
from main import Primes, Sieve
def test_sieve_limit():
limit = 10000
with Sieve(limit) as s:
assert s.upper_bound() >= limit
def test_checking_above_upper_bound_is_an_error():
limit = 10
with Sieve(limit) as s:
with pytest.raises(IndexError):
s.is_prime(101)
def test_zero_is_not_in_prime_list():
with Primes() as p:
n = 20
assert 0 not in list(itertools.islice(p, n))
def test_number_primes_asked_is_given():
with Primes() as p:
n = 20
assert len(list(itertools.islice(p, n))) == n
|
<commit_before>import pytest
import itertools
from main import Primes, Sieve
def test_sieve_limit():
limit = 10000
with Sieve(limit) as s:
assert s.upper_bound() >= limit
def test_upper_bound_exception():
limit = 10
with Sieve(limit) as s:
with pytest.raises(IndexError):
s.is_prime(101)
def test_zero_is_not_in_prime_list():
with Primes() as p:
n = 20
assert 0 not in list(itertools.islice(p, n))
def test_number_primes_asked_is_given():
with Primes() as p:
n = 20
assert len(list(itertools.islice(p, n))) == n
<commit_msg>Reword guard test on upper bounds<commit_after>import pytest
import itertools
from main import Primes, Sieve
def test_sieve_limit():
limit = 10000
with Sieve(limit) as s:
assert s.upper_bound() >= limit
def test_checking_above_upper_bound_is_an_error():
limit = 10
with Sieve(limit) as s:
with pytest.raises(IndexError):
s.is_prime(101)
def test_zero_is_not_in_prime_list():
with Primes() as p:
n = 20
assert 0 not in list(itertools.islice(p, n))
def test_number_primes_asked_is_given():
with Primes() as p:
n = 20
assert len(list(itertools.islice(p, n))) == n
|
57157a9ca96cb077079f310fb259847eac5ce6fb
|
ztreeauth/component/factories.py
|
ztreeauth/component/factories.py
|
from django.contrib.auth.models import User, Group
from ztreeauth.models import LocalUser
from ztree.component.factories import create_node_factory
import logging
logger = logging.getLogger('ztreeauth')
def local_user_factory(request, local_user_content_type, **kwargs):
logger.info('creating local user "%s" at %s with groups %s' % (kwargs['username'], (request.tree_context.node and request.tree_context.node.absolute_path), kwargs['groups']))
user = User(username=kwargs['username'])
user.set_password(kwargs['password1'])
user.save()
# create LocalUser
local_user = LocalUser(user=user)
local_user.save()
# set auth groups for local_user
for group_name in kwargs['groups']:
grp = Group.objects.get(name=group_name)
local_user.groups.add(grp)
if hasattr(request, 'user'):
username = request.user.username
else:
# if serving backend tree web service, no auth and no request.user
username = kwargs.get('authenticated_username')
new_node = create_node_factory(local_user, parent_node=request.tree_context.node, username=username, slug=user.username)
return new_node
|
from django.contrib.auth.models import User, Group
from ztreeauth.models import LocalUser
from ztreecrud.component.factories import create_node_factory
import logging
logger = logging.getLogger('ztreeauth')
def local_user_factory(request, local_user_content_type, **kwargs):
logger.info('creating local user "%s" at %s with groups %s' % (kwargs['username'], (request.tree_context.node and request.tree_context.node.absolute_path), kwargs['groups']))
user = User(username=kwargs['username'])
user.set_password(kwargs['password1'])
user.save()
# create LocalUser
local_user = LocalUser(user=user)
local_user.save()
# set auth groups for local_user
for group_name in kwargs['groups']:
grp = Group.objects.get(name=group_name)
local_user.groups.add(grp)
if hasattr(request, 'user'):
username = request.user.username
else:
# if serving backend tree web service, no auth and no request.user
username = kwargs.get('authenticated_username')
new_node = create_node_factory(local_user, parent_node=request.tree_context.node, username=username, slug=user.username)
return new_node
|
Fix package path issue caused by previous refactoring commit.
|
Fix package path issue caused by previous refactoring commit.
|
Python
|
mit
|
stana/django-ztree
|
from django.contrib.auth.models import User, Group
from ztreeauth.models import LocalUser
from ztree.component.factories import create_node_factory
import logging
logger = logging.getLogger('ztreeauth')
def local_user_factory(request, local_user_content_type, **kwargs):
logger.info('creating local user "%s" at %s with groups %s' % (kwargs['username'], (request.tree_context.node and request.tree_context.node.absolute_path), kwargs['groups']))
user = User(username=kwargs['username'])
user.set_password(kwargs['password1'])
user.save()
# create LocalUser
local_user = LocalUser(user=user)
local_user.save()
# set auth groups for local_user
for group_name in kwargs['groups']:
grp = Group.objects.get(name=group_name)
local_user.groups.add(grp)
if hasattr(request, 'user'):
username = request.user.username
else:
# if serving backend tree web service, no auth and no request.user
username = kwargs.get('authenticated_username')
new_node = create_node_factory(local_user, parent_node=request.tree_context.node, username=username, slug=user.username)
return new_node
Fix package path issue caused by previous refactoring commit.
|
from django.contrib.auth.models import User, Group
from ztreeauth.models import LocalUser
from ztreecrud.component.factories import create_node_factory
import logging
logger = logging.getLogger('ztreeauth')
def local_user_factory(request, local_user_content_type, **kwargs):
logger.info('creating local user "%s" at %s with groups %s' % (kwargs['username'], (request.tree_context.node and request.tree_context.node.absolute_path), kwargs['groups']))
user = User(username=kwargs['username'])
user.set_password(kwargs['password1'])
user.save()
# create LocalUser
local_user = LocalUser(user=user)
local_user.save()
# set auth groups for local_user
for group_name in kwargs['groups']:
grp = Group.objects.get(name=group_name)
local_user.groups.add(grp)
if hasattr(request, 'user'):
username = request.user.username
else:
# if serving backend tree web service, no auth and no request.user
username = kwargs.get('authenticated_username')
new_node = create_node_factory(local_user, parent_node=request.tree_context.node, username=username, slug=user.username)
return new_node
|
<commit_before>from django.contrib.auth.models import User, Group
from ztreeauth.models import LocalUser
from ztree.component.factories import create_node_factory
import logging
logger = logging.getLogger('ztreeauth')
def local_user_factory(request, local_user_content_type, **kwargs):
logger.info('creating local user "%s" at %s with groups %s' % (kwargs['username'], (request.tree_context.node and request.tree_context.node.absolute_path), kwargs['groups']))
user = User(username=kwargs['username'])
user.set_password(kwargs['password1'])
user.save()
# create LocalUser
local_user = LocalUser(user=user)
local_user.save()
# set auth groups for local_user
for group_name in kwargs['groups']:
grp = Group.objects.get(name=group_name)
local_user.groups.add(grp)
if hasattr(request, 'user'):
username = request.user.username
else:
# if serving backend tree web service, no auth and no request.user
username = kwargs.get('authenticated_username')
new_node = create_node_factory(local_user, parent_node=request.tree_context.node, username=username, slug=user.username)
return new_node
<commit_msg>Fix package path issue caused by previous refactoring commit.<commit_after>
|
from django.contrib.auth.models import User, Group
from ztreeauth.models import LocalUser
from ztreecrud.component.factories import create_node_factory
import logging
logger = logging.getLogger('ztreeauth')
def local_user_factory(request, local_user_content_type, **kwargs):
logger.info('creating local user "%s" at %s with groups %s' % (kwargs['username'], (request.tree_context.node and request.tree_context.node.absolute_path), kwargs['groups']))
user = User(username=kwargs['username'])
user.set_password(kwargs['password1'])
user.save()
# create LocalUser
local_user = LocalUser(user=user)
local_user.save()
# set auth groups for local_user
for group_name in kwargs['groups']:
grp = Group.objects.get(name=group_name)
local_user.groups.add(grp)
if hasattr(request, 'user'):
username = request.user.username
else:
# if serving backend tree web service, no auth and no request.user
username = kwargs.get('authenticated_username')
new_node = create_node_factory(local_user, parent_node=request.tree_context.node, username=username, slug=user.username)
return new_node
|
from django.contrib.auth.models import User, Group
from ztreeauth.models import LocalUser
from ztree.component.factories import create_node_factory
import logging
logger = logging.getLogger('ztreeauth')
def local_user_factory(request, local_user_content_type, **kwargs):
logger.info('creating local user "%s" at %s with groups %s' % (kwargs['username'], (request.tree_context.node and request.tree_context.node.absolute_path), kwargs['groups']))
user = User(username=kwargs['username'])
user.set_password(kwargs['password1'])
user.save()
# create LocalUser
local_user = LocalUser(user=user)
local_user.save()
# set auth groups for local_user
for group_name in kwargs['groups']:
grp = Group.objects.get(name=group_name)
local_user.groups.add(grp)
if hasattr(request, 'user'):
username = request.user.username
else:
# if serving backend tree web service, no auth and no request.user
username = kwargs.get('authenticated_username')
new_node = create_node_factory(local_user, parent_node=request.tree_context.node, username=username, slug=user.username)
return new_node
Fix package path issue caused by previous refactoring commit.
from django.contrib.auth.models import User, Group
from ztreeauth.models import LocalUser
from ztreecrud.component.factories import create_node_factory
import logging
logger = logging.getLogger('ztreeauth')
def local_user_factory(request, local_user_content_type, **kwargs):
logger.info('creating local user "%s" at %s with groups %s' % (kwargs['username'], (request.tree_context.node and request.tree_context.node.absolute_path), kwargs['groups']))
user = User(username=kwargs['username'])
user.set_password(kwargs['password1'])
user.save()
# create LocalUser
local_user = LocalUser(user=user)
local_user.save()
# set auth groups for local_user
for group_name in kwargs['groups']:
grp = Group.objects.get(name=group_name)
local_user.groups.add(grp)
if hasattr(request, 'user'):
username = request.user.username
else:
# if serving backend tree web service, no auth and no request.user
username = kwargs.get('authenticated_username')
new_node = create_node_factory(local_user, parent_node=request.tree_context.node, username=username, slug=user.username)
return new_node
|
<commit_before>from django.contrib.auth.models import User, Group
from ztreeauth.models import LocalUser
from ztree.component.factories import create_node_factory
import logging
logger = logging.getLogger('ztreeauth')
def local_user_factory(request, local_user_content_type, **kwargs):
logger.info('creating local user "%s" at %s with groups %s' % (kwargs['username'], (request.tree_context.node and request.tree_context.node.absolute_path), kwargs['groups']))
user = User(username=kwargs['username'])
user.set_password(kwargs['password1'])
user.save()
# create LocalUser
local_user = LocalUser(user=user)
local_user.save()
# set auth groups for local_user
for group_name in kwargs['groups']:
grp = Group.objects.get(name=group_name)
local_user.groups.add(grp)
if hasattr(request, 'user'):
username = request.user.username
else:
# if serving backend tree web service, no auth and no request.user
username = kwargs.get('authenticated_username')
new_node = create_node_factory(local_user, parent_node=request.tree_context.node, username=username, slug=user.username)
return new_node
<commit_msg>Fix package path issue caused by previous refactoring commit.<commit_after>from django.contrib.auth.models import User, Group
from ztreeauth.models import LocalUser
from ztreecrud.component.factories import create_node_factory
import logging
logger = logging.getLogger('ztreeauth')
def local_user_factory(request, local_user_content_type, **kwargs):
logger.info('creating local user "%s" at %s with groups %s' % (kwargs['username'], (request.tree_context.node and request.tree_context.node.absolute_path), kwargs['groups']))
user = User(username=kwargs['username'])
user.set_password(kwargs['password1'])
user.save()
# create LocalUser
local_user = LocalUser(user=user)
local_user.save()
# set auth groups for local_user
for group_name in kwargs['groups']:
grp = Group.objects.get(name=group_name)
local_user.groups.add(grp)
if hasattr(request, 'user'):
username = request.user.username
else:
# if serving backend tree web service, no auth and no request.user
username = kwargs.get('authenticated_username')
new_node = create_node_factory(local_user, parent_node=request.tree_context.node, username=username, slug=user.username)
return new_node
|
8036a12794f61192dbd8639c84395d8cbb31fb31
|
axes_login_actions/signals.py
|
axes_login_actions/signals.py
|
# -*- coding: utf-8 -*-
from axes.models import AccessAttempt
from django.conf import settings
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.utils.importlib import import_module
DEFAULT_ACTION = 'axes_login_actions.actions.email.notify'
ACTIONS = getattr(settings, 'AXES_LOGIN_ACTIONS', [DEFAULT_ACTION])
#----------------------------------------------------------------------
def import_dotted_path(path):
"""
Takes a dotted path to a member name in a module, and returns
the member after importing it.
"""
# stolen from Mezzanine (mezzanine.utils.importing.import_dotted_path)
try:
module_path, member_name = path.rsplit(".", 1)
module = import_module(module_path)
return getattr(module, member_name)
except (ValueError, ImportError, AttributeError), e:
raise ImportError("Could not import the name: %s: %s" % (path, e))
#----------------------------------------------------------------------
@receiver(post_save, sender=AccessAttempt)
def access_attempt_handler(sender, instance, **kwargs):
for action_path in ACTIONS:
action = import_dotted_path(action_path)
action(instance, **kwargs)
|
# -*- coding: utf-8 -*-
from axes.models import AccessAttempt
from django.conf import settings
from django.db.models.signals import post_save
from django.dispatch import receiver
from importlib import import_module
DEFAULT_ACTION = 'axes_login_actions.actions.email.notify'
ACTIONS = getattr(settings, 'AXES_LOGIN_ACTIONS', [DEFAULT_ACTION])
#----------------------------------------------------------------------
def import_dotted_path(path):
"""
Takes a dotted path to a member name in a module, and returns
the member after importing it.
"""
# stolen from Mezzanine (mezzanine.utils.importing.import_dotted_path)
try:
module_path, member_name = path.rsplit(".", 1)
module = import_module(module_path)
return getattr(module, member_name)
except (ValueError, ImportError, AttributeError), e:
raise ImportError("Could not import the name: %s: %s" % (path, e))
#----------------------------------------------------------------------
@receiver(post_save, sender=AccessAttempt)
def access_attempt_handler(sender, instance, **kwargs):
for action_path in ACTIONS:
action = import_dotted_path(action_path)
action(instance, **kwargs)
|
Use importlib from Python instead from Django
|
Use importlib from Python instead from Django
|
Python
|
bsd-3-clause
|
eht16/django-axes-login-actions
|
# -*- coding: utf-8 -*-
from axes.models import AccessAttempt
from django.conf import settings
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.utils.importlib import import_module
DEFAULT_ACTION = 'axes_login_actions.actions.email.notify'
ACTIONS = getattr(settings, 'AXES_LOGIN_ACTIONS', [DEFAULT_ACTION])
#----------------------------------------------------------------------
def import_dotted_path(path):
"""
Takes a dotted path to a member name in a module, and returns
the member after importing it.
"""
# stolen from Mezzanine (mezzanine.utils.importing.import_dotted_path)
try:
module_path, member_name = path.rsplit(".", 1)
module = import_module(module_path)
return getattr(module, member_name)
except (ValueError, ImportError, AttributeError), e:
raise ImportError("Could not import the name: %s: %s" % (path, e))
#----------------------------------------------------------------------
@receiver(post_save, sender=AccessAttempt)
def access_attempt_handler(sender, instance, **kwargs):
for action_path in ACTIONS:
action = import_dotted_path(action_path)
action(instance, **kwargs)
Use importlib from Python instead from Django
|
# -*- coding: utf-8 -*-
from axes.models import AccessAttempt
from django.conf import settings
from django.db.models.signals import post_save
from django.dispatch import receiver
from importlib import import_module
DEFAULT_ACTION = 'axes_login_actions.actions.email.notify'
ACTIONS = getattr(settings, 'AXES_LOGIN_ACTIONS', [DEFAULT_ACTION])
#----------------------------------------------------------------------
def import_dotted_path(path):
"""
Takes a dotted path to a member name in a module, and returns
the member after importing it.
"""
# stolen from Mezzanine (mezzanine.utils.importing.import_dotted_path)
try:
module_path, member_name = path.rsplit(".", 1)
module = import_module(module_path)
return getattr(module, member_name)
except (ValueError, ImportError, AttributeError), e:
raise ImportError("Could not import the name: %s: %s" % (path, e))
#----------------------------------------------------------------------
@receiver(post_save, sender=AccessAttempt)
def access_attempt_handler(sender, instance, **kwargs):
for action_path in ACTIONS:
action = import_dotted_path(action_path)
action(instance, **kwargs)
|
<commit_before># -*- coding: utf-8 -*-
from axes.models import AccessAttempt
from django.conf import settings
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.utils.importlib import import_module
DEFAULT_ACTION = 'axes_login_actions.actions.email.notify'
ACTIONS = getattr(settings, 'AXES_LOGIN_ACTIONS', [DEFAULT_ACTION])
#----------------------------------------------------------------------
def import_dotted_path(path):
"""
Takes a dotted path to a member name in a module, and returns
the member after importing it.
"""
# stolen from Mezzanine (mezzanine.utils.importing.import_dotted_path)
try:
module_path, member_name = path.rsplit(".", 1)
module = import_module(module_path)
return getattr(module, member_name)
except (ValueError, ImportError, AttributeError), e:
raise ImportError("Could not import the name: %s: %s" % (path, e))
#----------------------------------------------------------------------
@receiver(post_save, sender=AccessAttempt)
def access_attempt_handler(sender, instance, **kwargs):
for action_path in ACTIONS:
action = import_dotted_path(action_path)
action(instance, **kwargs)
<commit_msg>Use importlib from Python instead from Django<commit_after>
|
# -*- coding: utf-8 -*-
from axes.models import AccessAttempt
from django.conf import settings
from django.db.models.signals import post_save
from django.dispatch import receiver
from importlib import import_module
DEFAULT_ACTION = 'axes_login_actions.actions.email.notify'
ACTIONS = getattr(settings, 'AXES_LOGIN_ACTIONS', [DEFAULT_ACTION])
#----------------------------------------------------------------------
def import_dotted_path(path):
"""
Takes a dotted path to a member name in a module, and returns
the member after importing it.
"""
# stolen from Mezzanine (mezzanine.utils.importing.import_dotted_path)
try:
module_path, member_name = path.rsplit(".", 1)
module = import_module(module_path)
return getattr(module, member_name)
except (ValueError, ImportError, AttributeError), e:
raise ImportError("Could not import the name: %s: %s" % (path, e))
#----------------------------------------------------------------------
@receiver(post_save, sender=AccessAttempt)
def access_attempt_handler(sender, instance, **kwargs):
for action_path in ACTIONS:
action = import_dotted_path(action_path)
action(instance, **kwargs)
|
# -*- coding: utf-8 -*-
from axes.models import AccessAttempt
from django.conf import settings
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.utils.importlib import import_module
DEFAULT_ACTION = 'axes_login_actions.actions.email.notify'
ACTIONS = getattr(settings, 'AXES_LOGIN_ACTIONS', [DEFAULT_ACTION])
#----------------------------------------------------------------------
def import_dotted_path(path):
"""
Takes a dotted path to a member name in a module, and returns
the member after importing it.
"""
# stolen from Mezzanine (mezzanine.utils.importing.import_dotted_path)
try:
module_path, member_name = path.rsplit(".", 1)
module = import_module(module_path)
return getattr(module, member_name)
except (ValueError, ImportError, AttributeError), e:
raise ImportError("Could not import the name: %s: %s" % (path, e))
#----------------------------------------------------------------------
@receiver(post_save, sender=AccessAttempt)
def access_attempt_handler(sender, instance, **kwargs):
for action_path in ACTIONS:
action = import_dotted_path(action_path)
action(instance, **kwargs)
Use importlib from Python instead from Django
# -*- coding: utf-8 -*-
from axes.models import AccessAttempt
from django.conf import settings
from django.db.models.signals import post_save
from django.dispatch import receiver
from importlib import import_module
DEFAULT_ACTION = 'axes_login_actions.actions.email.notify'
ACTIONS = getattr(settings, 'AXES_LOGIN_ACTIONS', [DEFAULT_ACTION])
#----------------------------------------------------------------------
def import_dotted_path(path):
"""
Takes a dotted path to a member name in a module, and returns
the member after importing it.
"""
# stolen from Mezzanine (mezzanine.utils.importing.import_dotted_path)
try:
module_path, member_name = path.rsplit(".", 1)
module = import_module(module_path)
return getattr(module, member_name)
except (ValueError, ImportError, AttributeError), e:
raise ImportError("Could not import the name: %s: %s" % (path, e))
#----------------------------------------------------------------------
@receiver(post_save, sender=AccessAttempt)
def access_attempt_handler(sender, instance, **kwargs):
for action_path in ACTIONS:
action = import_dotted_path(action_path)
action(instance, **kwargs)
|
<commit_before># -*- coding: utf-8 -*-
from axes.models import AccessAttempt
from django.conf import settings
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.utils.importlib import import_module
DEFAULT_ACTION = 'axes_login_actions.actions.email.notify'
ACTIONS = getattr(settings, 'AXES_LOGIN_ACTIONS', [DEFAULT_ACTION])
#----------------------------------------------------------------------
def import_dotted_path(path):
"""
Takes a dotted path to a member name in a module, and returns
the member after importing it.
"""
# stolen from Mezzanine (mezzanine.utils.importing.import_dotted_path)
try:
module_path, member_name = path.rsplit(".", 1)
module = import_module(module_path)
return getattr(module, member_name)
except (ValueError, ImportError, AttributeError), e:
raise ImportError("Could not import the name: %s: %s" % (path, e))
#----------------------------------------------------------------------
@receiver(post_save, sender=AccessAttempt)
def access_attempt_handler(sender, instance, **kwargs):
for action_path in ACTIONS:
action = import_dotted_path(action_path)
action(instance, **kwargs)
<commit_msg>Use importlib from Python instead from Django<commit_after># -*- coding: utf-8 -*-
from axes.models import AccessAttempt
from django.conf import settings
from django.db.models.signals import post_save
from django.dispatch import receiver
from importlib import import_module
DEFAULT_ACTION = 'axes_login_actions.actions.email.notify'
ACTIONS = getattr(settings, 'AXES_LOGIN_ACTIONS', [DEFAULT_ACTION])
#----------------------------------------------------------------------
def import_dotted_path(path):
"""
Takes a dotted path to a member name in a module, and returns
the member after importing it.
"""
# stolen from Mezzanine (mezzanine.utils.importing.import_dotted_path)
try:
module_path, member_name = path.rsplit(".", 1)
module = import_module(module_path)
return getattr(module, member_name)
except (ValueError, ImportError, AttributeError), e:
raise ImportError("Could not import the name: %s: %s" % (path, e))
#----------------------------------------------------------------------
@receiver(post_save, sender=AccessAttempt)
def access_attempt_handler(sender, instance, **kwargs):
for action_path in ACTIONS:
action = import_dotted_path(action_path)
action(instance, **kwargs)
|
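The commit above swaps django.utils.importlib (deprecated in Django 1.7 and removed in 1.9) for the standard-library importlib, which has provided import_module since Python 2.7. A minimal, Django-free sketch of the same dotted-path resolution pattern, written in Python 3 syntax and using only standard-library names (nothing here is taken from the project itself):

from importlib import import_module

def import_dotted_path(path):
    """Resolve a dotted path such as 'package.module.attr' to the named attribute."""
    module_path, member_name = path.rsplit(".", 1)
    module = import_module(module_path)
    return getattr(module, member_name)

# Example: resolve a standard-library callable by its dotted path.
urljoin = import_dotted_path("urllib.parse.urljoin")
print(urljoin("https://example.org/a/", "b"))  # -> https://example.org/a/b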
8e4f09592d6b4d681a62026b56dca29abeed88d7
|
backend/scripts/ddirdenorm.py
|
backend/scripts/ddirdenorm.py
|
#!/usr/bin/env python
import rethinkdb as r
import optparse
if __name__ == "__main__":
parser = optparse.OptionParser()
parser.add_option("-p", "--port", dest="port",
help="rethinkdb port", default=30815)
(options, args) = parser.parse_args()
conn = r.connect('localhost', int(options.port), db='materialscommons')
selection = list(r.table('datadirs').run(conn))
for datadir in selection:
print "Updating datadir %s" % (datadir['name'])
ddir = {}
ddir['id'] = datadir['id']
ddir['name'] = datadir['name']
ddir['owner'] = datadir['owner']
ddir['birthtime'] = datadir['birthtime']
ddir['datafiles'] = []
for dfid in datadir['datafiles']:
datafile = r.table('datafiles').get(dfid).run(conn)
df = {}
df['id'] = datafile['id']
df['name'] = datafile['name']
df['owner'] = datafile['owner']
df['birthtime'] = datafile['birthtime']
df['size'] = datafile['size']
df['checksum'] = datafile['checksum']
ddir['datafiles'].append(df)
r.table('datadirs_denorm').insert(ddir).run(conn)
|
#!/usr/bin/env python
import rethinkdb as r
import optparse
if __name__ == "__main__":
parser = optparse.OptionParser()
parser.add_option("-p", "--port", dest="port",
help="rethinkdb port", default=30815)
(options, args) = parser.parse_args()
conn = r.connect('localhost', int(options.port), db='materialscommons')
selection = list(r.table('datadirs').run(conn))
for datadir in selection:
print "Updating datadir %s" % (datadir['name'])
ddir = {}
ddir['id'] = datadir['id']
ddir['name'] = datadir['name']
ddir['owner'] = datadir['owner']
ddir['birthtime'] = datadir['birthtime']
ddir['datafiles'] = []
for dfid in datadir['datafiles']:
datafile = r.table('datafiles').get(dfid).run(conn)
if datafile is None:
continue
df = {}
df['id'] = datafile['id']
df['name'] = datafile['name']
df['owner'] = datafile['owner']
df['birthtime'] = datafile['birthtime']
df['size'] = datafile['size']
df['checksum'] = datafile['checksum']
ddir['datafiles'].append(df)
r.table('datadirs_denorm').insert(ddir).run(conn)
|
Handle non-existent files in the database.
|
Handle non-existent files in the database.
|
Python
|
mit
|
materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org
|
#!/usr/bin/env python
import rethinkdb as r
import optparse
if __name__ == "__main__":
parser = optparse.OptionParser()
parser.add_option("-p", "--port", dest="port",
help="rethinkdb port", default=30815)
(options, args) = parser.parse_args()
conn = r.connect('localhost', int(options.port), db='materialscommons')
selection = list(r.table('datadirs').run(conn))
for datadir in selection:
print "Updating datadir %s" % (datadir['name'])
ddir = {}
ddir['id'] = datadir['id']
ddir['name'] = datadir['name']
ddir['owner'] = datadir['owner']
ddir['birthtime'] = datadir['birthtime']
ddir['datafiles'] = []
for dfid in datadir['datafiles']:
datafile = r.table('datafiles').get(dfid).run(conn)
df = {}
df['id'] = datafile['id']
df['name'] = datafile['name']
df['owner'] = datafile['owner']
df['birthtime'] = datafile['birthtime']
df['size'] = datafile['size']
df['checksum'] = datafile['checksum']
ddir['datafiles'].append(df)
r.table('datadirs_denorm').insert(ddir).run(conn)
Handle non-existent files in the database.
|
#!/usr/bin/env python
import rethinkdb as r
import optparse
if __name__ == "__main__":
parser = optparse.OptionParser()
parser.add_option("-p", "--port", dest="port",
help="rethinkdb port", default=30815)
(options, args) = parser.parse_args()
conn = r.connect('localhost', int(options.port), db='materialscommons')
selection = list(r.table('datadirs').run(conn))
for datadir in selection:
print "Updating datadir %s" % (datadir['name'])
ddir = {}
ddir['id'] = datadir['id']
ddir['name'] = datadir['name']
ddir['owner'] = datadir['owner']
ddir['birthtime'] = datadir['birthtime']
ddir['datafiles'] = []
for dfid in datadir['datafiles']:
datafile = r.table('datafiles').get(dfid).run(conn)
if datafile is None:
continue
df = {}
df['id'] = datafile['id']
df['name'] = datafile['name']
df['owner'] = datafile['owner']
df['birthtime'] = datafile['birthtime']
df['size'] = datafile['size']
df['checksum'] = datafile['checksum']
ddir['datafiles'].append(df)
r.table('datadirs_denorm').insert(ddir).run(conn)
|
<commit_before>#!/usr/bin/env python
import rethinkdb as r
import optparse
if __name__ == "__main__":
parser = optparse.OptionParser()
parser.add_option("-p", "--port", dest="port",
help="rethinkdb port", default=30815)
(options, args) = parser.parse_args()
conn = r.connect('localhost', int(options.port), db='materialscommons')
selection = list(r.table('datadirs').run(conn))
for datadir in selection:
print "Updating datadir %s" % (datadir['name'])
ddir = {}
ddir['id'] = datadir['id']
ddir['name'] = datadir['name']
ddir['owner'] = datadir['owner']
ddir['birthtime'] = datadir['birthtime']
ddir['datafiles'] = []
for dfid in datadir['datafiles']:
datafile = r.table('datafiles').get(dfid).run(conn)
df = {}
df['id'] = datafile['id']
df['name'] = datafile['name']
df['owner'] = datafile['owner']
df['birthtime'] = datafile['birthtime']
df['size'] = datafile['size']
df['checksum'] = datafile['checksum']
ddir['datafiles'].append(df)
r.table('datadirs_denorm').insert(ddir).run(conn)
<commit_msg>Handle non-existent files in the database.<commit_after>
|
#!/usr/bin/env python
import rethinkdb as r
import optparse
if __name__ == "__main__":
parser = optparse.OptionParser()
parser.add_option("-p", "--port", dest="port",
help="rethinkdb port", default=30815)
(options, args) = parser.parse_args()
conn = r.connect('localhost', int(options.port), db='materialscommons')
selection = list(r.table('datadirs').run(conn))
for datadir in selection:
print "Updating datadir %s" % (datadir['name'])
ddir = {}
ddir['id'] = datadir['id']
ddir['name'] = datadir['name']
ddir['owner'] = datadir['owner']
ddir['birthtime'] = datadir['birthtime']
ddir['datafiles'] = []
for dfid in datadir['datafiles']:
datafile = r.table('datafiles').get(dfid).run(conn)
if datafile is None:
continue
df = {}
df['id'] = datafile['id']
df['name'] = datafile['name']
df['owner'] = datafile['owner']
df['birthtime'] = datafile['birthtime']
df['size'] = datafile['size']
df['checksum'] = datafile['checksum']
ddir['datafiles'].append(df)
r.table('datadirs_denorm').insert(ddir).run(conn)
|
#!/usr/bin/env python
import rethinkdb as r
import optparse
if __name__ == "__main__":
parser = optparse.OptionParser()
parser.add_option("-p", "--port", dest="port",
help="rethinkdb port", default=30815)
(options, args) = parser.parse_args()
conn = r.connect('localhost', int(options.port), db='materialscommons')
selection = list(r.table('datadirs').run(conn))
for datadir in selection:
print "Updating datadir %s" % (datadir['name'])
ddir = {}
ddir['id'] = datadir['id']
ddir['name'] = datadir['name']
ddir['owner'] = datadir['owner']
ddir['birthtime'] = datadir['birthtime']
ddir['datafiles'] = []
for dfid in datadir['datafiles']:
datafile = r.table('datafiles').get(dfid).run(conn)
df = {}
df['id'] = datafile['id']
df['name'] = datafile['name']
df['owner'] = datafile['owner']
df['birthtime'] = datafile['birthtime']
df['size'] = datafile['size']
df['checksum'] = datafile['checksum']
ddir['datafiles'].append(df)
r.table('datadirs_denorm').insert(ddir).run(conn)
Handle non-existent files in the database.
#!/usr/bin/env python
import rethinkdb as r
import optparse
if __name__ == "__main__":
parser = optparse.OptionParser()
parser.add_option("-p", "--port", dest="port",
help="rethinkdb port", default=30815)
(options, args) = parser.parse_args()
conn = r.connect('localhost', int(options.port), db='materialscommons')
selection = list(r.table('datadirs').run(conn))
for datadir in selection:
print "Updating datadir %s" % (datadir['name'])
ddir = {}
ddir['id'] = datadir['id']
ddir['name'] = datadir['name']
ddir['owner'] = datadir['owner']
ddir['birthtime'] = datadir['birthtime']
ddir['datafiles'] = []
for dfid in datadir['datafiles']:
datafile = r.table('datafiles').get(dfid).run(conn)
if datafile is None:
continue
df = {}
df['id'] = datafile['id']
df['name'] = datafile['name']
df['owner'] = datafile['owner']
df['birthtime'] = datafile['birthtime']
df['size'] = datafile['size']
df['checksum'] = datafile['checksum']
ddir['datafiles'].append(df)
r.table('datadirs_denorm').insert(ddir).run(conn)
|
<commit_before>#!/usr/bin/env python
import rethinkdb as r
import optparse
if __name__ == "__main__":
parser = optparse.OptionParser()
parser.add_option("-p", "--port", dest="port",
help="rethinkdb port", default=30815)
(options, args) = parser.parse_args()
conn = r.connect('localhost', int(options.port), db='materialscommons')
selection = list(r.table('datadirs').run(conn))
for datadir in selection:
print "Updating datadir %s" % (datadir['name'])
ddir = {}
ddir['id'] = datadir['id']
ddir['name'] = datadir['name']
ddir['owner'] = datadir['owner']
ddir['birthtime'] = datadir['birthtime']
ddir['datafiles'] = []
for dfid in datadir['datafiles']:
datafile = r.table('datafiles').get(dfid).run(conn)
df = {}
df['id'] = datafile['id']
df['name'] = datafile['name']
df['owner'] = datafile['owner']
df['birthtime'] = datafile['birthtime']
df['size'] = datafile['size']
df['checksum'] = datafile['checksum']
ddir['datafiles'].append(df)
r.table('datadirs_denorm').insert(ddir).run(conn)
<commit_msg>Handle non-existent files in the database.<commit_after>#!/usr/bin/env python
import rethinkdb as r
import optparse
if __name__ == "__main__":
parser = optparse.OptionParser()
parser.add_option("-p", "--port", dest="port",
help="rethinkdb port", default=30815)
(options, args) = parser.parse_args()
conn = r.connect('localhost', int(options.port), db='materialscommons')
selection = list(r.table('datadirs').run(conn))
for datadir in selection:
print "Updating datadir %s" % (datadir['name'])
ddir = {}
ddir['id'] = datadir['id']
ddir['name'] = datadir['name']
ddir['owner'] = datadir['owner']
ddir['birthtime'] = datadir['birthtime']
ddir['datafiles'] = []
for dfid in datadir['datafiles']:
datafile = r.table('datafiles').get(dfid).run(conn)
if datafile is None:
continue
df = {}
df['id'] = datafile['id']
df['name'] = datafile['name']
df['owner'] = datafile['owner']
df['birthtime'] = datafile['birthtime']
df['size'] = datafile['size']
df['checksum'] = datafile['checksum']
ddir['datafiles'].append(df)
r.table('datadirs_denorm').insert(ddir).run(conn)
|
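The only behavioural change in the commit above is the "if datafile is None: continue" guard: in the RethinkDB Python driver, table.get(id) returns None when the key does not exist, so a dangling datafile id would otherwise crash the subscript lookups that follow. A database-free sketch of the same skip-missing-references pattern, using plain dictionaries and purely illustrative data (none of it comes from the original script):

# Stand-in "tables": id -> document, where a missed lookup yields None like get() does.
datafiles = {
    1: {"id": 1, "name": "a.txt", "size": 10},
    3: {"id": 3, "name": "c.txt", "size": 30},
}
datadir = {"id": 99, "name": "run-01", "datafiles": [1, 2, 3]}  # file 2 no longer exists

ddir = {"id": datadir["id"], "name": datadir["name"], "datafiles": []}
for dfid in datadir["datafiles"]:
    datafile = datafiles.get(dfid)  # None for the deleted file, mirroring table.get()
    if datafile is None:
        continue  # skip the dangling reference instead of failing on datafile["id"]
    ddir["datafiles"].append(
        {"id": datafile["id"], "name": datafile["name"], "size": datafile["size"]})

print(ddir)  # only files 1 and 3 are denormalised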
d6726447559b371886ae9885702a5da71c25c0fe
|
querylist/__init__.py
|
querylist/__init__.py
|
"""QueryList provides a simple way to filter lists of objects."""
from querylist.list import QueryList
from querylist.dict import BetterDict
__version__ = "0.2.0"
__author__ = "Thomas Welfley"
__all__ = ['QueryList', 'BetterDict']
|
"""QueryList provides a simple way to filter lists of objects."""
from querylist.list import QueryList
from querylist.dict import BetterDict
__version__ = '0.2.0'
__author__ = 'Thomas Welfley'
__all__ = ['QueryList', 'BetterDict']
|
Fix single vs double quote inconsistency
|
Fix single vs double quote inconsistency
|
Python
|
mit
|
thomasw/querylist
|
"""QueryList provides a simple way to filter lists of objects."""
from querylist.list import QueryList
from querylist.dict import BetterDict
__version__ = "0.2.0"
__author__ = "Thomas Welfley"
__all__ = ['QueryList', 'BetterDict']
Fix single vs double quote inconsistency
|
"""QueryList provides a simple way to filter lists of objects."""
from querylist.list import QueryList
from querylist.dict import BetterDict
__version__ = '0.2.0'
__author__ = 'Thomas Welfley'
__all__ = ['QueryList', 'BetterDict']
|
<commit_before>"""QueryList provides a simple way to filter lists of objects."""
from querylist.list import QueryList
from querylist.dict import BetterDict
__version__ = "0.2.0"
__author__ = "Thomas Welfley"
__all__ = ['QueryList', 'BetterDict']
<commit_msg>Fix single vs double quote inconsistency<commit_after>
|
"""QueryList provides a simple way to filter lists of objects."""
from querylist.list import QueryList
from querylist.dict import BetterDict
__version__ = '0.2.0'
__author__ = 'Thomas Welfley'
__all__ = ['QueryList', 'BetterDict']
|
"""QueryList provides a simple way to filter lists of objects."""
from querylist.list import QueryList
from querylist.dict import BetterDict
__version__ = "0.2.0"
__author__ = "Thomas Welfley"
__all__ = ['QueryList', 'BetterDict']
Fix single vs double quote inconsistency
"""QueryList provides a simple way to filter lists of objects."""
from querylist.list import QueryList
from querylist.dict import BetterDict
__version__ = '0.2.0'
__author__ = 'Thomas Welfley'
__all__ = ['QueryList', 'BetterDict']
|
<commit_before>"""QueryList provides a simple way to filter lists of objects."""
from querylist.list import QueryList
from querylist.dict import BetterDict
__version__ = "0.2.0"
__author__ = "Thomas Welfley"
__all__ = ['QueryList', 'BetterDict']
<commit_msg>Fix single vs double quote inconsistency<commit_after>"""QueryList provides a simple way to filter lists of objects."""
from querylist.list import QueryList
from querylist.dict import BetterDict
__version__ = '0.2.0'
__author__ = 'Thomas Welfley'
__all__ = ['QueryList', 'BetterDict']
|
e2fdee671e23fe06cc191b4940f611369c9e90b5
|
waterfall_wall/models.py
|
waterfall_wall/models.py
|
# This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table
# Feel free to rename the models, but don't rename db_table values or field names.
#
# Also note: You'll have to insert the output of 'django-admin sqlcustom [app_label]'
# into your database.
from __future__ import unicode_literals
from django.db import models
import django_pgjsonb
class Feed(models.Model):
id = models.BigIntegerField(primary_key=True)
article_id = models.BigIntegerField(unique=True)
article_json = django_pgjsonb.JSONField()
class Meta:
db_table = 'feed'
class Image(models.Model):
article = models.ForeignKey(Feed, blank=True, null=True)
url = models.TextField()
nude_percent = models.IntegerField(blank=True, null=True)
path = models.ImageField(null=True)
class Meta:
db_table = 'image'
unique_together = (('url', 'article'),)
|
# This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table
# Feel free to rename the models, but don't rename db_table values or field names.
#
# Also note: You'll have to insert the output of 'django-admin sqlcustom [app_label]'
# into your database.
from __future__ import unicode_literals
from django.db import models
import django_pgjsonb
class Feed(models.Model):
id = models.AutoField(primary_key=True)
article_id = models.BigIntegerField(unique=True)
article_json = django_pgjsonb.JSONField()
class Meta:
db_table = 'feed'
class Image(models.Model):
id = models.AutoField(primary_key=True)
article = models.ForeignKey(Feed, blank=True, null=True)
url = models.TextField()
nude_percent = models.IntegerField(blank=True, null=True)
path = models.ImageField(null=True)
class Meta:
db_table = 'image'
unique_together = (('url', 'article'),)
|
Fix model id to auto increment
|
Fix model id to auto increment
|
Python
|
mit
|
carlcarl/rcard,carlcarl/rcard
|
# This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table
# Feel free to rename the models, but don't rename db_table values or field names.
#
# Also note: You'll have to insert the output of 'django-admin sqlcustom [app_label]'
# into your database.
from __future__ import unicode_literals
from django.db import models
import django_pgjsonb
class Feed(models.Model):
id = models.BigIntegerField(primary_key=True)
article_id = models.BigIntegerField(unique=True)
article_json = django_pgjsonb.JSONField()
class Meta:
db_table = 'feed'
class Image(models.Model):
article = models.ForeignKey(Feed, blank=True, null=True)
url = models.TextField()
nude_percent = models.IntegerField(blank=True, null=True)
path = models.ImageField(null=True)
class Meta:
db_table = 'image'
unique_together = (('url', 'article'),)
Fix model id to auto increment
|
# This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table
# Feel free to rename the models, but don't rename db_table values or field names.
#
# Also note: You'll have to insert the output of 'django-admin sqlcustom [app_label]'
# into your database.
from __future__ import unicode_literals
from django.db import models
import django_pgjsonb
class Feed(models.Model):
id = models.AutoField(primary_key=True)
article_id = models.BigIntegerField(unique=True)
article_json = django_pgjsonb.JSONField()
class Meta:
db_table = 'feed'
class Image(models.Model):
id = models.AutoField(primary_key=True)
article = models.ForeignKey(Feed, blank=True, null=True)
url = models.TextField()
nude_percent = models.IntegerField(blank=True, null=True)
path = models.ImageField(null=True)
class Meta:
db_table = 'image'
unique_together = (('url', 'article'),)
|
<commit_before># This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table
# Feel free to rename the models, but don't rename db_table values or field names.
#
# Also note: You'll have to insert the output of 'django-admin sqlcustom [app_label]'
# into your database.
from __future__ import unicode_literals
from django.db import models
import django_pgjsonb
class Feed(models.Model):
id = models.BigIntegerField(primary_key=True)
article_id = models.BigIntegerField(unique=True)
article_json = django_pgjsonb.JSONField()
class Meta:
db_table = 'feed'
class Image(models.Model):
article = models.ForeignKey(Feed, blank=True, null=True)
url = models.TextField()
nude_percent = models.IntegerField(blank=True, null=True)
path = models.ImageField(null=True)
class Meta:
db_table = 'image'
unique_together = (('url', 'article'),)
<commit_msg>Fix model id to auto increment<commit_after>
|
# This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table
# Feel free to rename the models, but don't rename db_table values or field names.
#
# Also note: You'll have to insert the output of 'django-admin sqlcustom [app_label]'
# into your database.
from __future__ import unicode_literals
from django.db import models
import django_pgjsonb
class Feed(models.Model):
id = models.AutoField(primary_key=True)
article_id = models.BigIntegerField(unique=True)
article_json = django_pgjsonb.JSONField()
class Meta:
db_table = 'feed'
class Image(models.Model):
id = models.AutoField(primary_key=True)
article = models.ForeignKey(Feed, blank=True, null=True)
url = models.TextField()
nude_percent = models.IntegerField(blank=True, null=True)
path = models.ImageField(null=True)
class Meta:
db_table = 'image'
unique_together = (('url', 'article'),)
|
# This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table
# Feel free to rename the models, but don't rename db_table values or field names.
#
# Also note: You'll have to insert the output of 'django-admin sqlcustom [app_label]'
# into your database.
from __future__ import unicode_literals
from django.db import models
import django_pgjsonb
class Feed(models.Model):
id = models.BigIntegerField(primary_key=True)
article_id = models.BigIntegerField(unique=True)
article_json = django_pgjsonb.JSONField()
class Meta:
db_table = 'feed'
class Image(models.Model):
article = models.ForeignKey(Feed, blank=True, null=True)
url = models.TextField()
nude_percent = models.IntegerField(blank=True, null=True)
path = models.ImageField(null=True)
class Meta:
db_table = 'image'
unique_together = (('url', 'article'),)
Fix model id to auto increment
# This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table
# Feel free to rename the models, but don't rename db_table values or field names.
#
# Also note: You'll have to insert the output of 'django-admin sqlcustom [app_label]'
# into your database.
from __future__ import unicode_literals
from django.db import models
import django_pgjsonb
class Feed(models.Model):
id = models.AutoField(primary_key=True)
article_id = models.BigIntegerField(unique=True)
article_json = django_pgjsonb.JSONField()
class Meta:
db_table = 'feed'
class Image(models.Model):
id = models.AutoField(primary_key=True)
article = models.ForeignKey(Feed, blank=True, null=True)
url = models.TextField()
nude_percent = models.IntegerField(blank=True, null=True)
path = models.ImageField(null=True)
class Meta:
db_table = 'image'
unique_together = (('url', 'article'),)
|
<commit_before># This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table
# Feel free to rename the models, but don't rename db_table values or field names.
#
# Also note: You'll have to insert the output of 'django-admin sqlcustom [app_label]'
# into your database.
from __future__ import unicode_literals
from django.db import models
import django_pgjsonb
class Feed(models.Model):
id = models.BigIntegerField(primary_key=True)
article_id = models.BigIntegerField(unique=True)
article_json = django_pgjsonb.JSONField()
class Meta:
db_table = 'feed'
class Image(models.Model):
article = models.ForeignKey(Feed, blank=True, null=True)
url = models.TextField()
nude_percent = models.IntegerField(blank=True, null=True)
path = models.ImageField(null=True)
class Meta:
db_table = 'image'
unique_together = (('url', 'article'),)
<commit_msg>Fix model id to auto increment<commit_after># This is an auto-generated Django model module.
# You'll have to do the following manually to clean this up:
# * Rearrange models' order
# * Make sure each model has one field with primary_key=True
# * Remove `managed = False` lines if you wish to allow Django to create, modify, and delete the table
# Feel free to rename the models, but don't rename db_table values or field names.
#
# Also note: You'll have to insert the output of 'django-admin sqlcustom [app_label]'
# into your database.
from __future__ import unicode_literals
from django.db import models
import django_pgjsonb
class Feed(models.Model):
id = models.AutoField(primary_key=True)
article_id = models.BigIntegerField(unique=True)
article_json = django_pgjsonb.JSONField()
class Meta:
db_table = 'feed'
class Image(models.Model):
id = models.AutoField(primary_key=True)
article = models.ForeignKey(Feed, blank=True, null=True)
url = models.TextField()
nude_percent = models.IntegerField(blank=True, null=True)
path = models.ImageField(null=True)
class Meta:
db_table = 'image'
unique_together = (('url', 'article'),)
|
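The fix above works because Django only adds an implicit auto-incrementing id column when no field sets primary_key=True; declaring id = models.BigIntegerField(primary_key=True) suppresses that, so inserts must supply their own ids, whereas models.AutoField restores the auto-assigned key. Roughly the same contrast at the SQL level, shown with SQLite's built-in driver rather than Django (table and column names are illustrative, and the DDL is only an approximation of what Django would emit):

import sqlite3

conn = sqlite3.connect(":memory:")

# Approximately what AutoField maps to on SQLite: the id is assigned automatically.
conn.execute("CREATE TABLE feed_auto (id INTEGER PRIMARY KEY, article_id BIGINT UNIQUE)")
conn.execute("INSERT INTO feed_auto (article_id) VALUES (?)", (12345,))
print(conn.execute("SELECT id, article_id FROM feed_auto").fetchall())  # [(1, 12345)]

# A plain BIGINT primary key gets no automatic value, so the same insert fails.
conn.execute("CREATE TABLE feed_manual (id BIGINT NOT NULL PRIMARY KEY, article_id BIGINT UNIQUE)")
try:
    conn.execute("INSERT INTO feed_manual (article_id) VALUES (?)", (12345,))
except sqlite3.IntegrityError as exc:
    print("insert rejected:", exc)  # NOT NULL constraint failed: feed_manual.id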
fd95be0137f23643d99e49b2acdaf28a73e0ae43
|
read_FVCOM_results.py
|
read_FVCOM_results.py
|
from netCDF4 import Dataset, MFDataset
def readFVCOM(file, varList, noisy=False):
"""
Read in the FVCOM results file and spit out numpy arrays for
each of the variables.
"""
rootgrp = Dataset(file, 'r')
mfdata = MFDataset(file)
if noisy:
print "File format: " + rootgrp.file_format
FVCOM = {}
for key, var in rootgrp.variables.items():
if noisy:
print 'Found ' + key,
if key in varList:
if noisy:
print '(extracted)'
FVCOM[key] = mfdata.variables[key][:]
else:
if noisy:
print
return FVCOM
|
def readFVCOM(file, varList, noisy=False):
"""
Read in the FVCOM results file and spit out numpy arrays for
each of the variables.
"""
from netCDF4 import Dataset, MFDataset
rootgrp = Dataset(file, 'r')
mfdata = MFDataset(file)
if noisy:
print "File format: " + rootgrp.file_format
FVCOM = {}
for key, var in rootgrp.variables.items():
if noisy:
print 'Found ' + key,
if key in varList:
if noisy:
print '(extracted)'
FVCOM[key] = mfdata.variables[key][:]
else:
if noisy:
print
return FVCOM
def getSurfaceElevation(Z, idx):
"""
Extract the surface elevation from Z at index ind. If ind is multiple
values, extract and return the surface elevations at all those locations.
Z is usually extracted from the dict created when using readFVCOM() on a
NetCDF file.
"""
import numpy as np
nt, nx = np.shape(Z)
surfaceElevation = np.empty([nt,np.shape(idx)[0]])
for cnt, i in enumerate(idx):
surfaceElevation[:,cnt] = Z[:,i]
return surfaceElevation
|
Add function to extract surface elevation from a 2D array of surface elevations
|
Add function to extract surface elevation from a 2D array of surface elevations
|
Python
|
mit
|
pwcazenave/PyFVCOM
|
from netCDF4 import Dataset, MFDataset
def readFVCOM(file, varList, noisy=False):
"""
Read in the FVCOM results file and spit out numpy arrays for
each of the variables.
"""
rootgrp = Dataset(file, 'r')
mfdata = MFDataset(file)
if noisy:
print "File format: " + rootgrp.file_format
FVCOM = {}
for key, var in rootgrp.variables.items():
if noisy:
print 'Found ' + key,
if key in varList:
if noisy:
print '(extracted)'
FVCOM[key] = mfdata.variables[key][:]
else:
if noisy:
print
return FVCOM
Add function to extract surface elevation from a 2D array of surface elevations
|
def readFVCOM(file, varList, noisy=False):
"""
Read in the FVCOM results file and spit out numpy arrays for
each of the variables.
"""
from netCDF4 import Dataset, MFDataset
rootgrp = Dataset(file, 'r')
mfdata = MFDataset(file)
if noisy:
print "File format: " + rootgrp.file_format
FVCOM = {}
for key, var in rootgrp.variables.items():
if noisy:
print 'Found ' + key,
if key in varList:
if noisy:
print '(extracted)'
FVCOM[key] = mfdata.variables[key][:]
else:
if noisy:
print
return FVCOM
def getSurfaceElevation(Z, idx):
"""
Extract the surface elevation from Z at index ind. If ind is multiple
values, extract and return the surface elevations at all those locations.
Z is usually extracted from the dict created when using readFVCOM() on a
NetCDF file.
"""
import numpy as np
nt, nx = np.shape(Z)
surfaceElevation = np.empty([nt,np.shape(idx)[0]])
for cnt, i in enumerate(idx):
surfaceElevation[:,cnt] = Z[:,i]
return surfaceElevation
|
<commit_before>from netCDF4 import Dataset, MFDataset
def readFVCOM(file, varList, noisy=False):
"""
Read in the FVCOM results file and spit out numpy arrays for
each of the variables.
"""
rootgrp = Dataset(file, 'r')
mfdata = MFDataset(file)
if noisy:
print "File format: " + rootgrp.file_format
FVCOM = {}
for key, var in rootgrp.variables.items():
if noisy:
print 'Found ' + key,
if key in varList:
if noisy:
print '(extracted)'
FVCOM[key] = mfdata.variables[key][:]
else:
if noisy:
print
return FVCOM
<commit_msg>Add function to extract surface elevation from a 2D array of surface elevations<commit_after>
|
def readFVCOM(file, varList, noisy=False):
"""
Read in the FVCOM results file and spit out numpy arrays for
each of the variables.
"""
from netCDF4 import Dataset, MFDataset
rootgrp = Dataset(file, 'r')
mfdata = MFDataset(file)
if noisy:
print "File format: " + rootgrp.file_format
FVCOM = {}
for key, var in rootgrp.variables.items():
if noisy:
print 'Found ' + key,
if key in varList:
if noisy:
print '(extracted)'
FVCOM[key] = mfdata.variables[key][:]
else:
if noisy:
print
return FVCOM
def getSurfaceElevation(Z, idx):
"""
Extract the surface elevation from Z at index ind. If ind is multiple
values, extract and return the surface elevations at all those locations.
Z is usually extracted from the dict created when using readFVCOM() on a
NetCDF file.
"""
import numpy as np
nt, nx = np.shape(Z)
surfaceElevation = np.empty([nt,np.shape(idx)[0]])
for cnt, i in enumerate(idx):
surfaceElevation[:,cnt] = Z[:,i]
return surfaceElevation
|
from netCDF4 import Dataset, MFDataset
def readFVCOM(file, varList, noisy=False):
"""
Read in the FVCOM results file and spit out numpy arrays for
each of the variables.
"""
rootgrp = Dataset(file, 'r')
mfdata = MFDataset(file)
if noisy:
print "File format: " + rootgrp.file_format
FVCOM = {}
for key, var in rootgrp.variables.items():
if noisy:
print 'Found ' + key,
if key in varList:
if noisy:
print '(extracted)'
FVCOM[key] = mfdata.variables[key][:]
else:
if noisy:
print
return FVCOM
Add function to extract surface elevation from a 2D array of surface elevations
def readFVCOM(file, varList, noisy=False):
"""
Read in the FVCOM results file and spit out numpy arrays for
each of the variables.
"""
from netCDF4 import Dataset, MFDataset
rootgrp = Dataset(file, 'r')
mfdata = MFDataset(file)
if noisy:
print "File format: " + rootgrp.file_format
FVCOM = {}
for key, var in rootgrp.variables.items():
if noisy:
print 'Found ' + key,
if key in varList:
if noisy:
print '(extracted)'
FVCOM[key] = mfdata.variables[key][:]
else:
if noisy:
print
return FVCOM
def getSurfaceElevation(Z, idx):
"""
Extract the surface elevation from Z at index ind. If ind is multiple
values, extract and return the surface elevations at all those locations.
Z is usually extracted from the dict created when using readFVCOM() on a
NetCDF file.
"""
import numpy as np
nt, nx = np.shape(Z)
surfaceElevation = np.empty([nt,np.shape(idx)[0]])
for cnt, i in enumerate(idx):
surfaceElevation[:,cnt] = Z[:,i]
return surfaceElevation
|
<commit_before>from netCDF4 import Dataset, MFDataset
def readFVCOM(file, varList, noisy=False):
"""
Read in the FVCOM results file and spit out numpy arrays for
each of the variables.
"""
rootgrp = Dataset(file, 'r')
mfdata = MFDataset(file)
if noisy:
print "File format: " + rootgrp.file_format
FVCOM = {}
for key, var in rootgrp.variables.items():
if noisy:
print 'Found ' + key,
if key in varList:
if noisy:
print '(extracted)'
FVCOM[key] = mfdata.variables[key][:]
else:
if noisy:
print
return FVCOM
<commit_msg>Add function to extract surface elevation from a 2D array of surface elevations<commit_after>
def readFVCOM(file, varList, noisy=False):
"""
Read in the FVCOM results file and spit out numpy arrays for
each of the variables.
"""
from netCDF4 import Dataset, MFDataset
rootgrp = Dataset(file, 'r')
mfdata = MFDataset(file)
if noisy:
print "File format: " + rootgrp.file_format
FVCOM = {}
for key, var in rootgrp.variables.items():
if noisy:
print 'Found ' + key,
if key in varList:
if noisy:
print '(extracted)'
FVCOM[key] = mfdata.variables[key][:]
else:
if noisy:
print
return FVCOM
def getSurfaceElevation(Z, idx):
"""
Extract the surface elevation from Z at index ind. If ind is multiple
values, extract and return the surface elevations at all those locations.
Z is usually extracted from the dict created when using readFVCOM() on a
NetCDF file.
"""
import numpy as np
nt, nx = np.shape(Z)
surfaceElevation = np.empty([nt,np.shape(idx)[0]])
for cnt, i in enumerate(idx):
surfaceElevation[:,cnt] = Z[:,i]
return surfaceElevation
|
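The column-by-column loop in getSurfaceElevation above is equivalent to a single NumPy fancy-indexing expression, Z[:, idx], which is usually both shorter and faster. A small self-contained check with made-up shapes and values:

import numpy as np

nt, nx = 4, 6                                        # time steps x nodes
Z = np.arange(nt * nx, dtype=float).reshape(nt, nx)  # stand-in elevation array
idx = np.array([0, 2, 5])                            # node indices of interest

# Loop version, mirroring getSurfaceElevation
surfaceElevation = np.empty([nt, np.shape(idx)[0]])
for cnt, i in enumerate(idx):
    surfaceElevation[:, cnt] = Z[:, i]

# Vectorised equivalent
print(np.array_equal(surfaceElevation, Z[:, idx]))   # True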
9c0c49ee1d9181e54d3058e88af7f11f65b8dd5d
|
dotsecrets/utils.py
|
dotsecrets/utils.py
|
import os
import stat
class CopyFilter(object):
def sub(self, line):
return line
def is_only_user_readable(filename):
"""Return true if and only if filename is readable by user and
unreadable by group and others."""
mode = stat.S_IMODE(os.stat(filename).st_mode)
return mode == 0600
|
import os
import stat
class CopyFilter(object):
def sub(self, line):
return line
def is_only_user_readable(filename):
"""Return true if and only if filename is readable by user and
unreadable by group and others."""
mode = stat.S_IMODE(os.stat(filename).st_mode)
return mode == 0o600
|
Make octal number compatible with Python3
|
Make octal number compatible with Python3
|
Python
|
bsd-3-clause
|
oohlaf/dotsecrets
|
import os
import stat
class CopyFilter(object):
def sub(self, line):
return line
def is_only_user_readable(filename):
"""Return true if and only if filename is readable by user and
unreadable by group and others."""
mode = stat.S_IMODE(os.stat(filename).st_mode)
return mode == 0600
Make octal number compatible with Python3
|
import os
import stat
class CopyFilter(object):
def sub(self, line):
return line
def is_only_user_readable(filename):
"""Return true if and only if filename is readable by user and
unreadable by group and others."""
mode = stat.S_IMODE(os.stat(filename).st_mode)
return mode == 0o600
|
<commit_before>import os
import stat
class CopyFilter(object):
def sub(self, line):
return line
def is_only_user_readable(filename):
"""Return true if and only if filename is readable by user and
unreadable by group and others."""
mode = stat.S_IMODE(os.stat(filename).st_mode)
return mode == 0600
<commit_msg>Make octal number compatible with Python3<commit_after>
|
import os
import stat
class CopyFilter(object):
def sub(self, line):
return line
def is_only_user_readable(filename):
"""Return true if and only if filename is readable by user and
unreadable by group and others."""
mode = stat.S_IMODE(os.stat(filename).st_mode)
return mode == 0o600
|
import os
import stat
class CopyFilter(object):
def sub(self, line):
return line
def is_only_user_readable(filename):
"""Return true if and only if filename is readable by user and
unreadable by group and others."""
mode = stat.S_IMODE(os.stat(filename).st_mode)
return mode == 0600
Make octal number compatible with Python3
import os
import stat
class CopyFilter(object):
def sub(self, line):
return line
def is_only_user_readable(filename):
"""Return true if and only if filename is readable by user and
unreadable by group and others."""
mode = stat.S_IMODE(os.stat(filename).st_mode)
return mode == 0o600
|
<commit_before>import os
import stat
class CopyFilter(object):
def sub(self, line):
return line
def is_only_user_readable(filename):
"""Return true if and only if filename is readable by user and
unreadable by group and others."""
mode = stat.S_IMODE(os.stat(filename).st_mode)
return mode == 0600
<commit_msg>Make octal number compatible with Python3<commit_after>import os
import stat
class CopyFilter(object):
def sub(self, line):
return line
def is_only_user_readable(filename):
"""Return true if and only if filename is readable by user and
unreadable by group and others."""
mode = stat.S_IMODE(os.stat(filename).st_mode)
return mode == 0o600
|
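The one-character change above is required because bare leading-zero octal literals such as 0600 are a SyntaxError in Python 3; the 0o600 form is accepted by Python 2.6+ and Python 3 alike and still means decimal 384, i.e. owner read/write only. A quick standard-library check (stat.filemode needs Python 3.3+):

import stat

mode = 0o600
print(mode == 384)                             # True: octal 600 is decimal 384
print(mode == (stat.S_IRUSR | stat.S_IWUSR))   # True: readable and writable by the owner only
print(stat.filemode(stat.S_IFREG | mode))      # -rw-------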
e3833d0c8352fa33e6b77200310edfdb96b2cd5a
|
chipy_org/libs/social_auth_pipelines.py
|
chipy_org/libs/social_auth_pipelines.py
|
from django.utils.translation import ugettext
from django.contrib.auth.models import User
from social_auth.backends.pipeline.user import create_user as social_auth_create_user
from social_auth.exceptions import AuthAlreadyAssociated
def create_user(backend, details, response, uid, username, user = None, is_new = False, *args,
**kwargs):
'''
Check if a user with this email already exists. If they do, don't create an account.
'''
if not user:
if User.objects.filter(email = details.get('email')).exists():
msg = ugettext('This email is already in use. First login with your other account and under the top right menu click add account.')
raise AuthAlreadyAssociated(backend, msg % {
'provider': backend.name
})
else:
return social_auth_create_user(backend, details, response, uid, username, user = None, *args, **kwargs)
else:
return {}
|
from django.utils.translation import ugettext
from django.contrib.auth.models import User
from social_auth.backends.pipeline.user import create_user as social_auth_create_user
from social_auth.exceptions import AuthAlreadyAssociated
def create_user(backend, details, response, uid, username, user=None, is_new=False, *args,
**kwargs):
"""Check if a user with this email already exists. If they do, don't create an account."""
if not user:
try:
user = User.objects.filter(email=details.get('email'))
except User.DoesNotExist:
return social_auth_create_user(backend, details, response, uid, username, user=None,
*args, **kwargs)
else:
if backend.name == 'google-oauth2':
# We provide and exception here for users upgrading.
return {'user': user}
msg = ugettext('This email is already in use. First login with your other account and '
'under the top right menu click add account.')
raise AuthAlreadyAssociated(backend, msg % {
'provider': backend.name
})
|
Return the user in the pipeline if the backend is google oauth2
|
Return the user in the pipeline if the backend is google oauth2
|
Python
|
mit
|
bharathelangovan/chipy.org,brianray/chipy.org,chicagopython/chipy.org,agfor/chipy.org,tanyaschlusser/chipy.org,bharathelangovan/chipy.org,agfor/chipy.org,tanyaschlusser/chipy.org,chicagopython/chipy.org,brianray/chipy.org,chicagopython/chipy.org,tanyaschlusser/chipy.org,brianray/chipy.org,agfor/chipy.org,chicagopython/chipy.org,bharathelangovan/chipy.org
|
from django.utils.translation import ugettext
from django.contrib.auth.models import User
from social_auth.backends.pipeline.user import create_user as social_auth_create_user
from social_auth.exceptions import AuthAlreadyAssociated
def create_user(backend, details, response, uid, username, user = None, is_new = False, *args,
**kwargs):
'''
Check if a user with this email already exists. If they do, don't create an account.
'''
if not user:
if User.objects.filter(email = details.get('email')).exists():
msg = ugettext('This email is already in use. First login with your other account and under the top right menu click add account.')
raise AuthAlreadyAssociated(backend, msg % {
'provider': backend.name
})
else:
return social_auth_create_user(backend, details, response, uid, username, user = None, *args, **kwargs)
else:
return {}
Return the user in the pipeline if the backend is google oauth2
|
from django.utils.translation import ugettext
from django.contrib.auth.models import User
from social_auth.backends.pipeline.user import create_user as social_auth_create_user
from social_auth.exceptions import AuthAlreadyAssociated
def create_user(backend, details, response, uid, username, user=None, is_new=False, *args,
**kwargs):
"""Check if a user with this email already exists. If they do, don't create an account."""
if not user:
try:
user = User.objects.filter(email=details.get('email'))
except User.DoesNotExist:
return social_auth_create_user(backend, details, response, uid, username, user=None,
*args, **kwargs)
else:
if backend.name == 'google-oauth2':
# We provide and exception here for users upgrading.
return {'user': user}
msg = ugettext('This email is already in use. First login with your other account and '
'under the top right menu click add account.')
raise AuthAlreadyAssociated(backend, msg % {
'provider': backend.name
})
|
<commit_before>from django.utils.translation import ugettext
from django.contrib.auth.models import User
from social_auth.backends.pipeline.user import create_user as social_auth_create_user
from social_auth.exceptions import AuthAlreadyAssociated
def create_user(backend, details, response, uid, username, user = None, is_new = False, *args,
**kwargs):
'''
Check if a user with this email already exists. If they do, don't create an account.
'''
if not user:
if User.objects.filter(email = details.get('email')).exists():
msg = ugettext('This email is already in use. First login with your other account and under the top right menu click add account.')
raise AuthAlreadyAssociated(backend, msg % {
'provider': backend.name
})
else:
return social_auth_create_user(backend, details, response, uid, username, user = None, *args, **kwargs)
else:
return {}
<commit_msg>Return the user in the pipeline if the backend is google oauth2<commit_after>
|
from django.utils.translation import ugettext
from django.contrib.auth.models import User
from social_auth.backends.pipeline.user import create_user as social_auth_create_user
from social_auth.exceptions import AuthAlreadyAssociated
def create_user(backend, details, response, uid, username, user=None, is_new=False, *args,
**kwargs):
"""Check if a user with this email already exists. If they do, don't create an account."""
if not user:
try:
user = User.objects.filter(email=details.get('email'))
except User.DoesNotExist:
return social_auth_create_user(backend, details, response, uid, username, user=None,
*args, **kwargs)
else:
if backend.name == 'google-oauth2':
# We provide and exception here for users upgrading.
return {'user': user}
msg = ugettext('This email is already in use. First login with your other account and '
'under the top right menu click add account.')
raise AuthAlreadyAssociated(backend, msg % {
'provider': backend.name
})
|
from django.utils.translation import ugettext
from django.contrib.auth.models import User
from social_auth.backends.pipeline.user import create_user as social_auth_create_user
from social_auth.exceptions import AuthAlreadyAssociated
def create_user(backend, details, response, uid, username, user = None, is_new = False, *args,
**kwargs):
'''
Check if a user with this email already exists. If they do, don't create an account.
'''
if not user:
if User.objects.filter(email = details.get('email')).exists():
msg = ugettext('This email is already in use. First login with your other account and under the top right menu click add account.')
raise AuthAlreadyAssociated(backend, msg % {
'provider': backend.name
})
else:
return social_auth_create_user(backend, details, response, uid, username, user = None, *args, **kwargs)
else:
return {}
Return the user in the pipeline if the backend is google oauth2
from django.utils.translation import ugettext
from django.contrib.auth.models import User
from social_auth.backends.pipeline.user import create_user as social_auth_create_user
from social_auth.exceptions import AuthAlreadyAssociated
def create_user(backend, details, response, uid, username, user=None, is_new=False, *args,
**kwargs):
"""Check if a user with this email already exists. If they do, don't create an account."""
if not user:
try:
user = User.objects.filter(email=details.get('email'))
except User.DoesNotExist:
return social_auth_create_user(backend, details, response, uid, username, user=None,
*args, **kwargs)
else:
if backend.name == 'google-oauth2':
# We provide and exception here for users upgrading.
return {'user': user}
msg = ugettext('This email is already in use. First login with your other account and '
'under the top right menu click add account.')
raise AuthAlreadyAssociated(backend, msg % {
'provider': backend.name
})
|
<commit_before>from django.utils.translation import ugettext
from django.contrib.auth.models import User
from social_auth.backends.pipeline.user import create_user as social_auth_create_user
from social_auth.exceptions import AuthAlreadyAssociated
def create_user(backend, details, response, uid, username, user = None, is_new = False, *args,
**kwargs):
'''
Check if a user with this email already exists. If they do, don't create an account.
'''
if not user:
if User.objects.filter(email = details.get('email')).exists():
msg = ugettext('This email is already in use. First login with your other account and under the top right menu click add account.')
raise AuthAlreadyAssociated(backend, msg % {
'provider': backend.name
})
else:
return social_auth_create_user(backend, details, response, uid, username, user = None, *args, **kwargs)
else:
return {}
<commit_msg>Return the user in the pipeline if the backend is google oauth2<commit_after>from django.utils.translation import ugettext
from django.contrib.auth.models import User
from social_auth.backends.pipeline.user import create_user as social_auth_create_user
from social_auth.exceptions import AuthAlreadyAssociated
def create_user(backend, details, response, uid, username, user=None, is_new=False, *args,
**kwargs):
"""Check if a user with this email already exists. If they do, don't create an account."""
if not user:
try:
user = User.objects.filter(email=details.get('email'))
except User.DoesNotExist:
return social_auth_create_user(backend, details, response, uid, username, user=None,
*args, **kwargs)
else:
if backend.name == 'google-oauth2':
# We provide and exception here for users upgrading.
return {'user': user}
msg = ugettext('This email is already in use. First login with your other account and '
'under the top right menu click add account.')
raise AuthAlreadyAssociated(backend, msg % {
'provider': backend.name
})
|
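Two caveats about the patched pipeline above are worth noting: QuerySet.filter() returns a (possibly empty) queryset and never raises User.DoesNotExist, so the except branch cannot fire as written, and the value bound to user is a queryset rather than a single User; the usual "fetch one or detect absence" tool is get(). The try/except/else shape itself is sound, and the following is a standalone, Django-free sketch of that shape with purely illustrative names (it is not the project's code):

class DoesNotExist(Exception):
    """Stands in for Django's Model.DoesNotExist in this sketch."""

USERS = {"alice@example.org": {"username": "alice"}}

def get_user(email):
    """Return exactly one user or raise, like QuerySet.get()."""
    try:
        return USERS[email]
    except KeyError:
        raise DoesNotExist(email)

def create_or_reuse(email):
    try:
        user = get_user(email)            # get()-style lookup: raises when absent
    except DoesNotExist:
        return {"user": {"username": email.split("@")[0]}, "is_new": True}
    else:
        return {"user": user, "is_new": False}   # reuse the existing account

print(create_or_reuse("alice@example.org"))  # existing account is reused
print(create_or_reuse("bob@example.org"))    # a new account would be created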
2b39a322860171dd3d947fb2733519f260c74c70
|
src/webilder/AboutDialog.py
|
src/webilder/AboutDialog.py
|
'''
File : AboutDialog.py
Author : Nadav Samet
Contact : thesamet@gmail.com
Date : 2010 Jun 17
Description : Controller for the about dialog.
'''
from webilder import __version__
import gtk
import pkg_resources
def show_about_dialog(name):
"""Shows the about dialog."""
about = gtk.AboutDialog()
about.set_name(name)
about.set_version(__version__)
about.set_copyright('Nadav Samet, 2005-2010')
about.set_website('http://www.webilder.org')
about.set_authors(['Nadav Samet <thesamet@gmail.com>'])
about.set_translator_credits(
'French by Nicolas ELIE <chrystalyst@free.fr>\n'
'Alessio Leonarduzzi <alessio.leonarduzzi@gmail.com>')
icon = gtk.gdk.pixbuf_new_from_file(
pkg_resources.resource_filename(__name__, 'ui/camera48.png'))
about.set_logo(icon),
about.set_icon(icon),
about.run()
about.destroy()
|
'''
File : AboutDialog.py
Author : Nadav Samet
Contact : thesamet@gmail.com
Date : 2010 Jun 17
Description : Controller for the about dialog.
'''
from webilder import __version__
import gtk
import pkg_resources
def show_about_dialog(name):
"""Shows the about dialog."""
about = gtk.AboutDialog()
about.set_name(name)
about.set_version(__version__)
about.set_copyright('Nadav Samet, 2005-2011')
about.set_website('http://www.webilder.org')
about.set_authors(['Nadav Samet <thesamet@gmail.com>'])
about.set_translator_credits(
'French by Nicolas ELIE <chrystalyst@free.fr>\n'
'Alessio Leonarduzzi <alessio.leonarduzzi@gmail.com>')
icon = gtk.gdk.pixbuf_new_from_file(
pkg_resources.resource_filename(__name__, 'ui/camera48.png'))
about.set_logo(icon),
about.set_icon(icon),
about.run()
about.destroy()
|
Add 2011 to the year range in the about dialog
|
Add 2011 to the year range in the about dialog
|
Python
|
bsd-3-clause
|
thesamet/webilder,thesamet/webilder,thesamet/webilder
|
'''
File : AboutDialog.py
Author : Nadav Samet
Contact : thesamet@gmail.com
Date : 2010 Jun 17
Description : Controller for the about dialog.
'''
from webilder import __version__
import gtk
import pkg_resources
def show_about_dialog(name):
"""Shows the about dialog."""
about = gtk.AboutDialog()
about.set_name(name)
about.set_version(__version__)
about.set_copyright('Nadav Samet, 2005-2010')
about.set_website('http://www.webilder.org')
about.set_authors(['Nadav Samet <thesamet@gmail.com>'])
about.set_translator_credits(
'French by Nicolas ELIE <chrystalyst@free.fr>\n'
'Alessio Leonarduzzi <alessio.leonarduzzi@gmail.com>')
icon = gtk.gdk.pixbuf_new_from_file(
pkg_resources.resource_filename(__name__, 'ui/camera48.png'))
about.set_logo(icon),
about.set_icon(icon),
about.run()
about.destroy()
Add 2011 to the year range in the about dialog
|
'''
File : AboutDialog.py
Author : Nadav Samet
Contact : thesamet@gmail.com
Date : 2010 Jun 17
Description : Controller for the about dialog.
'''
from webilder import __version__
import gtk
import pkg_resources
def show_about_dialog(name):
"""Shows the about dialog."""
about = gtk.AboutDialog()
about.set_name(name)
about.set_version(__version__)
about.set_copyright('Nadav Samet, 2005-2011')
about.set_website('http://www.webilder.org')
about.set_authors(['Nadav Samet <thesamet@gmail.com>'])
about.set_translator_credits(
'French by Nicolas ELIE <chrystalyst@free.fr>\n'
'Alessio Leonarduzzi <alessio.leonarduzzi@gmail.com>')
icon = gtk.gdk.pixbuf_new_from_file(
pkg_resources.resource_filename(__name__, 'ui/camera48.png'))
about.set_logo(icon),
about.set_icon(icon),
about.run()
about.destroy()
|
<commit_before>'''
File : AboutDialog.py
Author : Nadav Samet
Contact : thesamet@gmail.com
Date : 2010 Jun 17
Description : Controller for the about dialog.
'''
from webilder import __version__
import gtk
import pkg_resources
def show_about_dialog(name):
"""Shows the about dialog."""
about = gtk.AboutDialog()
about.set_name(name)
about.set_version(__version__)
about.set_copyright('Nadav Samet, 2005-2010')
about.set_website('http://www.webilder.org')
about.set_authors(['Nadav Samet <thesamet@gmail.com>'])
about.set_translator_credits(
'French by Nicolas ELIE <chrystalyst@free.fr>\n'
'Alessio Leonarduzzi <alessio.leonarduzzi@gmail.com>')
icon = gtk.gdk.pixbuf_new_from_file(
pkg_resources.resource_filename(__name__, 'ui/camera48.png'))
about.set_logo(icon),
about.set_icon(icon),
about.run()
about.destroy()
<commit_msg>Add 2011 to the year range in the about dialog<commit_after>
|
'''
File : AboutDialog.py
Author : Nadav Samet
Contact : thesamet@gmail.com
Date : 2010 Jun 17
Description : Controller for the about dialog.
'''
from webilder import __version__
import gtk
import pkg_resources
def show_about_dialog(name):
"""Shows the about dialog."""
about = gtk.AboutDialog()
about.set_name(name)
about.set_version(__version__)
about.set_copyright('Nadav Samet, 2005-2011')
about.set_website('http://www.webilder.org')
about.set_authors(['Nadav Samet <thesamet@gmail.com>'])
about.set_translator_credits(
'French by Nicolas ELIE <chrystalyst@free.fr>\n'
'Alessio Leonarduzzi <alessio.leonarduzzi@gmail.com>')
icon = gtk.gdk.pixbuf_new_from_file(
pkg_resources.resource_filename(__name__, 'ui/camera48.png'))
about.set_logo(icon),
about.set_icon(icon),
about.run()
about.destroy()
|
'''
File : AboutDialog.py
Author : Nadav Samet
Contact : thesamet@gmail.com
Date : 2010 Jun 17
Description : Controller for the about dialog.
'''
from webilder import __version__
import gtk
import pkg_resources
def show_about_dialog(name):
"""Shows the about dialog."""
about = gtk.AboutDialog()
about.set_name(name)
about.set_version(__version__)
about.set_copyright('Nadav Samet, 2005-2010')
about.set_website('http://www.webilder.org')
about.set_authors(['Nadav Samet <thesamet@gmail.com>'])
about.set_translator_credits(
'French by Nicolas ELIE <chrystalyst@free.fr>\n'
'Alessio Leonarduzzi <alessio.leonarduzzi@gmail.com>')
icon = gtk.gdk.pixbuf_new_from_file(
pkg_resources.resource_filename(__name__, 'ui/camera48.png'))
about.set_logo(icon),
about.set_icon(icon),
about.run()
about.destroy()
Add 2011 to the year range in the about dialog
'''
File : AboutDialog.py
Author : Nadav Samet
Contact : thesamet@gmail.com
Date : 2010 Jun 17
Description : Controller for the about dialog.
'''
from webilder import __version__
import gtk
import pkg_resources
def show_about_dialog(name):
"""Shows the about dialog."""
about = gtk.AboutDialog()
about.set_name(name)
about.set_version(__version__)
about.set_copyright('Nadav Samet, 2005-2011')
about.set_website('http://www.webilder.org')
about.set_authors(['Nadav Samet <thesamet@gmail.com>'])
about.set_translator_credits(
'French by Nicolas ELIE <chrystalyst@free.fr>\n'
'Alessio Leonarduzzi <alessio.leonarduzzi@gmail.com>')
icon = gtk.gdk.pixbuf_new_from_file(
pkg_resources.resource_filename(__name__, 'ui/camera48.png'))
about.set_logo(icon),
about.set_icon(icon),
about.run()
about.destroy()
|
<commit_before>'''
File : AboutDialog.py
Author : Nadav Samet
Contact : thesamet@gmail.com
Date : 2010 Jun 17
Description : Controller for the about dialog.
'''
from webilder import __version__
import gtk
import pkg_resources
def show_about_dialog(name):
"""Shows the about dialog."""
about = gtk.AboutDialog()
about.set_name(name)
about.set_version(__version__)
about.set_copyright('Nadav Samet, 2005-2010')
about.set_website('http://www.webilder.org')
about.set_authors(['Nadav Samet <thesamet@gmail.com>'])
about.set_translator_credits(
'French by Nicolas ELIE <chrystalyst@free.fr>\n'
'Alessio Leonarduzzi <alessio.leonarduzzi@gmail.com>')
icon = gtk.gdk.pixbuf_new_from_file(
pkg_resources.resource_filename(__name__, 'ui/camera48.png'))
about.set_logo(icon),
about.set_icon(icon),
about.run()
about.destroy()
<commit_msg>Add 2011 to the year range in the about dialog<commit_after>'''
File : AboutDialog.py
Author : Nadav Samet
Contact : thesamet@gmail.com
Date : 2010 Jun 17
Description : Controller for the about dialog.
'''
from webilder import __version__
import gtk
import pkg_resources
def show_about_dialog(name):
"""Shows the about dialog."""
about = gtk.AboutDialog()
about.set_name(name)
about.set_version(__version__)
about.set_copyright('Nadav Samet, 2005-2011')
about.set_website('http://www.webilder.org')
about.set_authors(['Nadav Samet <thesamet@gmail.com>'])
about.set_translator_credits(
'French by Nicolas ELIE <chrystalyst@free.fr>\n'
'Alessio Leonarduzzi <alessio.leonarduzzi@gmail.com>')
icon = gtk.gdk.pixbuf_new_from_file(
pkg_resources.resource_filename(__name__, 'ui/camera48.png'))
about.set_logo(icon),
about.set_icon(icon),
about.run()
about.destroy()
|
46972788b2f4c3b3ac79e2d2fb9b8dd6a3834148
|
src/yunohost/utils/error.py
|
src/yunohost/utils/error.py
|
# -*- coding: utf-8 -*-
""" License
Copyright (C) 2018 YUNOHOST.ORG
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program; if not, see http://www.gnu.org/licenses
"""
from moulinette.core import MoulinetteError
from moulinette import m18n
class YunohostError(MoulinetteError):
"""Yunohost base exception"""
def __init__(self, key, __raw_msg__=False, *args, **kwargs):
if __raw_msg__:
msg = key
else:
msg = m18n.n(key, *args, **kwargs)
super(YunohostError, self).__init__(msg, __raw_msg__=True)
|
# -*- coding: utf-8 -*-
""" License
Copyright (C) 2018 YUNOHOST.ORG
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program; if not, see http://www.gnu.org/licenses
"""
from moulinette.core import MoulinetteError
from moulinette import m18n
class YunohostError(MoulinetteError):
"""
Yunohost base exception
The (only?) main difference with MoulinetteError being that keys
are translated via m18n.n (namespace) instead of m18n.g (global?)
"""
def __init__(self, key, __raw_msg__=False, *args, **kwargs):
if __raw_msg__:
msg = key
else:
msg = m18n.n(key, *args, **kwargs)
super(YunohostError, self).__init__(msg, __raw_msg__=True)
|
Add comment about the motivation behind YunohostError
|
Add comment about the motivation behind YunohostError
|
Python
|
agpl-3.0
|
YunoHost/yunohost,YunoHost/yunohost,YunoHost/moulinette-yunohost,YunoHost/moulinette-yunohost,YunoHost/yunohost,YunoHost/moulinette-yunohost,YunoHost/yunohost,YunoHost/moulinette-yunohost,YunoHost/moulinette-yunohost
|
# -*- coding: utf-8 -*-
""" License
Copyright (C) 2018 YUNOHOST.ORG
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program; if not, see http://www.gnu.org/licenses
"""
from moulinette.core import MoulinetteError
from moulinette import m18n
class YunohostError(MoulinetteError):
"""Yunohost base exception"""
def __init__(self, key, __raw_msg__=False, *args, **kwargs):
if __raw_msg__:
msg = key
else:
msg = m18n.n(key, *args, **kwargs)
super(YunohostError, self).__init__(msg, __raw_msg__=True)
Add comment about the motivation behind YunohostError
|
# -*- coding: utf-8 -*-
""" License
Copyright (C) 2018 YUNOHOST.ORG
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program; if not, see http://www.gnu.org/licenses
"""
from moulinette.core import MoulinetteError
from moulinette import m18n
class YunohostError(MoulinetteError):
"""
Yunohost base exception
The (only?) main difference with MoulinetteError being that keys
are translated via m18n.n (namespace) instead of m18n.g (global?)
"""
def __init__(self, key, __raw_msg__=False, *args, **kwargs):
if __raw_msg__:
msg = key
else:
msg = m18n.n(key, *args, **kwargs)
super(YunohostError, self).__init__(msg, __raw_msg__=True)
|
<commit_before># -*- coding: utf-8 -*-
""" License
Copyright (C) 2018 YUNOHOST.ORG
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program; if not, see http://www.gnu.org/licenses
"""
from moulinette.core import MoulinetteError
from moulinette import m18n
class YunohostError(MoulinetteError):
"""Yunohost base exception"""
def __init__(self, key, __raw_msg__=False, *args, **kwargs):
if __raw_msg__:
msg = key
else:
msg = m18n.n(key, *args, **kwargs)
super(YunohostError, self).__init__(msg, __raw_msg__=True)
<commit_msg>Add comment about the motivation behind YunohostError<commit_after>
|
# -*- coding: utf-8 -*-
""" License
Copyright (C) 2018 YUNOHOST.ORG
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program; if not, see http://www.gnu.org/licenses
"""
from moulinette.core import MoulinetteError
from moulinette import m18n
class YunohostError(MoulinetteError):
"""
Yunohost base exception
The (only?) main difference with MoulinetteError being that keys
are translated via m18n.n (namespace) instead of m18n.g (global?)
"""
def __init__(self, key, __raw_msg__=False, *args, **kwargs):
if __raw_msg__:
msg = key
else:
msg = m18n.n(key, *args, **kwargs)
super(YunohostError, self).__init__(msg, __raw_msg__=True)
|
# -*- coding: utf-8 -*-
""" License
Copyright (C) 2018 YUNOHOST.ORG
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program; if not, see http://www.gnu.org/licenses
"""
from moulinette.core import MoulinetteError
from moulinette import m18n
class YunohostError(MoulinetteError):
"""Yunohost base exception"""
def __init__(self, key, __raw_msg__=False, *args, **kwargs):
if __raw_msg__:
msg = key
else:
msg = m18n.n(key, *args, **kwargs)
super(YunohostError, self).__init__(msg, __raw_msg__=True)
Add comment about the motivation behind YunohostError# -*- coding: utf-8 -*-
""" License
Copyright (C) 2018 YUNOHOST.ORG
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program; if not, see http://www.gnu.org/licenses
"""
from moulinette.core import MoulinetteError
from moulinette import m18n
class YunohostError(MoulinetteError):
"""
Yunohost base exception
The (only?) main difference with MoulinetteError being that keys
are translated via m18n.n (namespace) instead of m18n.g (global?)
"""
def __init__(self, key, __raw_msg__=False, *args, **kwargs):
if __raw_msg__:
msg = key
else:
msg = m18n.n(key, *args, **kwargs)
super(YunohostError, self).__init__(msg, __raw_msg__=True)
|
<commit_before># -*- coding: utf-8 -*-
""" License
Copyright (C) 2018 YUNOHOST.ORG
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program; if not, see http://www.gnu.org/licenses
"""
from moulinette.core import MoulinetteError
from moulinette import m18n
class YunohostError(MoulinetteError):
"""Yunohost base exception"""
def __init__(self, key, __raw_msg__=False, *args, **kwargs):
if __raw_msg__:
msg = key
else:
msg = m18n.n(key, *args, **kwargs)
super(YunohostError, self).__init__(msg, __raw_msg__=True)
<commit_msg>Add comment about the motivation behind YunohostError<commit_after># -*- coding: utf-8 -*-
""" License
Copyright (C) 2018 YUNOHOST.ORG
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program; if not, see http://www.gnu.org/licenses
"""
from moulinette.core import MoulinetteError
from moulinette import m18n
class YunohostError(MoulinetteError):
"""
Yunohost base exception
The (only?) main difference with MoulinetteError being that keys
are translated via m18n.n (namespace) instead of m18n.g (global?)
"""
def __init__(self, key, __raw_msg__=False, *args, **kwargs):
if __raw_msg__:
msg = key
else:
msg = m18n.n(key, *args, **kwargs)
super(YunohostError, self).__init__(msg, __raw_msg__=True)
|
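A short aside on the record above: the added docstring points at the one behavioural difference, namely that keys are resolved through m18n.n (the yunohost namespace catalog) rather than m18n.g. A minimal usage sketch follows; the key name and keyword argument are illustrative, not real catalog entries.
from yunohost.utils.error import YunohostError
def fail_with_translated_key():
    # The key is looked up via m18n.n; extra kwargs are forwarded for interpolation
    raise YunohostError('some_translatable_key', name='example')
def fail_with_literal_message():
    # __raw_msg__ skips translation and forwards the string unchanged
    raise YunohostError('something went wrong', __raw_msg__=True)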
b2f5e71e15eec47efd1b8faed97ec614b78deaf6
|
test/test_stream.py
|
test/test_stream.py
|
import pynmea2
from tempfile import TemporaryFile
def test_stream():
data = "$GPGGA,184353.07,1929.045,S,02410.506,E,1,04,2.6,100.00,M,-33.9,M,,0000*6D\n"
sr = pynmea2.NMEAStreamReader()
assert len(sr.next('')) == 0
assert len(sr.next(data)) == 1
assert len(sr.next(data)) == 1
sr = pynmea2.NMEAStreamReader()
assert len(sr.next(data)) == 1
assert len(sr.next(data[:10])) == 0
assert len(sr.next(data[10:])) == 1
sr = pynmea2.NMEAStreamReader()
assert sr.next() == []
t = TemporaryFile()
t.write(data)
t.seek(0)
sr = pynmea2.NMEAStreamReader(t)
assert len(sr.next()) == 1
assert len(sr.next()) == 0
sr = pynmea2.NMEAStreamReader(data)
assert sr.stream == None
|
from __future__ import print_function
import pynmea2
from tempfile import TemporaryFile
def test_stream():
data = "$GPGGA,184353.07,1929.045,S,02410.506,E,1,04,2.6,100.00,M,-33.9,M,,0000*6D\n"
sr = pynmea2.NMEAStreamReader()
assert len(sr.next('')) == 0
assert len(sr.next(data)) == 1
assert len(sr.next(data)) == 1
sr = pynmea2.NMEAStreamReader()
assert len(sr.next(data)) == 1
assert len(sr.next(data[:10])) == 0
assert len(sr.next(data[10:])) == 1
sr = pynmea2.NMEAStreamReader()
assert sr.next() == []
t = TemporaryFile()
print(data,end='',file=t)
t.seek(0)
sr = pynmea2.NMEAStreamReader(t)
assert len(sr.next()) == 1
assert len(sr.next()) == 0
sr = pynmea2.NMEAStreamReader(data)
assert sr.stream == None
|
Add compatibility with Python 3.3.
|
Add compatibility with Python 3.3.
|
Python
|
mit
|
Knio/pynmea2,adamfazzari/pynmea2,ampledata/pynmea2,silentquasar/pynmea2,lobocv/pynmea2
|
import pynmea2
from tempfile import TemporaryFile
def test_stream():
data = "$GPGGA,184353.07,1929.045,S,02410.506,E,1,04,2.6,100.00,M,-33.9,M,,0000*6D\n"
sr = pynmea2.NMEAStreamReader()
assert len(sr.next('')) == 0
assert len(sr.next(data)) == 1
assert len(sr.next(data)) == 1
sr = pynmea2.NMEAStreamReader()
assert len(sr.next(data)) == 1
assert len(sr.next(data[:10])) == 0
assert len(sr.next(data[10:])) == 1
sr = pynmea2.NMEAStreamReader()
assert sr.next() == []
t = TemporaryFile()
t.write(data)
t.seek(0)
sr = pynmea2.NMEAStreamReader(t)
assert len(sr.next()) == 1
assert len(sr.next()) == 0
sr = pynmea2.NMEAStreamReader(data)
assert sr.stream == NoneAdd compatibility with Python 3.3.
|
from __future__ import print_function
import pynmea2
from tempfile import TemporaryFile
def test_stream():
data = "$GPGGA,184353.07,1929.045,S,02410.506,E,1,04,2.6,100.00,M,-33.9,M,,0000*6D\n"
sr = pynmea2.NMEAStreamReader()
assert len(sr.next('')) == 0
assert len(sr.next(data)) == 1
assert len(sr.next(data)) == 1
sr = pynmea2.NMEAStreamReader()
assert len(sr.next(data)) == 1
assert len(sr.next(data[:10])) == 0
assert len(sr.next(data[10:])) == 1
sr = pynmea2.NMEAStreamReader()
assert sr.next() == []
t = TemporaryFile()
print(data,end='',file=t)
t.seek(0)
sr = pynmea2.NMEAStreamReader(t)
assert len(sr.next()) == 1
assert len(sr.next()) == 0
sr = pynmea2.NMEAStreamReader(data)
assert sr.stream == None
|
<commit_before>import pynmea2
from tempfile import TemporaryFile
def test_stream():
data = "$GPGGA,184353.07,1929.045,S,02410.506,E,1,04,2.6,100.00,M,-33.9,M,,0000*6D\n"
sr = pynmea2.NMEAStreamReader()
assert len(sr.next('')) == 0
assert len(sr.next(data)) == 1
assert len(sr.next(data)) == 1
sr = pynmea2.NMEAStreamReader()
assert len(sr.next(data)) == 1
assert len(sr.next(data[:10])) == 0
assert len(sr.next(data[10:])) == 1
sr = pynmea2.NMEAStreamReader()
assert sr.next() == []
t = TemporaryFile()
t.write(data)
t.seek(0)
sr = pynmea2.NMEAStreamReader(t)
assert len(sr.next()) == 1
assert len(sr.next()) == 0
sr = pynmea2.NMEAStreamReader(data)
assert sr.stream == None<commit_msg>Add compatibility with Python 3.3.<commit_after>
|
from __future__ import print_function
import pynmea2
from tempfile import TemporaryFile
def test_stream():
data = "$GPGGA,184353.07,1929.045,S,02410.506,E,1,04,2.6,100.00,M,-33.9,M,,0000*6D\n"
sr = pynmea2.NMEAStreamReader()
assert len(sr.next('')) == 0
assert len(sr.next(data)) == 1
assert len(sr.next(data)) == 1
sr = pynmea2.NMEAStreamReader()
assert len(sr.next(data)) == 1
assert len(sr.next(data[:10])) == 0
assert len(sr.next(data[10:])) == 1
sr = pynmea2.NMEAStreamReader()
assert sr.next() == []
t = TemporaryFile()
print(data,end='',file=t)
t.seek(0)
sr = pynmea2.NMEAStreamReader(t)
assert len(sr.next()) == 1
assert len(sr.next()) == 0
sr = pynmea2.NMEAStreamReader(data)
assert sr.stream == None
|
import pynmea2
from tempfile import TemporaryFile
def test_stream():
data = "$GPGGA,184353.07,1929.045,S,02410.506,E,1,04,2.6,100.00,M,-33.9,M,,0000*6D\n"
sr = pynmea2.NMEAStreamReader()
assert len(sr.next('')) == 0
assert len(sr.next(data)) == 1
assert len(sr.next(data)) == 1
sr = pynmea2.NMEAStreamReader()
assert len(sr.next(data)) == 1
assert len(sr.next(data[:10])) == 0
assert len(sr.next(data[10:])) == 1
sr = pynmea2.NMEAStreamReader()
assert sr.next() == []
t = TemporaryFile()
t.write(data)
t.seek(0)
sr = pynmea2.NMEAStreamReader(t)
assert len(sr.next()) == 1
assert len(sr.next()) == 0
sr = pynmea2.NMEAStreamReader(data)
assert sr.stream == NoneAdd compatibility with Python 3.3.from __future__ import print_function
import pynmea2
from tempfile import TemporaryFile
def test_stream():
data = "$GPGGA,184353.07,1929.045,S,02410.506,E,1,04,2.6,100.00,M,-33.9,M,,0000*6D\n"
sr = pynmea2.NMEAStreamReader()
assert len(sr.next('')) == 0
assert len(sr.next(data)) == 1
assert len(sr.next(data)) == 1
sr = pynmea2.NMEAStreamReader()
assert len(sr.next(data)) == 1
assert len(sr.next(data[:10])) == 0
assert len(sr.next(data[10:])) == 1
sr = pynmea2.NMEAStreamReader()
assert sr.next() == []
t = TemporaryFile()
print(data,end='',file=t)
t.seek(0)
sr = pynmea2.NMEAStreamReader(t)
assert len(sr.next()) == 1
assert len(sr.next()) == 0
sr = pynmea2.NMEAStreamReader(data)
assert sr.stream == None
|
<commit_before>import pynmea2
from tempfile import TemporaryFile
def test_stream():
data = "$GPGGA,184353.07,1929.045,S,02410.506,E,1,04,2.6,100.00,M,-33.9,M,,0000*6D\n"
sr = pynmea2.NMEAStreamReader()
assert len(sr.next('')) == 0
assert len(sr.next(data)) == 1
assert len(sr.next(data)) == 1
sr = pynmea2.NMEAStreamReader()
assert len(sr.next(data)) == 1
assert len(sr.next(data[:10])) == 0
assert len(sr.next(data[10:])) == 1
sr = pynmea2.NMEAStreamReader()
assert sr.next() == []
t = TemporaryFile()
t.write(data)
t.seek(0)
sr = pynmea2.NMEAStreamReader(t)
assert len(sr.next()) == 1
assert len(sr.next()) == 0
sr = pynmea2.NMEAStreamReader(data)
assert sr.stream == None<commit_msg>Add compatibility with Python 3.3.<commit_after>from __future__ import print_function
import pynmea2
from tempfile import TemporaryFile
def test_stream():
data = "$GPGGA,184353.07,1929.045,S,02410.506,E,1,04,2.6,100.00,M,-33.9,M,,0000*6D\n"
sr = pynmea2.NMEAStreamReader()
assert len(sr.next('')) == 0
assert len(sr.next(data)) == 1
assert len(sr.next(data)) == 1
sr = pynmea2.NMEAStreamReader()
assert len(sr.next(data)) == 1
assert len(sr.next(data[:10])) == 0
assert len(sr.next(data[10:])) == 1
sr = pynmea2.NMEAStreamReader()
assert sr.next() == []
t = TemporaryFile()
print(data,end='',file=t)
t.seek(0)
sr = pynmea2.NMEAStreamReader(t)
assert len(sr.next()) == 1
assert len(sr.next()) == 0
sr = pynmea2.NMEAStreamReader(data)
assert sr.stream == None
|
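One note on the construct introduced in this record: with the print_function future import, the same print(..., end='', file=...) call parses and runs as a function call under both Python 2 and Python 3, which is what lets the test write the sample sentence through a single code path. A standalone sketch, with an illustrative file name rather than the TemporaryFile used in the test:
from __future__ import print_function  # enables print() as a function on Python 2; a no-op on Python 3
data = "$GPGGA,184353.07,1929.045,S,02410.506,E,1,04,2.6,100.00,M,-33.9,M,,0000*6D\n"
with open('sample_nmea.txt', 'w') as handle:
    # end='' avoids appending a second newline; file= redirects the output to the handle
    print(data, end='', file=handle)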
6c73fd3ca1c6459a3a8987a2a5c77b0247eda128
|
tests/test_users.py
|
tests/test_users.py
|
import pytest
from mock import patch, Mock
from spanky.lib import users
class TestCreateUsers(object):
def setup(self):
self.conf = [
{'username': 'foo'},
{'username': 'bar'},
]
self.user_init = users.UserInit(self.conf)
def test_build(self):
self.user_init.create_user = Mock()
self.user_init.build()
assert len(self.user_init.create_user.mock_calls) == 2
@patch.object(users, 'call')
def test_create_users(self, call):
call.return_value = 0
self.user_init.create_user({'username': 'foo'})
assert call.called
@patch.object(users, 'call')
def test_create_users_raises_error_on_fail(self, call):
call.return_value = 1
assert pytest.raises(Exception, self.user_init.create_user, {'username': 'foo'})
|
import pytest
from mock import patch, Mock
from spanky.lib import users
class TestCreateUsers(object):
def setup(self):
self.conf = [
{'username': 'foo'},
{'username': 'bar'},
]
self.user_init = users.UserInit(self.conf)
def test_build(self):
self.user_init.create_user = Mock()
self.user_init.build()
assert len(self.user_init.create_user.mock_calls) == 2
@patch.object(users, 'call')
def test_create_users(self, call):
call.return_value = 0
self.user_init.create_user({'username': 'foo'})
assert call.called
@patch.object(users, 'call')
def test_create_users_raises_error_on_fail(self, call):
call.return_value = 1
assert pytest.raises(Exception, self.user_init.create_user, {'username': 'foo'})
@patch.object(users, 'call')
def test_useradd_command(self, call):
call.return_value = 0
self.user_init.create_user({
'username': 'foo',
})
expected = [
'useradd', '-U', 'foo'
]
call.assert_called_with(expected)
|
Create a test asserting the useradd command
|
Create a test asserting the useradd command
|
Python
|
bsd-3-clause
|
pglbutt/spanky,pglbutt/spanky,pglbutt/spanky
|
import pytest
from mock import patch, Mock
from spanky.lib import users
class TestCreateUsers(object):
def setup(self):
self.conf = [
{'username': 'foo'},
{'username': 'bar'},
]
self.user_init = users.UserInit(self.conf)
def test_build(self):
self.user_init.create_user = Mock()
self.user_init.build()
assert len(self.user_init.create_user.mock_calls) == 2
@patch.object(users, 'call')
def test_create_users(self, call):
call.return_value = 0
self.user_init.create_user({'username': 'foo'})
assert call.called
@patch.object(users, 'call')
def test_create_users_raises_error_on_fail(self, call):
call.return_value = 1
assert pytest.raises(Exception, self.user_init.create_user, {'username': 'foo'})
Create a test asserting the useradd command
|
import pytest
from mock import patch, Mock
from spanky.lib import users
class TestCreateUsers(object):
def setup(self):
self.conf = [
{'username': 'foo'},
{'username': 'bar'},
]
self.user_init = users.UserInit(self.conf)
def test_build(self):
self.user_init.create_user = Mock()
self.user_init.build()
assert len(self.user_init.create_user.mock_calls) == 2
@patch.object(users, 'call')
def test_create_users(self, call):
call.return_value = 0
self.user_init.create_user({'username': 'foo'})
assert call.called
@patch.object(users, 'call')
def test_create_users_raises_error_on_fail(self, call):
call.return_value = 1
assert pytest.raises(Exception, self.user_init.create_user, {'username': 'foo'})
@patch.object(users, 'call')
def test_useradd_command(self, call):
call.return_value = 0
self.user_init.create_user({
'username': 'foo',
})
expected = [
'useradd', '-U', 'foo'
]
call.assert_called_with(expected)
|
<commit_before>import pytest
from mock import patch, Mock
from spanky.lib import users
class TestCreateUsers(object):
def setup(self):
self.conf = [
{'username': 'foo'},
{'username': 'bar'},
]
self.user_init = users.UserInit(self.conf)
def test_build(self):
self.user_init.create_user = Mock()
self.user_init.build()
assert len(self.user_init.create_user.mock_calls) == 2
@patch.object(users, 'call')
def test_create_users(self, call):
call.return_value = 0
self.user_init.create_user({'username': 'foo'})
assert call.called
@patch.object(users, 'call')
def test_create_users_raises_error_on_fail(self, call):
call.return_value = 1
assert pytest.raises(Exception, self.user_init.create_user, {'username': 'foo'})
<commit_msg>Create a test asserting the useradd command<commit_after>
|
import pytest
from mock import patch, Mock
from spanky.lib import users
class TestCreateUsers(object):
def setup(self):
self.conf = [
{'username': 'foo'},
{'username': 'bar'},
]
self.user_init = users.UserInit(self.conf)
def test_build(self):
self.user_init.create_user = Mock()
self.user_init.build()
assert len(self.user_init.create_user.mock_calls) == 2
@patch.object(users, 'call')
def test_create_users(self, call):
call.return_value = 0
self.user_init.create_user({'username': 'foo'})
assert call.called
@patch.object(users, 'call')
def test_create_users_raises_error_on_fail(self, call):
call.return_value = 1
assert pytest.raises(Exception, self.user_init.create_user, {'username': 'foo'})
@patch.object(users, 'call')
def test_useradd_command(self, call):
call.return_value = 0
self.user_init.create_user({
'username': 'foo',
})
expected = [
'useradd', '-U', 'foo'
]
call.assert_called_with(expected)
|
import pytest
from mock import patch, Mock
from spanky.lib import users
class TestCreateUsers(object):
def setup(self):
self.conf = [
{'username': 'foo'},
{'username': 'bar'},
]
self.user_init = users.UserInit(self.conf)
def test_build(self):
self.user_init.create_user = Mock()
self.user_init.build()
assert len(self.user_init.create_user.mock_calls) == 2
@patch.object(users, 'call')
def test_create_users(self, call):
call.return_value = 0
self.user_init.create_user({'username': 'foo'})
assert call.called
@patch.object(users, 'call')
def test_create_users_raises_error_on_fail(self, call):
call.return_value = 1
assert pytest.raises(Exception, self.user_init.create_user, {'username': 'foo'})
Create a test asserting the useradd commandimport pytest
from mock import patch, Mock
from spanky.lib import users
class TestCreateUsers(object):
def setup(self):
self.conf = [
{'username': 'foo'},
{'username': 'bar'},
]
self.user_init = users.UserInit(self.conf)
def test_build(self):
self.user_init.create_user = Mock()
self.user_init.build()
assert len(self.user_init.create_user.mock_calls) == 2
@patch.object(users, 'call')
def test_create_users(self, call):
call.return_value = 0
self.user_init.create_user({'username': 'foo'})
assert call.called
@patch.object(users, 'call')
def test_create_users_raises_error_on_fail(self, call):
call.return_value = 1
assert pytest.raises(Exception, self.user_init.create_user, {'username': 'foo'})
@patch.object(users, 'call')
def test_useradd_command(self, call):
call.return_value = 0
self.user_init.create_user({
'username': 'foo',
})
expected = [
'useradd', '-U', 'foo'
]
call.assert_called_with(expected)
|
<commit_before>import pytest
from mock import patch, Mock
from spanky.lib import users
class TestCreateUsers(object):
def setup(self):
self.conf = [
{'username': 'foo'},
{'username': 'bar'},
]
self.user_init = users.UserInit(self.conf)
def test_build(self):
self.user_init.create_user = Mock()
self.user_init.build()
assert len(self.user_init.create_user.mock_calls) == 2
@patch.object(users, 'call')
def test_create_users(self, call):
call.return_value = 0
self.user_init.create_user({'username': 'foo'})
assert call.called
@patch.object(users, 'call')
def test_create_users_raises_error_on_fail(self, call):
call.return_value = 1
assert pytest.raises(Exception, self.user_init.create_user, {'username': 'foo'})
<commit_msg>Create a test asserting the useradd command<commit_after>import pytest
from mock import patch, Mock
from spanky.lib import users
class TestCreateUsers(object):
def setup(self):
self.conf = [
{'username': 'foo'},
{'username': 'bar'},
]
self.user_init = users.UserInit(self.conf)
def test_build(self):
self.user_init.create_user = Mock()
self.user_init.build()
assert len(self.user_init.create_user.mock_calls) == 2
@patch.object(users, 'call')
def test_create_users(self, call):
call.return_value = 0
self.user_init.create_user({'username': 'foo'})
assert call.called
@patch.object(users, 'call')
def test_create_users_raises_error_on_fail(self, call):
call.return_value = 1
assert pytest.raises(Exception, self.user_init.create_user, {'username': 'foo'})
@patch.object(users, 'call')
def test_useradd_command(self, call):
call.return_value = 0
self.user_init.create_user({
'username': 'foo',
})
expected = [
'useradd', '-U', 'foo'
]
call.assert_called_with(expected)
|
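The new test in this record relies on assert_called_with, which only inspects the most recent call made on the mock. A self-contained sketch of that behaviour, independent of the spanky code:
from mock import Mock
call = Mock(return_value=0)
call(['useradd', '-U', 'foo'])
# Passes: the latest call used exactly these positional arguments
call.assert_called_with(['useradd', '-U', 'foo'])
# Would raise AssertionError, because the argument list differs:
# call.assert_called_with(['useradd', 'foo'])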
409722f0e075385e05a77513f6dbd9c3b540bfac
|
txpoloniex/const.py
|
txpoloniex/const.py
|
"""
Constant values for the Poloniex API
"""
PUBLIC_API = 'https://poloniex.com/public'
PRIVATE_API = 'https://poloniex.com/tradingApi'
PUBLIC_COMMANDS = [
'returnTicker',
'return24hVolume',
'returnOrderBook',
'returnTradeHistory',
'returnChartData',
'returnCurrencies',
'returnLoanOrders',
]
PRIVATE_COMMANDS = [
'returnBalances',
'returnCompleteBalances',
'returnDepositAddresses',
'generateNewAddress',
'returnDepositsWithdrawals',
'returnOpenOrders',
'returnTradeHistory',
'returnAvailableAccountBalances',
'returnTradableBalances',
'returnOpenLoanOffers',
'returnOrderTrades',
'returnActiveLoans',
'returnLendingHistory',
'createLoanOffer',
'cancelLoanOffer',
'toggleAutoRenew',
'buy',
'sell',
'cancelOrder',
'moveOrder',
'withdraw',
'returnFeeInfo',
'transferBalance',
'returnMarginAccountSummary',
'marginBuy',
'marginSell',
'getMarginPosition',
'closeMarginPosition',
]
|
"""
Constant values for the Poloniex API
"""
PUBLIC_API = 'https://poloniex.com/public'
PRIVATE_API = 'https://poloniex.com/tradingApi'
PUBLIC_COMMANDS = [
'returnTicker',
'return24hVolume',
'returnOrderBook',
'returnTradeHistory',
'returnChartData',
'returnCurrencies',
'returnLoanOrders',
]
PRIVATE_COMMANDS = [
'returnBalances',
'returnCompleteBalances',
'returnDepositAddresses',
'generateNewAddress',
'returnDepositsWithdrawals',
'returnOpenOrders',
'returnTradeHistory',
'returnAvailableAccountBalances',
'returnTradableBalances',
'returnOpenLoanOffers',
'returnOrderTrades',
'returnActiveLoans',
'returnLendingHistory',
'createLoanOffer',
'cancelLoanOffer',
'toggleAutoRenew',
'buy',
'sell',
'cancelOrder',
'moveOrder',
'withdraw',
'returnFeeInfo',
'transferBalance',
'returnMarginAccountSummary',
'marginBuy',
'marginSell',
'getMarginPosition',
'closeMarginPosition',
]
DATE_FORMAT='%Y-%m-%d %H:%M:%S'
|
Add DATE_FORMAT for parsing any datetime strings
|
Add DATE_FORMAT for parsing any datetime strings
Poloniex seems to use a fixed output format for datetime strings
|
Python
|
apache-2.0
|
congruency/txpoloniex
|
"""
Constant values for the Poloniex API
"""
PUBLIC_API = 'https://poloniex.com/public'
PRIVATE_API = 'https://poloniex.com/tradingApi'
PUBLIC_COMMANDS = [
'returnTicker',
'return24hVolume',
'returnOrderBook',
'returnTradeHistory',
'returnChartData',
'returnCurrencies',
'returnLoanOrders',
]
PRIVATE_COMMANDS = [
'returnBalances',
'returnCompleteBalances',
'returnDepositAddresses',
'generateNewAddress',
'returnDepositsWithdrawals',
'returnOpenOrders',
'returnTradeHistory',
'returnAvailableAccountBalances',
'returnTradableBalances',
'returnOpenLoanOffers',
'returnOrderTrades',
'returnActiveLoans',
'returnLendingHistory',
'createLoanOffer',
'cancelLoanOffer',
'toggleAutoRenew',
'buy',
'sell',
'cancelOrder',
'moveOrder',
'withdraw',
'returnFeeInfo',
'transferBalance',
'returnMarginAccountSummary',
'marginBuy',
'marginSell',
'getMarginPosition',
'closeMarginPosition',
]
Add DATE_FORMAT for parsing any datetime strings
Poloniex seems to use a fixed output format for datetime strings
|
"""
Constant values for the Poloniex API
"""
PUBLIC_API = 'https://poloniex.com/public'
PRIVATE_API = 'https://poloniex.com/tradingApi'
PUBLIC_COMMANDS = [
'returnTicker',
'return24hVolume',
'returnOrderBook',
'returnTradeHistory',
'returnChartData',
'returnCurrencies',
'returnLoanOrders',
]
PRIVATE_COMMANDS = [
'returnBalances',
'returnCompleteBalances',
'returnDepositAddresses',
'generateNewAddress',
'returnDepositsWithdrawals',
'returnOpenOrders',
'returnTradeHistory',
'returnAvailableAccountBalances',
'returnTradableBalances',
'returnOpenLoanOffers',
'returnOrderTrades',
'returnActiveLoans',
'returnLendingHistory',
'createLoanOffer',
'cancelLoanOffer',
'toggleAutoRenew',
'buy',
'sell',
'cancelOrder',
'moveOrder',
'withdraw',
'returnFeeInfo',
'transferBalance',
'returnMarginAccountSummary',
'marginBuy',
'marginSell',
'getMarginPosition',
'closeMarginPosition',
]
DATE_FORMAT='%Y-%m-%d %H:%M:%S'
|
<commit_before>"""
Constant values for the Poloniex API
"""
PUBLIC_API = 'https://poloniex.com/public'
PRIVATE_API = 'https://poloniex.com/tradingApi'
PUBLIC_COMMANDS = [
'returnTicker',
'return24hVolume',
'returnOrderBook',
'returnTradeHistory',
'returnChartData',
'returnCurrencies',
'returnLoanOrders',
]
PRIVATE_COMMANDS = [
'returnBalances',
'returnCompleteBalances',
'returnDepositAddresses',
'generateNewAddress',
'returnDepositsWithdrawals',
'returnOpenOrders',
'returnTradeHistory',
'returnAvailableAccountBalances',
'returnTradableBalances',
'returnOpenLoanOffers',
'returnOrderTrades',
'returnActiveLoans',
'returnLendingHistory',
'createLoanOffer',
'cancelLoanOffer',
'toggleAutoRenew',
'buy',
'sell',
'cancelOrder',
'moveOrder',
'withdraw',
'returnFeeInfo',
'transferBalance',
'returnMarginAccountSummary',
'marginBuy',
'marginSell',
'getMarginPosition',
'closeMarginPosition',
]
<commit_msg>Add DATE_FORMAT for parsing any datetime strings
Poloniex seems to use a fixed output format for datetime strings<commit_after>
|
"""
Constant values for the Poloniex API
"""
PUBLIC_API = 'https://poloniex.com/public'
PRIVATE_API = 'https://poloniex.com/tradingApi'
PUBLIC_COMMANDS = [
'returnTicker',
'return24hVolume',
'returnOrderBook',
'returnTradeHistory',
'returnChartData',
'returnCurrencies',
'returnLoanOrders',
]
PRIVATE_COMMANDS = [
'returnBalances',
'returnCompleteBalances',
'returnDepositAddresses',
'generateNewAddress',
'returnDepositsWithdrawals',
'returnOpenOrders',
'returnTradeHistory',
'returnAvailableAccountBalances',
'returnTradableBalances',
'returnOpenLoanOffers',
'returnOrderTrades',
'returnActiveLoans',
'returnLendingHistory',
'createLoanOffer',
'cancelLoanOffer',
'toggleAutoRenew',
'buy',
'sell',
'cancelOrder',
'moveOrder',
'withdraw',
'returnFeeInfo',
'transferBalance',
'returnMarginAccountSummary',
'marginBuy',
'marginSell',
'getMarginPosition',
'closeMarginPosition',
]
DATE_FORMAT='%Y-%m-%d %H:%M:%S'
|
"""
Constant values for the Poloniex API
"""
PUBLIC_API = 'https://poloniex.com/public'
PRIVATE_API = 'https://poloniex.com/tradingApi'
PUBLIC_COMMANDS = [
'returnTicker',
'return24hVolume',
'returnOrderBook',
'returnTradeHistory',
'returnChartData',
'returnCurrencies',
'returnLoanOrders',
]
PRIVATE_COMMANDS = [
'returnBalances',
'returnCompleteBalances',
'returnDepositAddresses',
'generateNewAddress',
'returnDepositsWithdrawals',
'returnOpenOrders',
'returnTradeHistory',
'returnAvailableAccountBalances',
'returnTradableBalances',
'returnOpenLoanOffers',
'returnOrderTrades',
'returnActiveLoans',
'returnLendingHistory',
'createLoanOffer',
'cancelLoanOffer',
'toggleAutoRenew',
'buy',
'sell',
'cancelOrder',
'moveOrder',
'withdraw',
'returnFeeInfo',
'transferBalance',
'returnMarginAccountSummary',
'marginBuy',
'marginSell',
'getMarginPosition',
'closeMarginPosition',
]
Add DATE_FORMAT for parsing any datetime strings
Poloniex seems to use a fixed output format for datetime strings"""
Constant values for the Poloniex API
"""
PUBLIC_API = 'https://poloniex.com/public'
PRIVATE_API = 'https://poloniex.com/tradingApi'
PUBLIC_COMMANDS = [
'returnTicker',
'return24hVolume',
'returnOrderBook',
'returnTradeHistory',
'returnChartData',
'returnCurrencies',
'returnLoanOrders',
]
PRIVATE_COMMANDS = [
'returnBalances',
'returnCompleteBalances',
'returnDepositAddresses',
'generateNewAddress',
'returnDepositsWithdrawals',
'returnOpenOrders',
'returnTradeHistory',
'returnAvailableAccountBalances',
'returnTradableBalances',
'returnOpenLoanOffers',
'returnOrderTrades',
'returnActiveLoans',
'returnLendingHistory',
'createLoanOffer',
'cancelLoanOffer',
'toggleAutoRenew',
'buy',
'sell',
'cancelOrder',
'moveOrder',
'withdraw',
'returnFeeInfo',
'transferBalance',
'returnMarginAccountSummary',
'marginBuy',
'marginSell',
'getMarginPosition',
'closeMarginPosition',
]
DATE_FORMAT='%Y-%m-%d %H:%M:%S'
|
<commit_before>"""
Constant values for the Poloniex API
"""
PUBLIC_API = 'https://poloniex.com/public'
PRIVATE_API = 'https://poloniex.com/tradingApi'
PUBLIC_COMMANDS = [
'returnTicker',
'return24hVolume',
'returnOrderBook',
'returnTradeHistory',
'returnChartData',
'returnCurrencies',
'returnLoanOrders',
]
PRIVATE_COMMANDS = [
'returnBalances',
'returnCompleteBalances',
'returnDepositAddresses',
'generateNewAddress',
'returnDepositsWithdrawals',
'returnOpenOrders',
'returnTradeHistory',
'returnAvailableAccountBalances',
'returnTradableBalances',
'returnOpenLoanOffers',
'returnOrderTrades',
'returnActiveLoans',
'returnLendingHistory',
'createLoanOffer',
'cancelLoanOffer',
'toggleAutoRenew',
'buy',
'sell',
'cancelOrder',
'moveOrder',
'withdraw',
'returnFeeInfo',
'transferBalance',
'returnMarginAccountSummary',
'marginBuy',
'marginSell',
'getMarginPosition',
'closeMarginPosition',
]
<commit_msg>Add DATE_FORMAT for parsing any datetime strings
Poloniex seems to use a fixed output format for datetime strings<commit_after>"""
Constant values for the Poloniex API
"""
PUBLIC_API = 'https://poloniex.com/public'
PRIVATE_API = 'https://poloniex.com/tradingApi'
PUBLIC_COMMANDS = [
'returnTicker',
'return24hVolume',
'returnOrderBook',
'returnTradeHistory',
'returnChartData',
'returnCurrencies',
'returnLoanOrders',
]
PRIVATE_COMMANDS = [
'returnBalances',
'returnCompleteBalances',
'returnDepositAddresses',
'generateNewAddress',
'returnDepositsWithdrawals',
'returnOpenOrders',
'returnTradeHistory',
'returnAvailableAccountBalances',
'returnTradableBalances',
'returnOpenLoanOffers',
'returnOrderTrades',
'returnActiveLoans',
'returnLendingHistory',
'createLoanOffer',
'cancelLoanOffer',
'toggleAutoRenew',
'buy',
'sell',
'cancelOrder',
'moveOrder',
'withdraw',
'returnFeeInfo',
'transferBalance',
'returnMarginAccountSummary',
'marginBuy',
'marginSell',
'getMarginPosition',
'closeMarginPosition',
]
DATE_FORMAT='%Y-%m-%d %H:%M:%S'
|
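As the commit message notes, Poloniex appears to emit timestamps in a single fixed layout, so the new constant can be handed straight to datetime.strptime. A small sketch with an illustrative timestamp string (not taken from the API):
from datetime import datetime
DATE_FORMAT = '%Y-%m-%d %H:%M:%S'
parsed = datetime.strptime('2017-06-01 13:45:30', DATE_FORMAT)
# parsed == datetime(2017, 6, 1, 13, 45, 30)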
74f8b419083189ba666459888d1427193c38873e
|
netdisco/discoverables/apple_tv.py
|
netdisco/discoverables/apple_tv.py
|
"""Discover Apple TV media players."""
from . import MDNSDiscoverable
# pylint: disable=too-few-public-methods
class Discoverable(MDNSDiscoverable):
"""Add support for Apple TV devices."""
def __init__(self, nd):
super(Discoverable, self).__init__(nd, '_appletv-v2._tcp.local.')
def info_from_entry(self, entry):
"""Returns most important info from mDNS entries."""
props = entry.properties
info = {
'name': props.get(b'Name').decode('utf-8').replace('\xa0', ' '),
'hsgid': props.get(b'hG').decode('utf-8')
}
return info
def get_info(self):
"""Get details from Apple TV instances."""
return [self.info_from_entry(entry) for entry in self.get_entries()]
|
"""Discover Apple TV media players."""
import ipaddress
from . import MDNSDiscoverable
# pylint: disable=too-few-public-methods
class Discoverable(MDNSDiscoverable):
"""Add support for Apple TV devices."""
def __init__(self, nd):
super(Discoverable, self).__init__(nd, '_appletv-v2._tcp.local.')
def info_from_entry(self, entry):
"""Returns most important info from mDNS entries."""
props = entry.properties
info = {
'host': str(ipaddress.ip_address(entry.address)),
'name': props.get(b'Name').decode('utf-8').replace('\xa0', ' '),
'hsgid': props.get(b'hG').decode('utf-8')
}
return info
def get_info(self):
"""Get details from Apple TV instances."""
return [self.info_from_entry(entry) for entry in self.get_entries()]
|
Add missing host field to Apple TV
|
Add missing host field to Apple TV
|
Python
|
mit
|
brburns/netdisco,balloob/netdisco
|
"""Discover Apple TV media players."""
from . import MDNSDiscoverable
# pylint: disable=too-few-public-methods
class Discoverable(MDNSDiscoverable):
"""Add support for Apple TV devices."""
def __init__(self, nd):
super(Discoverable, self).__init__(nd, '_appletv-v2._tcp.local.')
def info_from_entry(self, entry):
"""Returns most important info from mDNS entries."""
props = entry.properties
info = {
'name': props.get(b'Name').decode('utf-8').replace('\xa0', ' '),
'hsgid': props.get(b'hG').decode('utf-8')
}
return info
def get_info(self):
"""Get details from Apple TV instances."""
return [self.info_from_entry(entry) for entry in self.get_entries()]
Add missing host field to Apple TV
|
"""Discover Apple TV media players."""
import ipaddress
from . import MDNSDiscoverable
# pylint: disable=too-few-public-methods
class Discoverable(MDNSDiscoverable):
"""Add support for Apple TV devices."""
def __init__(self, nd):
super(Discoverable, self).__init__(nd, '_appletv-v2._tcp.local.')
def info_from_entry(self, entry):
"""Returns most important info from mDNS entries."""
props = entry.properties
info = {
'host': str(ipaddress.ip_address(entry.address)),
'name': props.get(b'Name').decode('utf-8').replace('\xa0', ' '),
'hsgid': props.get(b'hG').decode('utf-8')
}
return info
def get_info(self):
"""Get details from Apple TV instances."""
return [self.info_from_entry(entry) for entry in self.get_entries()]
|
<commit_before>"""Discover Apple TV media players."""
from . import MDNSDiscoverable
# pylint: disable=too-few-public-methods
class Discoverable(MDNSDiscoverable):
"""Add support for Apple TV devices."""
def __init__(self, nd):
super(Discoverable, self).__init__(nd, '_appletv-v2._tcp.local.')
def info_from_entry(self, entry):
"""Returns most important info from mDNS entries."""
props = entry.properties
info = {
'name': props.get(b'Name').decode('utf-8').replace('\xa0', ' '),
'hsgid': props.get(b'hG').decode('utf-8')
}
return info
def get_info(self):
"""Get details from Apple TV instances."""
return [self.info_from_entry(entry) for entry in self.get_entries()]
<commit_msg>Add missing host field to Apple TV<commit_after>
|
"""Discover Apple TV media players."""
import ipaddress
from . import MDNSDiscoverable
# pylint: disable=too-few-public-methods
class Discoverable(MDNSDiscoverable):
"""Add support for Apple TV devices."""
def __init__(self, nd):
super(Discoverable, self).__init__(nd, '_appletv-v2._tcp.local.')
def info_from_entry(self, entry):
"""Returns most important info from mDNS entries."""
props = entry.properties
info = {
'host': str(ipaddress.ip_address(entry.address)),
'name': props.get(b'Name').decode('utf-8').replace('\xa0', ' '),
'hsgid': props.get(b'hG').decode('utf-8')
}
return info
def get_info(self):
"""Get details from Apple TV instances."""
return [self.info_from_entry(entry) for entry in self.get_entries()]
|
"""Discover Apple TV media players."""
from . import MDNSDiscoverable
# pylint: disable=too-few-public-methods
class Discoverable(MDNSDiscoverable):
"""Add support for Apple TV devices."""
def __init__(self, nd):
super(Discoverable, self).__init__(nd, '_appletv-v2._tcp.local.')
def info_from_entry(self, entry):
"""Returns most important info from mDNS entries."""
props = entry.properties
info = {
'name': props.get(b'Name').decode('utf-8').replace('\xa0', ' '),
'hsgid': props.get(b'hG').decode('utf-8')
}
return info
def get_info(self):
"""Get details from Apple TV instances."""
return [self.info_from_entry(entry) for entry in self.get_entries()]
Add missing host field to Apple TV"""Discover Apple TV media players."""
import ipaddress
from . import MDNSDiscoverable
# pylint: disable=too-few-public-methods
class Discoverable(MDNSDiscoverable):
"""Add support for Apple TV devices."""
def __init__(self, nd):
super(Discoverable, self).__init__(nd, '_appletv-v2._tcp.local.')
def info_from_entry(self, entry):
"""Returns most important info from mDNS entries."""
props = entry.properties
info = {
'host': str(ipaddress.ip_address(entry.address)),
'name': props.get(b'Name').decode('utf-8').replace('\xa0', ' '),
'hsgid': props.get(b'hG').decode('utf-8')
}
return info
def get_info(self):
"""Get details from Apple TV instances."""
return [self.info_from_entry(entry) for entry in self.get_entries()]
|
<commit_before>"""Discover Apple TV media players."""
from . import MDNSDiscoverable
# pylint: disable=too-few-public-methods
class Discoverable(MDNSDiscoverable):
"""Add support for Apple TV devices."""
def __init__(self, nd):
super(Discoverable, self).__init__(nd, '_appletv-v2._tcp.local.')
def info_from_entry(self, entry):
"""Returns most important info from mDNS entries."""
props = entry.properties
info = {
'name': props.get(b'Name').decode('utf-8').replace('\xa0', ' '),
'hsgid': props.get(b'hG').decode('utf-8')
}
return info
def get_info(self):
"""Get details from Apple TV instances."""
return [self.info_from_entry(entry) for entry in self.get_entries()]
<commit_msg>Add missing host field to Apple TV<commit_after>"""Discover Apple TV media players."""
import ipaddress
from . import MDNSDiscoverable
# pylint: disable=too-few-public-methods
class Discoverable(MDNSDiscoverable):
"""Add support for Apple TV devices."""
def __init__(self, nd):
super(Discoverable, self).__init__(nd, '_appletv-v2._tcp.local.')
def info_from_entry(self, entry):
"""Returns most important info from mDNS entries."""
props = entry.properties
info = {
'host': str(ipaddress.ip_address(entry.address)),
'name': props.get(b'Name').decode('utf-8').replace('\xa0', ' '),
'hsgid': props.get(b'hG').decode('utf-8')
}
return info
def get_info(self):
"""Get details from Apple TV instances."""
return [self.info_from_entry(entry) for entry in self.get_entries()]
|
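The added host field leans on ipaddress.ip_address, which under Python 3 accepts either a dotted string or a packed 4-byte address and whose str() form is the dotted quad used as the host value. A minimal sketch with an illustrative address:
import ipaddress
packed = b'\xc0\xa8\x01\x0a'              # 192.168.1.10 packed into four bytes
print(str(ipaddress.ip_address(packed)))  # -> '192.168.1.10'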
2f074527b1c1a776d944aa4f487b2f35b388db28
|
cities_light/tests/test_unicode_decode_error.py
|
cities_light/tests/test_unicode_decode_error.py
|
from __future__ import unicode_literals
import os
import mock
from django import test
from django.core.management import call_command
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
FIXTURE_DIR = os.path.abspath(os.path.join(BASE_DIR, 'tests', 'fixtures'))
def mock_source(setting, short_name): # noqa
return mock.patch(
'cities_light.settings.%s_SOURCES' %
setting.upper(), ['file://%s/%s.txt' % (FIXTURE_DIR, short_name)])
class TestUnicodeDecodeError(test.TransactionTestCase):
"""Test case which demonstrates UnicodeDecodeError."""
@mock_source('city', 'kemerovo_city')
@mock_source('region', 'kemerovo_region')
@mock_source('country', 'kemerovo_country')
def test_unicode_decode_error(self):
"""."""
call_command('cities_light', force_import_all=True)
|
from __future__ import unicode_literals
import os
import mock
from django import test
from django.core.management import call_command
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
FIXTURE_DIR = os.path.abspath(os.path.join(BASE_DIR, 'tests', 'fixtures'))
def mock_source(setting, short_name): # noqa
return mock.patch(
'cities_light.management.commands.cities_light.%s_SOURCES' %
setting.upper(), ['file://%s/%s.txt' % (FIXTURE_DIR, short_name)])
class TestUnicodeDecodeError(test.TransactionTestCase):
"""Test case which demonstrates UnicodeDecodeError."""
@mock_source('city', 'kemerovo_city')
@mock_source('region', 'kemerovo_region')
@mock_source('country', 'kemerovo_country')
def test_unicode_decode_error(self):
"""."""
call_command('cities_light', force_import_all=True)
|
Patch settings from inside the command module
|
Patch settings from inside the command module
|
Python
|
mit
|
greenday2/django-cities-light,yourlabs/django-cities-light,max-arnold/django-cities-light,KevinGrahamFoster/django-cities-light,max-arnold/django-cities-light
|
from __future__ import unicode_literals
import os
import mock
from django import test
from django.core.management import call_command
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
FIXTURE_DIR = os.path.abspath(os.path.join(BASE_DIR, 'tests', 'fixtures'))
def mock_source(setting, short_name): # noqa
return mock.patch(
'cities_light.settings.%s_SOURCES' %
setting.upper(), ['file://%s/%s.txt' % (FIXTURE_DIR, short_name)])
class TestUnicodeDecodeError(test.TransactionTestCase):
"""Test case which demonstrates UnicodeDecodeError."""
@mock_source('city', 'kemerovo_city')
@mock_source('region', 'kemerovo_region')
@mock_source('country', 'kemerovo_country')
def test_unicode_decode_error(self):
"""."""
call_command('cities_light', force_import_all=True)
Patch settings from inside the command module
|
from __future__ import unicode_literals
import os
import mock
from django import test
from django.core.management import call_command
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
FIXTURE_DIR = os.path.abspath(os.path.join(BASE_DIR, 'tests', 'fixtures'))
def mock_source(setting, short_name): # noqa
return mock.patch(
'cities_light.management.commands.cities_light.%s_SOURCES' %
setting.upper(), ['file://%s/%s.txt' % (FIXTURE_DIR, short_name)])
class TestUnicodeDecodeError(test.TransactionTestCase):
"""Test case which demonstrates UnicodeDecodeError."""
@mock_source('city', 'kemerovo_city')
@mock_source('region', 'kemerovo_region')
@mock_source('country', 'kemerovo_country')
def test_unicode_decode_error(self):
"""."""
call_command('cities_light', force_import_all=True)
|
<commit_before>from __future__ import unicode_literals
import os
import mock
from django import test
from django.core.management import call_command
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
FIXTURE_DIR = os.path.abspath(os.path.join(BASE_DIR, 'tests', 'fixtures'))
def mock_source(setting, short_name): # noqa
return mock.patch(
'cities_light.settings.%s_SOURCES' %
setting.upper(), ['file://%s/%s.txt' % (FIXTURE_DIR, short_name)])
class TestUnicodeDecodeError(test.TransactionTestCase):
"""Test case which demonstrates UnicodeDecodeError."""
@mock_source('city', 'kemerovo_city')
@mock_source('region', 'kemerovo_region')
@mock_source('country', 'kemerovo_country')
def test_unicode_decode_error(self):
"""."""
call_command('cities_light', force_import_all=True)
<commit_msg>Patch settings from inside the command module<commit_after>
|
from __future__ import unicode_literals
import os
import mock
from django import test
from django.core.management import call_command
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
FIXTURE_DIR = os.path.abspath(os.path.join(BASE_DIR, 'tests', 'fixtures'))
def mock_source(setting, short_name): # noqa
return mock.patch(
'cities_light.management.commands.cities_light.%s_SOURCES' %
setting.upper(), ['file://%s/%s.txt' % (FIXTURE_DIR, short_name)])
class TestUnicodeDecodeError(test.TransactionTestCase):
"""Test case which demonstrates UnicodeDecodeError."""
@mock_source('city', 'kemerovo_city')
@mock_source('region', 'kemerovo_region')
@mock_source('country', 'kemerovo_country')
def test_unicode_decode_error(self):
"""."""
call_command('cities_light', force_import_all=True)
|
from __future__ import unicode_literals
import os
import mock
from django import test
from django.core.management import call_command
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
FIXTURE_DIR = os.path.abspath(os.path.join(BASE_DIR, 'tests', 'fixtures'))
def mock_source(setting, short_name): # noqa
return mock.patch(
'cities_light.settings.%s_SOURCES' %
setting.upper(), ['file://%s/%s.txt' % (FIXTURE_DIR, short_name)])
class TestUnicodeDecodeError(test.TransactionTestCase):
"""Test case which demonstrates UnicodeDecodeError."""
@mock_source('city', 'kemerovo_city')
@mock_source('region', 'kemerovo_region')
@mock_source('country', 'kemerovo_country')
def test_unicode_decode_error(self):
"""."""
call_command('cities_light', force_import_all=True)
Patch settings from inside the command modulefrom __future__ import unicode_literals
import os
import mock
from django import test
from django.core.management import call_command
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
FIXTURE_DIR = os.path.abspath(os.path.join(BASE_DIR, 'tests', 'fixtures'))
def mock_source(setting, short_name): # noqa
return mock.patch(
'cities_light.management.commands.cities_light.%s_SOURCES' %
setting.upper(), ['file://%s/%s.txt' % (FIXTURE_DIR, short_name)])
class TestUnicodeDecodeError(test.TransactionTestCase):
"""Test case which demonstrates UnicodeDecodeError."""
@mock_source('city', 'kemerovo_city')
@mock_source('region', 'kemerovo_region')
@mock_source('country', 'kemerovo_country')
def test_unicode_decode_error(self):
"""."""
call_command('cities_light', force_import_all=True)
|
<commit_before>from __future__ import unicode_literals
import os
import mock
from django import test
from django.core.management import call_command
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
FIXTURE_DIR = os.path.abspath(os.path.join(BASE_DIR, 'tests', 'fixtures'))
def mock_source(setting, short_name): # noqa
return mock.patch(
'cities_light.settings.%s_SOURCES' %
setting.upper(), ['file://%s/%s.txt' % (FIXTURE_DIR, short_name)])
class TestUnicodeDecodeError(test.TransactionTestCase):
"""Test case which demonstrates UnicodeDecodeError."""
@mock_source('city', 'kemerovo_city')
@mock_source('region', 'kemerovo_region')
@mock_source('country', 'kemerovo_country')
def test_unicode_decode_error(self):
"""."""
call_command('cities_light', force_import_all=True)
<commit_msg>Patch settings from inside the command module<commit_after>from __future__ import unicode_literals
import os
import mock
from django import test
from django.core.management import call_command
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
FIXTURE_DIR = os.path.abspath(os.path.join(BASE_DIR, 'tests', 'fixtures'))
def mock_source(setting, short_name): # noqa
return mock.patch(
'cities_light.management.commands.cities_light.%s_SOURCES' %
setting.upper(), ['file://%s/%s.txt' % (FIXTURE_DIR, short_name)])
class TestUnicodeDecodeError(test.TransactionTestCase):
"""Test case which demonstrates UnicodeDecodeError."""
@mock_source('city', 'kemerovo_city')
@mock_source('region', 'kemerovo_region')
@mock_source('country', 'kemerovo_country')
def test_unicode_decode_error(self):
"""."""
call_command('cities_light', force_import_all=True)
|
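The fix in this record follows the usual mock guidance: patch the name in the namespace where it is looked up (the management command module), not the module where it was originally defined, otherwise the command keeps using the value it already imported. A hedged sketch of the resulting patch target, with an illustrative fixture path rather than the real FIXTURE_DIR file:
import mock
patched = mock.patch(
    'cities_light.management.commands.cities_light.CITY_SOURCES',
    ['file:///tmp/kemerovo_city.txt'])  # illustrative path, not the repository fixture
with patched:
    pass  # call_command('cities_light', force_import_all=True) would see the patched list here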
4486fba6dd75dab67c25221653f2384455eda9be
|
tests/test_sorting_and_searching/test_binary_search.py
|
tests/test_sorting_and_searching/test_binary_search.py
|
import unittest
from sorting_and_searching import binary_search_recursive
class BinarySearchTestCase(unittest.TestCase):
'''
Unit tests for binary search
'''
def setUp(self):
self.example_1 = [2, 3, 4, 10, 40]
def test_binary_search_recursive(self):
result = binary_search_recursive(self.example_1, 0, len(self.example_1) - 1, 10)
self.assertEqual(result,3)
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
|
import unittest
from aids.sorting_and_searching.binary_search import binary_search_recursive, binary_search_iterative
class BinarySearchTestCase(unittest.TestCase):
'''
Unit tests for binary search
'''
def setUp(self):
self.example_1 = [2, 3, 4, 10, 40]
def test_binary_search_recursive(self):
result = binary_search_recursive(self.example_1, 0, len(self.example_1) - 1, 10)
self.assertEqual(result,3)
def test_binary_search_iterative(self):
result = binary_search_iterative(self.example_1, 0, len(self.example_1) - 1, 10)
self.assertEqual(result,3)
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
|
Add unit tests for binary search recursive and iterative
|
Add unit tests for binary search recursive and iterative
|
Python
|
mit
|
ueg1990/aids
|
import unittest
from sorting_and_searching import binary_search_recursive
class BinarySearchTestCase(unittest.TestCase):
'''
Unit tests for binary search
'''
def setUp(self):
self.example_1 = [2, 3, 4, 10, 40]
def test_binary_search_recursive(self):
result = binary_search_recursive(self.example_1, 0, len(self.example_1) - 1, 10)
self.assertEqual(result,3)
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
Add unit tests for binary search recursive and iterative
|
import unittest
from aids.sorting_and_searching.binary_search import binary_search_recursive, binary_search_iterative
class BinarySearchTestCase(unittest.TestCase):
'''
Unit tests for binary search
'''
def setUp(self):
self.example_1 = [2, 3, 4, 10, 40]
def test_binary_search_recursive(self):
result = binary_search_recursive(self.example_1, 0, len(self.example_1) - 1, 10)
self.assertEqual(result,3)
def test_binary_search_iterative(self):
result = binary_search_iterative(self.example_1, 0, len(self.example_1) - 1, 10)
self.assertEqual(result,3)
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from sorting_and_searching import binary_search_recursive
class BinarySearchTestCase(unittest.TestCase):
'''
Unit tests for binary search
'''
def setUp(self):
self.example_1 = [2, 3, 4, 10, 40]
def test_binary_search_recursive(self):
result = binary_search_recursive(self.example_1, 0, len(self.example_1) - 1, 10)
self.assertEqual(result,3)
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
<commit_msg>Add unit tests for binary search recursive and iterative<commit_after>
|
import unittest
from aids.sorting_and_searching.binary_search import binary_search_recursive, binary_search_iterative
class BinarySearchTestCase(unittest.TestCase):
'''
Unit tests for binary search
'''
def setUp(self):
self.example_1 = [2, 3, 4, 10, 40]
def test_binary_search_recursive(self):
result = binary_search_recursive(self.example_1, 0, len(self.example_1) - 1, 10)
self.assertEqual(result,3)
def test_binary_search_iterative(self):
result = binary_search_iterative(self.example_1, 0, len(self.example_1) - 1, 10)
self.assertEqual(result,3)
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
|
import unittest
from sorting_and_searching import binary_search_recursive
class BinarySearchTestCase(unittest.TestCase):
'''
Unit tests for binary search
'''
def setUp(self):
self.example_1 = [2, 3, 4, 10, 40]
def test_binary_search_recursive(self):
result = binary_search_recursive(self.example_1, 0, len(self.example_1) - 1, 10)
self.assertEqual(result,3)
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
Add unit tests for binary search recursive and iterativeimport unittest
from aids.sorting_and_searching.binary_search import binary_search_recursive, binary_search_iterative
class BinarySearchTestCase(unittest.TestCase):
'''
Unit tests for binary search
'''
def setUp(self):
self.example_1 = [2, 3, 4, 10, 40]
def test_binary_search_recursive(self):
result = binary_search_recursive(self.example_1, 0, len(self.example_1) - 1, 10)
self.assertEqual(result,3)
def test_binary_search_iterative(self):
result = binary_search_iterative(self.example_1, 0, len(self.example_1) - 1, 10)
self.assertEqual(result,3)
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from sorting_and_searching import binary_search_recursive
class BinarySearchTestCase(unittest.TestCase):
'''
Unit tests for binary search
'''
def setUp(self):
self.example_1 = [2, 3, 4, 10, 40]
def test_binary_search_recursive(self):
result = binary_search_recursive(self.example_1, 0, len(self.example_1) - 1, 10)
self.assertEqual(result,3)
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
<commit_msg>Add unit tests for binary search recursive and iterative<commit_after>import unittest
from aids.sorting_and_searching.binary_search import binary_search_recursive, binary_search_iterative
class BinarySearchTestCase(unittest.TestCase):
'''
Unit tests for binary search
'''
def setUp(self):
self.example_1 = [2, 3, 4, 10, 40]
def test_binary_search_recursive(self):
result = binary_search_recursive(self.example_1, 0, len(self.example_1) - 1, 10)
self.assertEqual(result,3)
def test_binary_search_iterative(self):
result = binary_search_iterative(self.example_1, 0, len(self.example_1) - 1, 10)
self.assertEqual(result,3)
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main()
|
b85d2605224ac3d079c71c93e9d94329366ec0f9
|
functest/utils/env.py
|
functest/utils/env.py
|
import os
import re
default_envs = {
'NODE_NAME': 'unknown_pod',
'CI_DEBUG': 'true',
'DEPLOY_SCENARIO': 'os-nosdn-nofeature-noha',
'DEPLOY_TYPE': 'virt',
'INSTALLER_TYPE': None,
'INSTALLER_IP': None,
'BUILD_TAG': None,
'OS_ENDPOINT_TYPE': None,
'OS_AUTH_URL': None
}
class Environment(object):
def __init__(self):
for k, v in os.environ.iteritems():
self.__setattr__(k, v)
for k, v in default_envs.iteritems():
if k not in os.environ:
self.__setattr__(k, v)
self._set_ci_run()
self._set_ci_loop()
def _set_ci_run(self):
if self.BUILD_TAG:
self.IS_CI_RUN = True
else:
self.IS_CI_RUN = False
def _set_ci_loop(self):
if self.BUILD_TAG and re.search("daily", self.BUILD_TAG):
self.CI_LOOP = "daily"
else:
self.CI_LOOP = "weekly"
ENV = Environment()
|
import os
import re
default_envs = {
'NODE_NAME': 'unknown_pod',
'CI_DEBUG': 'false',
'DEPLOY_SCENARIO': 'os-nosdn-nofeature-noha',
'DEPLOY_TYPE': 'virt',
'INSTALLER_TYPE': None,
'INSTALLER_IP': None,
'BUILD_TAG': None,
'OS_ENDPOINT_TYPE': None,
'OS_AUTH_URL': None
}
class Environment(object):
def __init__(self):
for k, v in os.environ.iteritems():
self.__setattr__(k, v)
for k, v in default_envs.iteritems():
if k not in os.environ:
self.__setattr__(k, v)
self._set_ci_run()
self._set_ci_loop()
def _set_ci_run(self):
if self.BUILD_TAG:
self.IS_CI_RUN = True
else:
self.IS_CI_RUN = False
def _set_ci_loop(self):
if self.BUILD_TAG and re.search("daily", self.BUILD_TAG):
self.CI_LOOP = "daily"
else:
self.CI_LOOP = "weekly"
ENV = Environment()
|
Undo setting CI_DEBUG to true by default
|
Undo setting CI_DEBUG to true by default
Change-Id: I24ca35ada7591e93413cdda1905ee01f77131889
Signed-off-by: Romanos Skiadas <2ae8a933f732975064e7c256d0625d1633389b98@intracom-telecom.com>
|
Python
|
apache-2.0
|
opnfv/functest,opnfv/functest,mywulin/functest,mywulin/functest
|
import os
import re
default_envs = {
'NODE_NAME': 'unknown_pod',
'CI_DEBUG': 'true',
'DEPLOY_SCENARIO': 'os-nosdn-nofeature-noha',
'DEPLOY_TYPE': 'virt',
'INSTALLER_TYPE': None,
'INSTALLER_IP': None,
'BUILD_TAG': None,
'OS_ENDPOINT_TYPE': None,
'OS_AUTH_URL': None
}
class Environment(object):
def __init__(self):
for k, v in os.environ.iteritems():
self.__setattr__(k, v)
for k, v in default_envs.iteritems():
if k not in os.environ:
self.__setattr__(k, v)
self._set_ci_run()
self._set_ci_loop()
def _set_ci_run(self):
if self.BUILD_TAG:
self.IS_CI_RUN = True
else:
self.IS_CI_RUN = False
def _set_ci_loop(self):
if self.BUILD_TAG and re.search("daily", self.BUILD_TAG):
self.CI_LOOP = "daily"
else:
self.CI_LOOP = "weekly"
ENV = Environment()
Undo setting CI_DEBUG to true by default
Change-Id: I24ca35ada7591e93413cdda1905ee01f77131889
Signed-off-by: Romanos Skiadas <2ae8a933f732975064e7c256d0625d1633389b98@intracom-telecom.com>
|
import os
import re
default_envs = {
'NODE_NAME': 'unknown_pod',
'CI_DEBUG': 'false',
'DEPLOY_SCENARIO': 'os-nosdn-nofeature-noha',
'DEPLOY_TYPE': 'virt',
'INSTALLER_TYPE': None,
'INSTALLER_IP': None,
'BUILD_TAG': None,
'OS_ENDPOINT_TYPE': None,
'OS_AUTH_URL': None
}
class Environment(object):
def __init__(self):
for k, v in os.environ.iteritems():
self.__setattr__(k, v)
for k, v in default_envs.iteritems():
if k not in os.environ:
self.__setattr__(k, v)
self._set_ci_run()
self._set_ci_loop()
def _set_ci_run(self):
if self.BUILD_TAG:
self.IS_CI_RUN = True
else:
self.IS_CI_RUN = False
def _set_ci_loop(self):
if self.BUILD_TAG and re.search("daily", self.BUILD_TAG):
self.CI_LOOP = "daily"
else:
self.CI_LOOP = "weekly"
ENV = Environment()
|
<commit_before>import os
import re
default_envs = {
'NODE_NAME': 'unknown_pod',
'CI_DEBUG': 'true',
'DEPLOY_SCENARIO': 'os-nosdn-nofeature-noha',
'DEPLOY_TYPE': 'virt',
'INSTALLER_TYPE': None,
'INSTALLER_IP': None,
'BUILD_TAG': None,
'OS_ENDPOINT_TYPE': None,
'OS_AUTH_URL': None
}
class Environment(object):
def __init__(self):
for k, v in os.environ.iteritems():
self.__setattr__(k, v)
for k, v in default_envs.iteritems():
if k not in os.environ:
self.__setattr__(k, v)
self._set_ci_run()
self._set_ci_loop()
def _set_ci_run(self):
if self.BUILD_TAG:
self.IS_CI_RUN = True
else:
self.IS_CI_RUN = False
def _set_ci_loop(self):
if self.BUILD_TAG and re.search("daily", self.BUILD_TAG):
self.CI_LOOP = "daily"
else:
self.CI_LOOP = "weekly"
ENV = Environment()
<commit_msg>Undo setting CI_DEBUG to true by default
Change-Id: I24ca35ada7591e93413cdda1905ee01f77131889
Signed-off-by: Romanos Skiadas <2ae8a933f732975064e7c256d0625d1633389b98@intracom-telecom.com><commit_after>
|
import os
import re
default_envs = {
'NODE_NAME': 'unknown_pod',
'CI_DEBUG': 'false',
'DEPLOY_SCENARIO': 'os-nosdn-nofeature-noha',
'DEPLOY_TYPE': 'virt',
'INSTALLER_TYPE': None,
'INSTALLER_IP': None,
'BUILD_TAG': None,
'OS_ENDPOINT_TYPE': None,
'OS_AUTH_URL': None
}
class Environment(object):
def __init__(self):
for k, v in os.environ.iteritems():
self.__setattr__(k, v)
for k, v in default_envs.iteritems():
if k not in os.environ:
self.__setattr__(k, v)
self._set_ci_run()
self._set_ci_loop()
def _set_ci_run(self):
if self.BUILD_TAG:
self.IS_CI_RUN = True
else:
self.IS_CI_RUN = False
def _set_ci_loop(self):
if self.BUILD_TAG and re.search("daily", self.BUILD_TAG):
self.CI_LOOP = "daily"
else:
self.CI_LOOP = "weekly"
ENV = Environment()
|
import os
import re
default_envs = {
'NODE_NAME': 'unknown_pod',
'CI_DEBUG': 'true',
'DEPLOY_SCENARIO': 'os-nosdn-nofeature-noha',
'DEPLOY_TYPE': 'virt',
'INSTALLER_TYPE': None,
'INSTALLER_IP': None,
'BUILD_TAG': None,
'OS_ENDPOINT_TYPE': None,
'OS_AUTH_URL': None
}
class Environment(object):
def __init__(self):
for k, v in os.environ.iteritems():
self.__setattr__(k, v)
for k, v in default_envs.iteritems():
if k not in os.environ:
self.__setattr__(k, v)
self._set_ci_run()
self._set_ci_loop()
def _set_ci_run(self):
if self.BUILD_TAG:
self.IS_CI_RUN = True
else:
self.IS_CI_RUN = False
def _set_ci_loop(self):
if self.BUILD_TAG and re.search("daily", self.BUILD_TAG):
self.CI_LOOP = "daily"
else:
self.CI_LOOP = "weekly"
ENV = Environment()
Undo setting CI_DEBUG to true by default
Change-Id: I24ca35ada7591e93413cdda1905ee01f77131889
Signed-off-by: Romanos Skiadas <2ae8a933f732975064e7c256d0625d1633389b98@intracom-telecom.com>
import os
import re
default_envs = {
'NODE_NAME': 'unknown_pod',
'CI_DEBUG': 'false',
'DEPLOY_SCENARIO': 'os-nosdn-nofeature-noha',
'DEPLOY_TYPE': 'virt',
'INSTALLER_TYPE': None,
'INSTALLER_IP': None,
'BUILD_TAG': None,
'OS_ENDPOINT_TYPE': None,
'OS_AUTH_URL': None
}
class Environment(object):
def __init__(self):
for k, v in os.environ.iteritems():
self.__setattr__(k, v)
for k, v in default_envs.iteritems():
if k not in os.environ:
self.__setattr__(k, v)
self._set_ci_run()
self._set_ci_loop()
def _set_ci_run(self):
if self.BUILD_TAG:
self.IS_CI_RUN = True
else:
self.IS_CI_RUN = False
def _set_ci_loop(self):
if self.BUILD_TAG and re.search("daily", self.BUILD_TAG):
self.CI_LOOP = "daily"
else:
self.CI_LOOP = "weekly"
ENV = Environment()
|
<commit_before>import os
import re
default_envs = {
'NODE_NAME': 'unknown_pod',
'CI_DEBUG': 'true',
'DEPLOY_SCENARIO': 'os-nosdn-nofeature-noha',
'DEPLOY_TYPE': 'virt',
'INSTALLER_TYPE': None,
'INSTALLER_IP': None,
'BUILD_TAG': None,
'OS_ENDPOINT_TYPE': None,
'OS_AUTH_URL': None
}
class Environment(object):
def __init__(self):
for k, v in os.environ.iteritems():
self.__setattr__(k, v)
for k, v in default_envs.iteritems():
if k not in os.environ:
self.__setattr__(k, v)
self._set_ci_run()
self._set_ci_loop()
def _set_ci_run(self):
if self.BUILD_TAG:
self.IS_CI_RUN = True
else:
self.IS_CI_RUN = False
def _set_ci_loop(self):
if self.BUILD_TAG and re.search("daily", self.BUILD_TAG):
self.CI_LOOP = "daily"
else:
self.CI_LOOP = "weekly"
ENV = Environment()
<commit_msg>Undo setting CI_DEBUG to true by default
Change-Id: I24ca35ada7591e93413cdda1905ee01f77131889
Signed-off-by: Romanos Skiadas <2ae8a933f732975064e7c256d0625d1633389b98@intracom-telecom.com><commit_after>import os
import re
default_envs = {
'NODE_NAME': 'unknown_pod',
'CI_DEBUG': 'false',
'DEPLOY_SCENARIO': 'os-nosdn-nofeature-noha',
'DEPLOY_TYPE': 'virt',
'INSTALLER_TYPE': None,
'INSTALLER_IP': None,
'BUILD_TAG': None,
'OS_ENDPOINT_TYPE': None,
'OS_AUTH_URL': None
}
class Environment(object):
def __init__(self):
for k, v in os.environ.iteritems():
self.__setattr__(k, v)
for k, v in default_envs.iteritems():
if k not in os.environ:
self.__setattr__(k, v)
self._set_ci_run()
self._set_ci_loop()
def _set_ci_run(self):
if self.BUILD_TAG:
self.IS_CI_RUN = True
else:
self.IS_CI_RUN = False
def _set_ci_loop(self):
if self.BUILD_TAG and re.search("daily", self.BUILD_TAG):
self.CI_LOOP = "daily"
else:
self.CI_LOOP = "weekly"
ENV = Environment()
|
92204c154ab964d02faade72642a395356f1fa9b
|
aorun/losses.py
|
aorun/losses.py
|
import torch
def mean_squared_error(true, pred):
return torch.mean((true - pred)**2)
def binary_crossentropy(true, pred, eps=1e-9):
p1 = true * torch.log(pred + eps)
p2 = (1 - true) * torch.log(1 - pred + eps)
return torch.mean(-(p1 + p2))
def categorical_crossentropy(true, pred, eps=1e-9):
return torch.mean(-torch.sum(true * torch.log(pred + eps), dim=1))
# aliases short names
mse = mean_squared_error
def get(obj):
if callable(obj):
return obj
elif type(obj) is str:
if obj in globals():
return globals()[obj]
else:
raise Exception(f'Unknown objective: {obj}')
else:
raise Exception('Objective must be a callable or str')
|
import torch
def mean_squared_error(true, pred):
return ((true - pred)**2).mean()
def binary_crossentropy(true, pred, eps=1e-9):
p1 = true * torch.log(pred + eps)
p2 = (1 - true) * torch.log(1 - pred + eps)
return torch.mean(-(p1 + p2))
def categorical_crossentropy(true, pred, eps=1e-9):
return torch.mean(-torch.sum(true * torch.log(pred + eps), dim=1))
# aliases short names
mse = mean_squared_error
def get(obj):
if callable(obj):
return obj
elif type(obj) is str:
if obj in globals():
return globals()[obj]
else:
raise Exception(f'Unknown loss: {obj}')
else:
raise Exception('Loss must be a callable or str')
|
Change error message to loss
|
Change error message to loss
|
Python
|
mit
|
ramon-oliveira/aorun
|
import torch
def mean_squared_error(true, pred):
return torch.mean((true - pred)**2)
def binary_crossentropy(true, pred, eps=1e-9):
p1 = true * torch.log(pred + eps)
p2 = (1 - true) * torch.log(1 - pred + eps)
return torch.mean(-(p1 + p2))
def categorical_crossentropy(true, pred, eps=1e-9):
return torch.mean(-torch.sum(true * torch.log(pred + eps), dim=1))
# aliases short names
mse = mean_squared_error
def get(obj):
if callable(obj):
return obj
elif type(obj) is str:
if obj in globals():
return globals()[obj]
else:
raise Exception(f'Unknown objective: {obj}')
else:
raise Exception('Objective must be a callable or str')
Change error message to loss
|
import torch
def mean_squared_error(true, pred):
return ((true - pred)**2).mean()
def binary_crossentropy(true, pred, eps=1e-9):
p1 = true * torch.log(pred + eps)
p2 = (1 - true) * torch.log(1 - pred + eps)
return torch.mean(-(p1 + p2))
def categorical_crossentropy(true, pred, eps=1e-9):
return torch.mean(-torch.sum(true * torch.log(pred + eps), dim=1))
# aliases short names
mse = mean_squared_error
def get(obj):
if callable(obj):
return obj
elif type(obj) is str:
if obj in globals():
return globals()[obj]
else:
raise Exception(f'Unknown loss: {obj}')
else:
raise Exception('Loss must be a callable or str')
|
<commit_before>import torch
def mean_squared_error(true, pred):
return torch.mean((true - pred)**2)
def binary_crossentropy(true, pred, eps=1e-9):
p1 = true * torch.log(pred + eps)
p2 = (1 - true) * torch.log(1 - pred + eps)
return torch.mean(-(p1 + p2))
def categorical_crossentropy(true, pred, eps=1e-9):
return torch.mean(-torch.sum(true * torch.log(pred + eps), dim=1))
# aliases short names
mse = mean_squared_error
def get(obj):
if callable(obj):
return obj
elif type(obj) is str:
if obj in globals():
return globals()[obj]
else:
raise Exception(f'Unknown objective: {obj}')
else:
raise Exception('Objective must be a callable or str')
<commit_msg>Change error message to loss<commit_after>
|
import torch
def mean_squared_error(true, pred):
return ((true - pred)**2).mean()
def binary_crossentropy(true, pred, eps=1e-9):
p1 = true * torch.log(pred + eps)
p2 = (1 - true) * torch.log(1 - pred + eps)
return torch.mean(-(p1 + p2))
def categorical_crossentropy(true, pred, eps=1e-9):
return torch.mean(-torch.sum(true * torch.log(pred + eps), dim=1))
# aliases short names
mse = mean_squared_error
def get(obj):
if callable(obj):
return obj
elif type(obj) is str:
if obj in globals():
return globals()[obj]
else:
raise Exception(f'Unknown loss: {obj}')
else:
raise Exception('Loss must be a callable or str')
|
import torch
def mean_squared_error(true, pred):
return torch.mean((true - pred)**2)
def binary_crossentropy(true, pred, eps=1e-9):
p1 = true * torch.log(pred + eps)
p2 = (1 - true) * torch.log(1 - pred + eps)
return torch.mean(-(p1 + p2))
def categorical_crossentropy(true, pred, eps=1e-9):
return torch.mean(-torch.sum(true * torch.log(pred + eps), dim=1))
# aliases short names
mse = mean_squared_error
def get(obj):
if callable(obj):
return obj
elif type(obj) is str:
if obj in globals():
return globals()[obj]
else:
raise Exception(f'Unknown objective: {obj}')
else:
raise Exception('Objective must be a callable or str')
Change error message to loss
import torch
def mean_squared_error(true, pred):
return ((true - pred)**2).mean()
def binary_crossentropy(true, pred, eps=1e-9):
p1 = true * torch.log(pred + eps)
p2 = (1 - true) * torch.log(1 - pred + eps)
return torch.mean(-(p1 + p2))
def categorical_crossentropy(true, pred, eps=1e-9):
return torch.mean(-torch.sum(true * torch.log(pred + eps), dim=1))
# aliases short names
mse = mean_squared_error
def get(obj):
if callable(obj):
return obj
elif type(obj) is str:
if obj in globals():
return globals()[obj]
else:
raise Exception(f'Unknown loss: {obj}')
else:
raise Exception('Loss must be a callable or str')
|
<commit_before>import torch
def mean_squared_error(true, pred):
return torch.mean((true - pred)**2)
def binary_crossentropy(true, pred, eps=1e-9):
p1 = true * torch.log(pred + eps)
p2 = (1 - true) * torch.log(1 - pred + eps)
return torch.mean(-(p1 + p2))
def categorical_crossentropy(true, pred, eps=1e-9):
return torch.mean(-torch.sum(true * torch.log(pred + eps), dim=1))
# aliases short names
mse = mean_squared_error
def get(obj):
if callable(obj):
return obj
elif type(obj) is str:
if obj in globals():
return globals()[obj]
else:
raise Exception(f'Unknown objective: {obj}')
else:
raise Exception('Objective must be a callable or str')
<commit_msg>Change error message to loss<commit_after>import torch
def mean_squared_error(true, pred):
return ((true - pred)**2).mean()
def binary_crossentropy(true, pred, eps=1e-9):
p1 = true * torch.log(pred + eps)
p2 = (1 - true) * torch.log(1 - pred + eps)
return torch.mean(-(p1 + p2))
def categorical_crossentropy(true, pred, eps=1e-9):
return torch.mean(-torch.sum(true * torch.log(pred + eps), dim=1))
# aliases short names
mse = mean_squared_error
def get(obj):
if callable(obj):
return obj
elif type(obj) is str:
if obj in globals():
return globals()[obj]
else:
raise Exception(f'Unknown loss: {obj}')
else:
raise Exception('Loss must be a callable or str')
|
b28c448261d8310a801fe8824ab2852fd50960da
|
zinnia/urls/shortlink.py
|
zinnia/urls/shortlink.py
|
"""Urls for the Zinnia entries short link"""
from django.conf.urls import url
from django.conf.urls import patterns
from zinnia.views.shortlink import EntryShortLink
urlpatterns = patterns(
'',
url(r'^e(?P<token>[\da-z]+)/$',
EntryShortLink.as_view(),
name='entry_shortlink'),
)
|
"""Urls for the Zinnia entries short link"""
from django.conf.urls import url
from django.conf.urls import patterns
from zinnia.views.shortlink import EntryShortLink
urlpatterns = patterns(
'',
url(r'^(?P<token>[\da-z]+)/$',
EntryShortLink.as_view(),
name='entry_shortlink'),
)
|
Revert "Add a "e" prefix to avoid issue when reaching the ID 46656"
|
Revert "Add a "e" prefix to avoid issue when reaching the ID 46656"
This reverts commit e730c552c0b6095a8962f29a114069fb335d7ec6.
|
Python
|
bsd-3-clause
|
aorzh/django-blog-zinnia,bywbilly/django-blog-zinnia,ghachey/django-blog-zinnia,marctc/django-blog-zinnia,dapeng0802/django-blog-zinnia,dapeng0802/django-blog-zinnia,aorzh/django-blog-zinnia,petecummings/django-blog-zinnia,ghachey/django-blog-zinnia,bywbilly/django-blog-zinnia,extertioner/django-blog-zinnia,Maplecroft/django-blog-zinnia,Maplecroft/django-blog-zinnia,aorzh/django-blog-zinnia,Maplecroft/django-blog-zinnia,Zopieux/django-blog-zinnia,ZuluPro/django-blog-zinnia,1844144/django-blog-zinnia,Fantomas42/django-blog-zinnia,ghachey/django-blog-zinnia,Zopieux/django-blog-zinnia,ZuluPro/django-blog-zinnia,marctc/django-blog-zinnia,dapeng0802/django-blog-zinnia,ZuluPro/django-blog-zinnia,Fantomas42/django-blog-zinnia,1844144/django-blog-zinnia,extertioner/django-blog-zinnia,Fantomas42/django-blog-zinnia,petecummings/django-blog-zinnia,1844144/django-blog-zinnia,petecummings/django-blog-zinnia,bywbilly/django-blog-zinnia,marctc/django-blog-zinnia,extertioner/django-blog-zinnia,Zopieux/django-blog-zinnia
|
"""Urls for the Zinnia entries short link"""
from django.conf.urls import url
from django.conf.urls import patterns
from zinnia.views.shortlink import EntryShortLink
urlpatterns = patterns(
'',
url(r'^e(?P<token>[\da-z]+)/$',
EntryShortLink.as_view(),
name='entry_shortlink'),
)
Revert "Add a "e" prefix to avoid issue when reaching the ID 46656"
This reverts commit e730c552c0b6095a8962f29a114069fb335d7ec6.
|
"""Urls for the Zinnia entries short link"""
from django.conf.urls import url
from django.conf.urls import patterns
from zinnia.views.shortlink import EntryShortLink
urlpatterns = patterns(
'',
url(r'^(?P<token>[\da-z]+)/$',
EntryShortLink.as_view(),
name='entry_shortlink'),
)
|
<commit_before>"""Urls for the Zinnia entries short link"""
from django.conf.urls import url
from django.conf.urls import patterns
from zinnia.views.shortlink import EntryShortLink
urlpatterns = patterns(
'',
url(r'^e(?P<token>[\da-z]+)/$',
EntryShortLink.as_view(),
name='entry_shortlink'),
)
<commit_msg>Revert "Add a "e" prefix to avoid issue when reaching the ID 46656"
This reverts commit e730c552c0b6095a8962f29a114069fb335d7ec6.<commit_after>
|
"""Urls for the Zinnia entries short link"""
from django.conf.urls import url
from django.conf.urls import patterns
from zinnia.views.shortlink import EntryShortLink
urlpatterns = patterns(
'',
url(r'^(?P<token>[\da-z]+)/$',
EntryShortLink.as_view(),
name='entry_shortlink'),
)
|
"""Urls for the Zinnia entries short link"""
from django.conf.urls import url
from django.conf.urls import patterns
from zinnia.views.shortlink import EntryShortLink
urlpatterns = patterns(
'',
url(r'^e(?P<token>[\da-z]+)/$',
EntryShortLink.as_view(),
name='entry_shortlink'),
)
Revert "Add a "e" prefix to avoid issue when reaching the ID 46656"
This reverts commit e730c552c0b6095a8962f29a114069fb335d7ec6.
"""Urls for the Zinnia entries short link"""
from django.conf.urls import url
from django.conf.urls import patterns
from zinnia.views.shortlink import EntryShortLink
urlpatterns = patterns(
'',
url(r'^(?P<token>[\da-z]+)/$',
EntryShortLink.as_view(),
name='entry_shortlink'),
)
|
<commit_before>"""Urls for the Zinnia entries short link"""
from django.conf.urls import url
from django.conf.urls import patterns
from zinnia.views.shortlink import EntryShortLink
urlpatterns = patterns(
'',
url(r'^e(?P<token>[\da-z]+)/$',
EntryShortLink.as_view(),
name='entry_shortlink'),
)
<commit_msg>Revert "Add a "e" prefix to avoid issue when reaching the ID 46656"
This reverts commit e730c552c0b6095a8962f29a114069fb335d7ec6.<commit_after>"""Urls for the Zinnia entries short link"""
from django.conf.urls import url
from django.conf.urls import patterns
from zinnia.views.shortlink import EntryShortLink
urlpatterns = patterns(
'',
url(r'^(?P<token>[\da-z]+)/$',
EntryShortLink.as_view(),
name='entry_shortlink'),
)
|
36950cf9cffd5083408bc257e37a846835029d58
|
symposion/speakers/admin.py
|
symposion/speakers/admin.py
|
from django.contrib import admin
from markedit.admin import MarkEditAdmin
from symposion.speakers.models import Speaker
class SpeakerAdmin(MarkEditAdmin):
list_display = ["name", "email", "created", "twitter_username"]
search_fields = ["name", "twitter_username"]
class MarkEdit:
fields = ['biography', ]
options = {
'preview': 'below'
}
admin.site.register(Speaker, SpeakerAdmin)
|
from django.contrib import admin
from markedit.admin import MarkEditAdmin
from symposion.speakers.models import Speaker
class SpeakerAdmin(MarkEditAdmin):
list_display = ["name", "email", "created", "twitter_username"]
raw_id_fields = ["user"]
search_fields = ["name", "twitter_username"]
class MarkEdit:
fields = ['biography', ]
options = {
'preview': 'below'
}
admin.site.register(Speaker, SpeakerAdmin)
|
Fix user selection for speaker add
|
Fix user selection for speaker add
When adding a speaker in the admin, the staff person had to
pick a user from a huge dropdown with all the users, unsorted.
Change 'user' to a raw id field, meaning to pick a user, the staff member clicks a magnifying glass icon next to the field and gets a popup listing all the users in an admin list page with sortable columns and search.
|
Python
|
bsd-3-clause
|
PyCon/pycon,PyCon/pycon,PyCon/pycon,PyCon/pycon
|
from django.contrib import admin
from markedit.admin import MarkEditAdmin
from symposion.speakers.models import Speaker
class SpeakerAdmin(MarkEditAdmin):
list_display = ["name", "email", "created", "twitter_username"]
search_fields = ["name", "twitter_username"]
class MarkEdit:
fields = ['biography', ]
options = {
'preview': 'below'
}
admin.site.register(Speaker, SpeakerAdmin)
Fix user selection for speaker add
When adding a speaker in the admin, the staff person had to
pick a user from a huge dropdown with all the users, unsorted.
Change 'user' to a raw id field, meaning to pick a user, the staff member clicks a magnifying glass icon next to the field and gets a popup listing all the users in an admin list page with sortable columns and search.
|
from django.contrib import admin
from markedit.admin import MarkEditAdmin
from symposion.speakers.models import Speaker
class SpeakerAdmin(MarkEditAdmin):
list_display = ["name", "email", "created", "twitter_username"]
raw_id_fields = ["user"]
search_fields = ["name", "twitter_username"]
class MarkEdit:
fields = ['biography', ]
options = {
'preview': 'below'
}
admin.site.register(Speaker, SpeakerAdmin)
|
<commit_before>from django.contrib import admin
from markedit.admin import MarkEditAdmin
from symposion.speakers.models import Speaker
class SpeakerAdmin(MarkEditAdmin):
list_display = ["name", "email", "created", "twitter_username"]
search_fields = ["name", "twitter_username"]
class MarkEdit:
fields = ['biography', ]
options = {
'preview': 'below'
}
admin.site.register(Speaker, SpeakerAdmin)
<commit_msg>Fix user selection for speaker add
When adding a speaker in the admin, the staff person had to
pick a user from a huge dropdown with all the users, unsorted.
Change 'user' to a raw id field, meaning to pick a user, the staff member clicks a magnifying glass icon next to the field and gets a popup listing all the users in an admin list page with sortable columns and search.<commit_after>
|
from django.contrib import admin
from markedit.admin import MarkEditAdmin
from symposion.speakers.models import Speaker
class SpeakerAdmin(MarkEditAdmin):
list_display = ["name", "email", "created", "twitter_username"]
raw_id_fields = ["user"]
search_fields = ["name", "twitter_username"]
class MarkEdit:
fields = ['biography', ]
options = {
'preview': 'below'
}
admin.site.register(Speaker, SpeakerAdmin)
|
from django.contrib import admin
from markedit.admin import MarkEditAdmin
from symposion.speakers.models import Speaker
class SpeakerAdmin(MarkEditAdmin):
list_display = ["name", "email", "created", "twitter_username"]
search_fields = ["name", "twitter_username"]
class MarkEdit:
fields = ['biography', ]
options = {
'preview': 'below'
}
admin.site.register(Speaker, SpeakerAdmin)
Fix user selection for speaker add
When adding a speaker in the admin, the staff person had to
pick a user from a huge dropdown with all the users, unsorted.
Change 'user' to a raw id field, meaning to pick a user, the staff member clicks a magnifying glass icon next to the field and gets a popup listing all the users in an admin list page with sortable columns and search.
from django.contrib import admin
from markedit.admin import MarkEditAdmin
from symposion.speakers.models import Speaker
class SpeakerAdmin(MarkEditAdmin):
list_display = ["name", "email", "created", "twitter_username"]
raw_id_fields = ["user"]
search_fields = ["name", "twitter_username"]
class MarkEdit:
fields = ['biography', ]
options = {
'preview': 'below'
}
admin.site.register(Speaker, SpeakerAdmin)
|
<commit_before>from django.contrib import admin
from markedit.admin import MarkEditAdmin
from symposion.speakers.models import Speaker
class SpeakerAdmin(MarkEditAdmin):
list_display = ["name", "email", "created", "twitter_username"]
search_fields = ["name", "twitter_username"]
class MarkEdit:
fields = ['biography', ]
options = {
'preview': 'below'
}
admin.site.register(Speaker, SpeakerAdmin)
<commit_msg>Fix user selection for speaker add
When adding a speaker in the admin, the staff person had to
pick a user from a huge dropdown with all the users, unsorted.
Change 'user' to a raw id field, meaning to pick a user, the staff member clicks a magnifying glass icon next to the field and gets a popup listing all the users in an admin list page with sortable columns and search.<commit_after>from django.contrib import admin
from markedit.admin import MarkEditAdmin
from symposion.speakers.models import Speaker
class SpeakerAdmin(MarkEditAdmin):
list_display = ["name", "email", "created", "twitter_username"]
raw_id_fields = ["user"]
search_fields = ["name", "twitter_username"]
class MarkEdit:
fields = ['biography', ]
options = {
'preview': 'below'
}
admin.site.register(Speaker, SpeakerAdmin)
|
f0fcde0988b705de752aa20e08c4c05fb504af3d
|
oz/__init__.py
|
oz/__init__.py
|
"""
Class for automated operating system installation.
Oz is a set of classes to do automated operating system installation. It
has built-in knowledge of the proper things to do for each of the supported
operating systems, so the data that the user must provide is very minimal.
This data is supplied in the form of an XML document that describes what
type of operating system is to be installed and where to get the
installation media. Oz handles the rest.
The simplest Oz program (without error handling or any advanced features)
would look something like:
import oz.TDL
import oz.GuestFactory
tdl_xml = \"\"\"
<template>
<name>f13jeos</name>
<os>
<name>Fedora</name>
<version>13</version>
<arch>x86_64</arch>
<install type='url'>
<url>http://download.fedoraproject.org/pub/fedora/linux/releases/13/Fedor
</install>
</os>
<description>Fedora 13</description>
</template>
\"\"\"
tdl = oz.TDL.TDL(tdl_xml)
guest = oz.GuestFactory.guest_factory(tdl, None, None)
guest.generate_install_media()
guest.generate_diskimage()
guest.install()
"""
|
"""
Class for automated operating system installation.
Oz is a set of classes to do automated operating system installation. It
has built-in knowledge of the proper things to do for each of the supported
operating systems, so the data that the user must provide is very minimal.
This data is supplied in the form of an XML document that describes what
type of operating system is to be installed and where to get the
installation media. Oz handles the rest.
The simplest Oz program (without error handling or any advanced features)
would look something like:
import oz.TDL
import oz.GuestFactory
tdl_xml = \"\"\"
<template>
<name>f13jeos</name>
<os>
<name>Fedora</name>
<version>13</version>
<arch>x86_64</arch>
<install type='url'>
<url>http://download.fedoraproject.org/pub/fedora/linux/releases/13/Fedora/x86_64/os/</url>
</install>
</os>
<description>Fedora 13</description>
</template>
\"\"\"
tdl = oz.TDL.TDL(tdl_xml)
guest = oz.GuestFactory.guest_factory(tdl, None, None)
guest.generate_install_media()
guest.generate_diskimage()
guest.install()
"""
|
Fix up the example to be proper XML.
|
Fix up the example to be proper XML.
Signed-off-by: Chris Lalancette <60b62644009db6b194cc0445b64e9b27bb26433a@redhat.com>
|
Python
|
lgpl-2.1
|
mgagne/oz,NeilBryant/oz,nullr0ute/oz,imcleod/oz,ndonegan/oz,NeilBryant/oz,moofrank/oz,imcleod/oz,ndonegan/oz,cernops/oz,mgagne/oz,nullr0ute/oz,moofrank/oz,clalancette/oz,clalancette/oz,cernops/oz
|
"""
Class for automated operating system installation.
Oz is a set of classes to do automated operating system installation. It
has built-in knowledge of the proper things to do for each of the supported
operating systems, so the data that the user must provide is very minimal.
This data is supplied in the form of an XML document that describes what
type of operating system is to be installed and where to get the
installation media. Oz handles the rest.
The simplest Oz program (without error handling or any advanced features)
would look something like:
import oz.TDL
import oz.GuestFactory
tdl_xml = \"\"\"
<template>
<name>f13jeos</name>
<os>
<name>Fedora</name>
<version>13</version>
<arch>x86_64</arch>
<install type='url'>
<url>http://download.fedoraproject.org/pub/fedora/linux/releases/13/Fedor
</install>
</os>
<description>Fedora 13</description>
</template>
\"\"\"
tdl = oz.TDL.TDL(tdl_xml)
guest = oz.GuestFactory.guest_factory(tdl, None, None)
guest.generate_install_media()
guest.generate_diskimage()
guest.install()
"""
Fix up the example to be proper XML.
Signed-off-by: Chris Lalancette <60b62644009db6b194cc0445b64e9b27bb26433a@redhat.com>
|
"""
Class for automated operating system installation.
Oz is a set of classes to do automated operating system installation. It
has built-in knowledge of the proper things to do for each of the supported
operating systems, so the data that the user must provide is very minimal.
This data is supplied in the form of an XML document that describes what
type of operating system is to be installed and where to get the
installation media. Oz handles the rest.
The simplest Oz program (without error handling or any advanced features)
would look something like:
import oz.TDL
import oz.GuestFactory
tdl_xml = \"\"\"
<template>
<name>f13jeos</name>
<os>
<name>Fedora</name>
<version>13</version>
<arch>x86_64</arch>
<install type='url'>
<url>http://download.fedoraproject.org/pub/fedora/linux/releases/13/Fedora/x86_64/os/</url>
</install>
</os>
<description>Fedora 13</description>
</template>
\"\"\"
tdl = oz.TDL.TDL(tdl_xml)
guest = oz.GuestFactory.guest_factory(tdl, None, None)
guest.generate_install_media()
guest.generate_diskimage()
guest.install()
"""
|
<commit_before>"""
Class for automated operating system installation.
Oz is a set of classes to do automated operating system installation. It
has built-in knowledge of the proper things to do for each of the supported
operating systems, so the data that the user must provide is very minimal.
This data is supplied in the form of an XML document that describes what
type of operating system is to be installed and where to get the
installation media. Oz handles the rest.
The simplest Oz program (without error handling or any advanced features)
would look something like:
import oz.TDL
import oz.GuestFactory
tdl_xml = \"\"\"
<template>
<name>f13jeos</name>
<os>
<name>Fedora</name>
<version>13</version>
<arch>x86_64</arch>
<install type='url'>
<url>http://download.fedoraproject.org/pub/fedora/linux/releases/13/Fedor
</install>
</os>
<description>Fedora 13</description>
</template>
\"\"\"
tdl = oz.TDL.TDL(tdl_xml)
guest = oz.GuestFactory.guest_factory(tdl, None, None)
guest.generate_install_media()
guest.generate_diskimage()
guest.install()
"""
<commit_msg>Fix up the example to be proper XML.
Signed-off-by: Chris Lalancette <60b62644009db6b194cc0445b64e9b27bb26433a@redhat.com><commit_after>
|
"""
Class for automated operating system installation.
Oz is a set of classes to do automated operating system installation. It
has built-in knowledge of the proper things to do for each of the supported
operating systems, so the data that the user must provide is very minimal.
This data is supplied in the form of an XML document that describes what
type of operating system is to be installed and where to get the
installation media. Oz handles the rest.
The simplest Oz program (without error handling or any advanced features)
would look something like:
import oz.TDL
import oz.GuestFactory
tdl_xml = \"\"\"
<template>
<name>f13jeos</name>
<os>
<name>Fedora</name>
<version>13</version>
<arch>x86_64</arch>
<install type='url'>
<url>http://download.fedoraproject.org/pub/fedora/linux/releases/13/Fedora/x86_64/os/</url>
</install>
</os>
<description>Fedora 13</description>
</template>
\"\"\"
tdl = oz.TDL.TDL(tdl_xml)
guest = oz.GuestFactory.guest_factory(tdl, None, None)
guest.generate_install_media()
guest.generate_diskimage()
guest.install()
"""
|
"""
Class for automated operating system installation.
Oz is a set of classes to do automated operating system installation. It
has built-in knowledge of the proper things to do for each of the supported
operating systems, so the data that the user must provide is very minimal.
This data is supplied in the form of an XML document that describes what
type of operating system is to be installed and where to get the
installation media. Oz handles the rest.
The simplest Oz program (without error handling or any advanced features)
would look something like:
import oz.TDL
import oz.GuestFactory
tdl_xml = \"\"\"
<template>
<name>f13jeos</name>
<os>
<name>Fedora</name>
<version>13</version>
<arch>x86_64</arch>
<install type='url'>
<url>http://download.fedoraproject.org/pub/fedora/linux/releases/13/Fedor
</install>
</os>
<description>Fedora 13</description>
</template>
\"\"\"
tdl = oz.TDL.TDL(tdl_xml)
guest = oz.GuestFactory.guest_factory(tdl, None, None)
guest.generate_install_media()
guest.generate_diskimage()
guest.install()
"""
Fix up the example to be proper XML.
Signed-off-by: Chris Lalancette <60b62644009db6b194cc0445b64e9b27bb26433a@redhat.com>
"""
Class for automated operating system installation.
Oz is a set of classes to do automated operating system installation. It
has built-in knowledge of the proper things to do for each of the supported
operating systems, so the data that the user must provide is very minimal.
This data is supplied in the form of an XML document that describes what
type of operating system is to be installed and where to get the
installation media. Oz handles the rest.
The simplest Oz program (without error handling or any advanced features)
would look something like:
import oz.TDL
import oz.GuestFactory
tdl_xml = \"\"\"
<template>
<name>f13jeos</name>
<os>
<name>Fedora</name>
<version>13</version>
<arch>x86_64</arch>
<install type='url'>
<url>http://download.fedoraproject.org/pub/fedora/linux/releases/13/Fedora/x86_64/os/</url>
</install>
</os>
<description>Fedora 13</description>
</template>
\"\"\"
tdl = oz.TDL.TDL(tdl_xml)
guest = oz.GuestFactory.guest_factory(tdl, None, None)
guest.generate_install_media()
guest.generate_diskimage()
guest.install()
"""
|
<commit_before>"""
Class for automated operating system installation.
Oz is a set of classes to do automated operating system installation. It
has built-in knowledge of the proper things to do for each of the supported
operating systems, so the data that the user must provide is very minimal.
This data is supplied in the form of an XML document that describes what
type of operating system is to be installed and where to get the
installation media. Oz handles the rest.
The simplest Oz program (without error handling or any advanced features)
would look something like:
import oz.TDL
import oz.GuestFactory
tdl_xml = \"\"\"
<template>
<name>f13jeos</name>
<os>
<name>Fedora</name>
<version>13</version>
<arch>x86_64</arch>
<install type='url'>
<url>http://download.fedoraproject.org/pub/fedora/linux/releases/13/Fedor
</install>
</os>
<description>Fedora 13</description>
</template>
\"\"\"
tdl = oz.TDL.TDL(tdl_xml)
guest = oz.GuestFactory.guest_factory(tdl, None, None)
guest.generate_install_media()
guest.generate_diskimage()
guest.install()
"""
<commit_msg>Fix up the example to be proper XML.
Signed-off-by: Chris Lalancette <60b62644009db6b194cc0445b64e9b27bb26433a@redhat.com><commit_after>"""
Class for automated operating system installation.
Oz is a set of classes to do automated operating system installation. It
has built-in knowledge of the proper things to do for each of the supported
operating systems, so the data that the user must provide is very minimal.
This data is supplied in the form of an XML document that describes what
type of operating system is to be installed and where to get the
installation media. Oz handles the rest.
The simplest Oz program (without error handling or any advanced features)
would look something like:
import oz.TDL
import oz.GuestFactory
tdl_xml = \"\"\"
<template>
<name>f13jeos</name>
<os>
<name>Fedora</name>
<version>13</version>
<arch>x86_64</arch>
<install type='url'>
<url>http://download.fedoraproject.org/pub/fedora/linux/releases/13/Fedora/x86_64/os/</url>
</install>
</os>
<description>Fedora 13</description>
</template>
\"\"\"
tdl = oz.TDL.TDL(tdl_xml)
guest = oz.GuestFactory.guest_factory(tdl, None, None)
guest.generate_install_media()
guest.generate_diskimage()
guest.install()
"""
|
3ae56f6dc4801013c272cf9b7472522510e4b807
|
1-multiples-of-3-and-5.py
|
1-multiples-of-3-and-5.py
|
def multiples_of_3_and_5(num=1000):
for i in range(num):
if i % 3 == 0 or i % 5 == 0:
yield i
if __name__ == '__main__':
print(sum(multiples_of_3_and_5()))
|
from itertools import chain
def threes_and_fives_gen(num=1000):
for i in range(num):
if i % 3 == 0 or i % 5 == 0:
yield i
def threes_and_fives_fun(n):
return set(chain(range(3, n+1, 3), range(5, n+1, 5)))
if __name__ == '__main__':
print(sum(threes_and_fives_gen(10000000)))
|
Add fun answer to 1 multiples of 3 and 5
|
Add fun answer to 1 multiples of 3 and 5
|
Python
|
mit
|
dawran6/project-euler
|
def multiples_of_3_and_5(num=1000):
for i in range(num):
if i % 3 == 0 or i % 5 == 0:
yield i
if __name__ == '__main__':
print(sum(multiples_of_3_and_5()))
Add fun answer to 1 multiples of 3 and 5
|
from itertools import chain
def threes_and_fives_gen(num=1000):
for i in range(num):
if i % 3 == 0 or i % 5 == 0:
yield i
def threes_and_fives_fun(n):
return set(chain(range(3, n+1, 3), range(5, n+1, 5)))
if __name__ == '__main__':
print(sum(threes_and_fives_gen(10000000)))
|
<commit_before>def multiples_of_3_and_5(num=1000):
for i in range(num):
if i % 3 == 0 or i % 5 == 0:
yield i
if __name__ == '__main__':
print(sum(multiples_of_3_and_5()))
<commit_msg>Add fun answer to 1 multiples of 3 and 5<commit_after>
|
from itertools import chain
def threes_and_fives_gen(num=1000):
for i in range(num):
if i % 3 == 0 or i % 5 == 0:
yield i
def threes_and_fives_fun(n):
return set(chain(range(3, n+1, 3), range(5, n+1, 5)))
if __name__ == '__main__':
print(sum(threes_and_fives_gen(10000000)))
|
def multiples_of_3_and_5(num=1000):
for i in range(num):
if i % 3 == 0 or i % 5 == 0:
yield i
if __name__ == '__main__':
print(sum(multiples_of_3_and_5()))
Add fun answer to 1 multiples of 3 and 5
from itertools import chain
def threes_and_fives_gen(num=1000):
for i in range(num):
if i % 3 == 0 or i % 5 == 0:
yield i
def threes_and_fives_fun(n):
return set(chain(range(3, n+1, 3), range(5, n+1, 5)))
if __name__ == '__main__':
print(sum(threes_and_fives_gen(10000000)))
|
<commit_before>def multiples_of_3_and_5(num=1000):
for i in range(num):
if i % 3 == 0 or i % 5 == 0:
yield i
if __name__ == '__main__':
print(sum(multiples_of_3_and_5()))
<commit_msg>Add fun answer to 1 multiples of 3 and 5<commit_after>from itertools import chain
def threes_and_fives_gen(num=1000):
for i in range(num):
if i % 3 == 0 or i % 5 == 0:
yield i
def threes_and_fives_fun(n):
return set(chain(range(3, n+1, 3), range(5, n+1, 5)))
if __name__ == '__main__':
print(sum(threes_and_fives_gen(10000000)))
|
4817784c6e1050034faabb1b3d04382fe8997b41
|
numpy/_array_api/_constants.py
|
numpy/_array_api/_constants.py
|
from ._array_object import ndarray
from ._dtypes import float64
import numpy as np
e = ndarray._new(np.array(np.e, dtype=float64))
inf = ndarray._new(np.array(np.inf, dtype=float64))
nan = ndarray._new(np.array(np.nan, dtype=float64))
pi = ndarray._new(np.array(np.pi, dtype=float64))
|
import numpy as np
e = np.e
inf = np.inf
nan = np.nan
pi = np.pi
|
Make the array API constants Python floats
|
Make the array API constants Python floats
|
Python
|
bsd-3-clause
|
seberg/numpy,numpy/numpy,simongibbons/numpy,charris/numpy,mhvk/numpy,simongibbons/numpy,mattip/numpy,seberg/numpy,pdebuyl/numpy,mattip/numpy,charris/numpy,endolith/numpy,numpy/numpy,anntzer/numpy,jakirkham/numpy,mhvk/numpy,anntzer/numpy,endolith/numpy,seberg/numpy,endolith/numpy,mattip/numpy,simongibbons/numpy,numpy/numpy,seberg/numpy,jakirkham/numpy,charris/numpy,anntzer/numpy,mhvk/numpy,rgommers/numpy,simongibbons/numpy,pdebuyl/numpy,mhvk/numpy,rgommers/numpy,rgommers/numpy,jakirkham/numpy,simongibbons/numpy,anntzer/numpy,mhvk/numpy,charris/numpy,numpy/numpy,rgommers/numpy,jakirkham/numpy,pdebuyl/numpy,mattip/numpy,endolith/numpy,pdebuyl/numpy,jakirkham/numpy
|
from ._array_object import ndarray
from ._dtypes import float64
import numpy as np
e = ndarray._new(np.array(np.e, dtype=float64))
inf = ndarray._new(np.array(np.inf, dtype=float64))
nan = ndarray._new(np.array(np.nan, dtype=float64))
pi = ndarray._new(np.array(np.pi, dtype=float64))
Make the array API constants Python floats
|
import numpy as np
e = np.e
inf = np.inf
nan = np.nan
pi = np.pi
|
<commit_before>from ._array_object import ndarray
from ._dtypes import float64
import numpy as np
e = ndarray._new(np.array(np.e, dtype=float64))
inf = ndarray._new(np.array(np.inf, dtype=float64))
nan = ndarray._new(np.array(np.nan, dtype=float64))
pi = ndarray._new(np.array(np.pi, dtype=float64))
<commit_msg>Make the array API constants Python floats<commit_after>
|
import numpy as np
e = np.e
inf = np.inf
nan = np.nan
pi = np.pi
|
from ._array_object import ndarray
from ._dtypes import float64
import numpy as np
e = ndarray._new(np.array(np.e, dtype=float64))
inf = ndarray._new(np.array(np.inf, dtype=float64))
nan = ndarray._new(np.array(np.nan, dtype=float64))
pi = ndarray._new(np.array(np.pi, dtype=float64))
Make the array API constants Python floats
import numpy as np
e = np.e
inf = np.inf
nan = np.nan
pi = np.pi
|
<commit_before>from ._array_object import ndarray
from ._dtypes import float64
import numpy as np
e = ndarray._new(np.array(np.e, dtype=float64))
inf = ndarray._new(np.array(np.inf, dtype=float64))
nan = ndarray._new(np.array(np.nan, dtype=float64))
pi = ndarray._new(np.array(np.pi, dtype=float64))
<commit_msg>Make the array API constants Python floats<commit_after>import numpy as np
e = np.e
inf = np.inf
nan = np.nan
pi = np.pi
|
b24ae1320af5387e339a12dc00e214330525e549
|
src/BibleBot.Frontend/application.py
|
src/BibleBot.Frontend/application.py
|
"""
Copyright (C) 2016-2022 Kerygma Digital Co.
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this file,
You can obtain one at https://mozilla.org/MPL/2.0/.
"""
import disnake
from disnake.ext import commands
from logger import VyLogger
import os
logger = VyLogger("default")
intents = disnake.Intents.default()
intents.message_content = True
bot = commands.AutoShardedBot(
command_prefix=commands.when_mentioned,
intents=intents,
test_guilds=[362503610006765568],
sync_commands_debug=True,
)
bot.load_extension("cogs")
bot.run(os.environ.get("DISCORD_TOKEN"))
|
"""
Copyright (C) 2016-2022 Kerygma Digital Co.
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this file,
You can obtain one at https://mozilla.org/MPL/2.0/.
"""
import disnake
from disnake.ext import commands
from logger import VyLogger
import os
logger = VyLogger("default")
intents = disnake.Intents.default()
intents.message_content = True
bot = commands.AutoShardedBot(
command_prefix=commands.when_mentioned,
intents=intents,
)
bot.load_extension("cogs")
bot.run(os.environ.get("DISCORD_TOKEN"))
|
Move commands out of test.
|
Move commands out of test.
|
Python
|
mpl-2.0
|
BibleBot/BibleBot,BibleBot/BibleBot,BibleBot/BibleBot
|
"""
Copyright (C) 2016-2022 Kerygma Digital Co.
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this file,
You can obtain one at https://mozilla.org/MPL/2.0/.
"""
import disnake
from disnake.ext import commands
from logger import VyLogger
import os
logger = VyLogger("default")
intents = disnake.Intents.default()
intents.message_content = True
bot = commands.AutoShardedBot(
command_prefix=commands.when_mentioned,
intents=intents,
test_guilds=[362503610006765568],
sync_commands_debug=True,
)
bot.load_extension("cogs")
bot.run(os.environ.get("DISCORD_TOKEN"))
Move commands out of test.
|
"""
Copyright (C) 2016-2022 Kerygma Digital Co.
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this file,
You can obtain one at https://mozilla.org/MPL/2.0/.
"""
import disnake
from disnake.ext import commands
from logger import VyLogger
import os
logger = VyLogger("default")
intents = disnake.Intents.default()
intents.message_content = True
bot = commands.AutoShardedBot(
command_prefix=commands.when_mentioned,
intents=intents,
)
bot.load_extension("cogs")
bot.run(os.environ.get("DISCORD_TOKEN"))
|
<commit_before>"""
Copyright (C) 2016-2022 Kerygma Digital Co.
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this file,
You can obtain one at https://mozilla.org/MPL/2.0/.
"""
import disnake
from disnake.ext import commands
from logger import VyLogger
import os
logger = VyLogger("default")
intents = disnake.Intents.default()
intents.message_content = True
bot = commands.AutoShardedBot(
command_prefix=commands.when_mentioned,
intents=intents,
test_guilds=[362503610006765568],
sync_commands_debug=True,
)
bot.load_extension("cogs")
bot.run(os.environ.get("DISCORD_TOKEN"))
<commit_msg>Move commands out of test.<commit_after>
|
"""
Copyright (C) 2016-2022 Kerygma Digital Co.
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this file,
You can obtain one at https://mozilla.org/MPL/2.0/.
"""
import disnake
from disnake.ext import commands
from logger import VyLogger
import os
logger = VyLogger("default")
intents = disnake.Intents.default()
intents.message_content = True
bot = commands.AutoShardedBot(
command_prefix=commands.when_mentioned,
intents=intents,
)
bot.load_extension("cogs")
bot.run(os.environ.get("DISCORD_TOKEN"))
|
"""
Copyright (C) 2016-2022 Kerygma Digital Co.
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this file,
You can obtain one at https://mozilla.org/MPL/2.0/.
"""
import disnake
from disnake.ext import commands
from logger import VyLogger
import os
logger = VyLogger("default")
intents = disnake.Intents.default()
intents.message_content = True
bot = commands.AutoShardedBot(
command_prefix=commands.when_mentioned,
intents=intents,
test_guilds=[362503610006765568],
sync_commands_debug=True,
)
bot.load_extension("cogs")
bot.run(os.environ.get("DISCORD_TOKEN"))
Move commands out of test.
"""
Copyright (C) 2016-2022 Kerygma Digital Co.
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this file,
You can obtain one at https://mozilla.org/MPL/2.0/.
"""
import disnake
from disnake.ext import commands
from logger import VyLogger
import os
logger = VyLogger("default")
intents = disnake.Intents.default()
intents.message_content = True
bot = commands.AutoShardedBot(
command_prefix=commands.when_mentioned,
intents=intents,
)
bot.load_extension("cogs")
bot.run(os.environ.get("DISCORD_TOKEN"))
|
<commit_before>"""
Copyright (C) 2016-2022 Kerygma Digital Co.
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this file,
You can obtain one at https://mozilla.org/MPL/2.0/.
"""
import disnake
from disnake.ext import commands
from logger import VyLogger
import os
logger = VyLogger("default")
intents = disnake.Intents.default()
intents.message_content = True
bot = commands.AutoShardedBot(
command_prefix=commands.when_mentioned,
intents=intents,
test_guilds=[362503610006765568],
sync_commands_debug=True,
)
bot.load_extension("cogs")
bot.run(os.environ.get("DISCORD_TOKEN"))
<commit_msg>Move commands out of test.<commit_after>"""
Copyright (C) 2016-2022 Kerygma Digital Co.
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this file,
You can obtain one at https://mozilla.org/MPL/2.0/.
"""
import disnake
from disnake.ext import commands
from logger import VyLogger
import os
logger = VyLogger("default")
intents = disnake.Intents.default()
intents.message_content = True
bot = commands.AutoShardedBot(
command_prefix=commands.when_mentioned,
intents=intents,
)
bot.load_extension("cogs")
bot.run(os.environ.get("DISCORD_TOKEN"))
|
18925af2a74c20e86867bce9c480b5cd710b6b32
|
openbudgets/apps/sheets/utilities.py
|
openbudgets/apps/sheets/utilities.py
|
from django.conf import settings
def is_comparable():
"""Sets the value of TemplateNode.comparable to True or False."""
value = settings.OPENBUDGETS_COMPARABLE_TEMPLATENODE_DEFAULT
return value
|
from django.conf import settings
def is_node_comparable(instance):
"""Sets the value of TemplateNode.comparable to True or False.
Relies on the non-abstract TemplateNode implementation where nodes
can belong to many templates.
"""
value = settings.OPENBUDGETS_COMPARABLE_TEMPLATENODE
if all([t.is_blueprint for t in instance.templates.all()]):
value = settings.OPENBUDGETS_COMPARABLE_TEMPLATENODE_IN_BLUEPRINT
else:
value = settings.OPENBUDGETS_COMPARABLE_TEMPLATENODE_NOT_IN_BLUEPRINT
return value
|
Set comparable state of node.
|
Set comparable state of node.
|
Python
|
bsd-3-clause
|
openbudgets/openbudgets,openbudgets/openbudgets,pwalsh/openbudgets,pwalsh/openbudgets,openbudgets/openbudgets,pwalsh/openbudgets
|
from django.conf import settings
def is_comparable():
"""Sets the value of TemplateNode.comparable to True or False."""
value = settings.OPENBUDGETS_COMPARABLE_TEMPLATENODE_DEFAULT
return value
Set comparable state of node.
|
from django.conf import settings
def is_node_comparable(instance):
"""Sets the value of TemplateNode.comparable to True or False.
Relies on the non-abstract TemplateNode implementation where nodes
can belong to many templates.
"""
value = settings.OPENBUDGETS_COMPARABLE_TEMPLATENODE
if all([t.is_blueprint for t in instance.templates.all()]):
value = settings.OPENBUDGETS_COMPARABLE_TEMPLATENODE_IN_BLUEPRINT
else:
value = settings.OPENBUDGETS_COMPARABLE_TEMPLATENODE_NOT_IN_BLUEPRINT
return value
|
<commit_before>from django.conf import settings
def is_comparable():
"""Sets the value of TemplateNode.comparable to True or False."""
value = settings.OPENBUDGETS_COMPARABLE_TEMPLATENODE_DEFAULT
return value
<commit_msg>Set comparable state of node.<commit_after>
|
from django.conf import settings
def is_node_comparable(instance):
"""Sets the value of TemplateNode.comparable to True or False.
Relies on the non-abstract TemplateNode implementation where nodes
can belong to many templates.
"""
value = settings.OPENBUDGETS_COMPARABLE_TEMPLATENODE
if all([t.is_blueprint for t in instance.templates.all()]):
value = settings.OPENBUDGETS_COMPARABLE_TEMPLATENODE_IN_BLUEPRINT
else:
value = settings.OPENBUDGETS_COMPARABLE_TEMPLATENODE_NOT_IN_BLUEPRINT
return value
|
from django.conf import settings
def is_comparable():
"""Sets the value of TemplateNode.comparable to True or False."""
value = settings.OPENBUDGETS_COMPARABLE_TEMPLATENODE_DEFAULT
return value
Set comparable state of node.
from django.conf import settings
def is_node_comparable(instance):
"""Sets the value of TemplateNode.comparable to True or False.
Relies on the non-abstract TemplateNode implementation where nodes
can belong to many templates.
"""
value = settings.OPENBUDGETS_COMPARABLE_TEMPLATENODE
if all([t.is_blueprint for t in instance.templates.all()]):
value = settings.OPENBUDGETS_COMPARABLE_TEMPLATENODE_IN_BLUEPRINT
else:
value = settings.OPENBUDGETS_COMPARABLE_TEMPLATENODE_NOT_IN_BLUEPRINT
return value
|
<commit_before>from django.conf import settings
def is_comparable():
"""Sets the value of TemplateNode.comparable to True or False."""
value = settings.OPENBUDGETS_COMPARABLE_TEMPLATENODE_DEFAULT
return value
<commit_msg>Set comparable state of node.<commit_after>from django.conf import settings
def is_node_comparable(instance):
"""Sets the value of TemplateNode.comparable to True or False.
Relies on the non-abstract TemplateNode implementation where nodes
can belong to many templates.
"""
value = settings.OPENBUDGETS_COMPARABLE_TEMPLATENODE
if all([t.is_blueprint for t in instance.templates.all()]):
value = settings.OPENBUDGETS_COMPARABLE_TEMPLATENODE_IN_BLUEPRINT
else:
value = settings.OPENBUDGETS_COMPARABLE_TEMPLATENODE_NOT_IN_BLUEPRINT
return value
|
dae46049bd72ce1599fd4169e3d8d6bd8ca1c622
|
drfdocs/api_docs.py
|
drfdocs/api_docs.py
|
from django.conf import settings
from django.core.urlresolvers import RegexURLResolver, RegexURLPattern
class ApiDocumentation(object):
excluded_apps = ["admin", "drfdocs"]
root_urlconf = __import__(settings.ROOT_URLCONF)
def __init__(self):
self.view_names = []
self.get_all_view_names(self.root_urlconf.urls.urlpatterns)
def get_all_view_names(self, urlpatterns):
for pattern in urlpatterns:
if isinstance(pattern, RegexURLResolver) and (pattern.app_name not in self.excluded_apps):
self.get_all_view_names(pattern.url_patterns)
elif isinstance(pattern, RegexURLPattern):
self.view_names.append(pattern.callback.__name__)
def get_views(self):
return self.view_names
|
from django.conf import settings
from django.core.urlresolvers import RegexURLResolver, RegexURLPattern
class ApiDocumentation(object):
excluded_apps = ["admin", "drfdocs"]
excluded_endpoints = ["serve"]
root_urlconf = __import__(settings.ROOT_URLCONF)
def __init__(self):
self.view_names = []
self.get_all_view_names(self.root_urlconf.urls.urlpatterns)
def get_all_view_names(self, urlpatterns):
for pattern in urlpatterns:
if isinstance(pattern, RegexURLResolver) and (pattern.app_name not in self.excluded_apps):
self.get_all_view_names(pattern.url_patterns)
elif isinstance(pattern, RegexURLPattern) and (pattern.callback.__name__ not in self.excluded_endpoints):
self.view_names.append(pattern.callback.__name__)
def get_views(self):
return self.view_names
|
Exclude "serve" view (static files)
|
Exclude "serve" view (static files)
|
Python
|
bsd-2-clause
|
manosim/django-rest-framework-docs,ekonstantinidis/django-rest-framework-docs,manosim/django-rest-framework-docs,manosim/django-rest-framework-docs,ekonstantinidis/django-rest-framework-docs,ekonstantinidis/django-rest-framework-docs
|
from django.conf import settings
from django.core.urlresolvers import RegexURLResolver, RegexURLPattern
class ApiDocumentation(object):
excluded_apps = ["admin", "drfdocs"]
root_urlconf = __import__(settings.ROOT_URLCONF)
def __init__(self):
self.view_names = []
self.get_all_view_names(self.root_urlconf.urls.urlpatterns)
def get_all_view_names(self, urlpatterns):
for pattern in urlpatterns:
if isinstance(pattern, RegexURLResolver) and (pattern.app_name not in self.excluded_apps):
self.get_all_view_names(pattern.url_patterns)
elif isinstance(pattern, RegexURLPattern):
self.view_names.append(pattern.callback.__name__)
def get_views(self):
return self.view_names
Exclude "serve" view (static files)
|
from django.conf import settings
from django.core.urlresolvers import RegexURLResolver, RegexURLPattern
class ApiDocumentation(object):
excluded_apps = ["admin", "drfdocs"]
excluded_endpoints = ["serve"]
root_urlconf = __import__(settings.ROOT_URLCONF)
def __init__(self):
self.view_names = []
self.get_all_view_names(self.root_urlconf.urls.urlpatterns)
def get_all_view_names(self, urlpatterns):
for pattern in urlpatterns:
if isinstance(pattern, RegexURLResolver) and (pattern.app_name not in self.excluded_apps):
self.get_all_view_names(pattern.url_patterns)
elif isinstance(pattern, RegexURLPattern) and (pattern.callback.__name__ not in self.excluded_endpoints):
self.view_names.append(pattern.callback.__name__)
def get_views(self):
return self.view_names
|
<commit_before>from django.conf import settings
from django.core.urlresolvers import RegexURLResolver, RegexURLPattern
class ApiDocumentation(object):
excluded_apps = ["admin", "drfdocs"]
root_urlconf = __import__(settings.ROOT_URLCONF)
def __init__(self):
self.view_names = []
self.get_all_view_names(self.root_urlconf.urls.urlpatterns)
def get_all_view_names(self, urlpatterns):
for pattern in urlpatterns:
if isinstance(pattern, RegexURLResolver) and (pattern.app_name not in self.excluded_apps):
self.get_all_view_names(pattern.url_patterns)
elif isinstance(pattern, RegexURLPattern):
self.view_names.append(pattern.callback.__name__)
def get_views(self):
return self.view_names
<commit_msg>Exclude "serve" view (static files)<commit_after>
|
from django.conf import settings
from django.core.urlresolvers import RegexURLResolver, RegexURLPattern
class ApiDocumentation(object):
excluded_apps = ["admin", "drfdocs"]
excluded_endpoints = ["serve"]
root_urlconf = __import__(settings.ROOT_URLCONF)
def __init__(self):
self.view_names = []
self.get_all_view_names(self.root_urlconf.urls.urlpatterns)
def get_all_view_names(self, urlpatterns):
for pattern in urlpatterns:
if isinstance(pattern, RegexURLResolver) and (pattern.app_name not in self.excluded_apps):
self.get_all_view_names(pattern.url_patterns)
elif isinstance(pattern, RegexURLPattern) and (pattern.callback.__name__ not in self.excluded_endpoints):
self.view_names.append(pattern.callback.__name__)
def get_views(self):
return self.view_names
|
from django.conf import settings
from django.core.urlresolvers import RegexURLResolver, RegexURLPattern
class ApiDocumentation(object):
excluded_apps = ["admin", "drfdocs"]
root_urlconf = __import__(settings.ROOT_URLCONF)
def __init__(self):
self.view_names = []
self.get_all_view_names(self.root_urlconf.urls.urlpatterns)
def get_all_view_names(self, urlpatterns):
for pattern in urlpatterns:
if isinstance(pattern, RegexURLResolver) and (pattern.app_name not in self.excluded_apps):
self.get_all_view_names(pattern.url_patterns)
elif isinstance(pattern, RegexURLPattern):
self.view_names.append(pattern.callback.__name__)
def get_views(self):
return self.view_names
Exclude "serve" view (static files)from django.conf import settings
from django.core.urlresolvers import RegexURLResolver, RegexURLPattern
class ApiDocumentation(object):
excluded_apps = ["admin", "drfdocs"]
excluded_endpoints = ["serve"]
root_urlconf = __import__(settings.ROOT_URLCONF)
def __init__(self):
self.view_names = []
self.get_all_view_names(self.root_urlconf.urls.urlpatterns)
def get_all_view_names(self, urlpatterns):
for pattern in urlpatterns:
if isinstance(pattern, RegexURLResolver) and (pattern.app_name not in self.excluded_apps):
self.get_all_view_names(pattern.url_patterns)
elif isinstance(pattern, RegexURLPattern) and (pattern.callback.__name__ not in self.excluded_endpoints):
self.view_names.append(pattern.callback.__name__)
def get_views(self):
return self.view_names
|
<commit_before>from django.conf import settings
from django.core.urlresolvers import RegexURLResolver, RegexURLPattern
class ApiDocumentation(object):
excluded_apps = ["admin", "drfdocs"]
root_urlconf = __import__(settings.ROOT_URLCONF)
def __init__(self):
self.view_names = []
self.get_all_view_names(self.root_urlconf.urls.urlpatterns)
def get_all_view_names(self, urlpatterns):
for pattern in urlpatterns:
if isinstance(pattern, RegexURLResolver) and (pattern.app_name not in self.excluded_apps):
self.get_all_view_names(pattern.url_patterns)
elif isinstance(pattern, RegexURLPattern):
self.view_names.append(pattern.callback.__name__)
def get_views(self):
return self.view_names
<commit_msg>Exclude "serve" view (static files)<commit_after>from django.conf import settings
from django.core.urlresolvers import RegexURLResolver, RegexURLPattern
class ApiDocumentation(object):
excluded_apps = ["admin", "drfdocs"]
excluded_endpoints = ["serve"]
root_urlconf = __import__(settings.ROOT_URLCONF)
def __init__(self):
self.view_names = []
self.get_all_view_names(self.root_urlconf.urls.urlpatterns)
def get_all_view_names(self, urlpatterns):
for pattern in urlpatterns:
if isinstance(pattern, RegexURLResolver) and (pattern.app_name not in self.excluded_apps):
self.get_all_view_names(pattern.url_patterns)
elif isinstance(pattern, RegexURLPattern) and (pattern.callback.__name__ not in self.excluded_endpoints):
self.view_names.append(pattern.callback.__name__)
def get_views(self):
return self.view_names
|
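A standalone sketch of the name-based filtering this commit adds: callbacks whose __name__ appears in excluded_endpoints are skipped. The sample view functions below are hypothetical, not part of django-rest-framework-docs.

EXCLUDED_ENDPOINTS = ["serve"]

def serve():       # stands in for the static-file "serve" view
    pass

def user_list():   # a hypothetical API view
    pass

def collect_view_names(callbacks, excluded=EXCLUDED_ENDPOINTS):
    # Keep only callbacks whose __name__ is not on the exclusion list,
    # mirroring the extra condition added to get_all_view_names().
    return [cb.__name__ for cb in callbacks if cb.__name__ not in excluded]

print(collect_view_names([serve, user_list]))  # ['user_list']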
02cb413b8e6671cead4ec9af55acef2daf451fc0
|
contributr/contributr/wsgi.py
|
contributr/contributr/wsgi.py
|
"""
WSGI config for contributr project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from dj_static import Cling
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "contributr.settings.local")
# Cling is a simple way of serving static assets.
# http://www.kennethreitz.org/essays/introducing-dj-static
application = Cling(get_wsgi_application())
|
"""
WSGI config for contributr project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
if os.environ.get("DJANGO_SETTINGS_MODULE") == "contributr.settings.production":
# Cling is a simple way of serving static assets.
# http://www.kennethreitz.org/essays/introducing-dj-static
from dj_static import Cling
application = Cling(get_wsgi_application())
else:
application = get_wsgi_application()
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "contributr.settings.local")
|
Move production Cling settings into production-only block
|
Move production Cling settings into production-only block
The Cling import and call are moved into an if-block that is only
executed when the server is run with production settings. This
makes the server run locally again.
Resolves: #42
|
Python
|
mit
|
SanketDG/contributr,iAmMrinal0/contributr,sofianugraha/contributr,nickpolet/contributr,Heasummn/contributr,JoshAddington/contributr,sofianugraha/contributr,troyleak/contributr,nickpolet/contributr,iAmMrinal0/contributr,SanketDG/contributr,planetirf/contributr,planetirf/contributr,planetirf/contributr,kakorrhaphio/contributr,abdullah2891/contributr,Heasummn/contributr,npaul2811/contributr,npaul2811/contributr,troyleak/contributr,sofianugraha/contributr,kakorrhaphio/contributr,Djenesis/contributr,jherrlin/contributr,abdullah2891/contributr,Heasummn/contributr,JoshAddington/contributr,jherrlin/contributr,jherrlin/contributr,iAmMrinal0/contributr,JoshAddington/contributr,kakorrhaphio/contributr,Djenesis/contributr,troyleak/contributr,SanketDG/contributr,abdullah2891/contributr,nickpolet/contributr,npaul2811/contributr,Djenesis/contributr
|
"""
WSGI config for contributr project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from dj_static import Cling
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "contributr.settings.local")
# Cling is a simple way of serving static assets.
# http://www.kennethreitz.org/essays/introducing-dj-static
application = Cling(get_wsgi_application())
Move production Cling settings into production-only block
The Cling import and call are moved into an if-block that is only
executed when the server is run with production settings. This
makes the server run locally again.
Resolves: #42
|
"""
WSGI config for contributr project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
if os.environ.get("DJANGO_SETTINGS_MODULE") == "contributr.settings.production":
# Cling is a simple way of serving static assets.
# http://www.kennethreitz.org/essays/introducing-dj-static
from dj_static import Cling
application = Cling(get_wsgi_application())
else:
application = get_wsgi_application()
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "contributr.settings.local")
|
<commit_before>"""
WSGI config for contributr project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from dj_static import Cling
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "contributr.settings.local")
# Cling is a simple way of serving static assets.
# http://www.kennethreitz.org/essays/introducing-dj-static
application = Cling(get_wsgi_application())
<commit_msg>Move production Cling settings into production-only block
The Cling import and call are moved into an if-block that is only
executed when the server is run with production settings. This
makes the server run locally again.
Resolves: #42<commit_after>
|
"""
WSGI config for contributr project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
if os.environ.get("DJANGO_SETTINGS_MODULE") == "contributr.settings.production":
# Cling is a simple way of serving static assets.
# http://www.kennethreitz.org/essays/introducing-dj-static
from dj_static import Cling
application = Cling(get_wsgi_application())
else:
application = get_wsgi_application()
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "contributr.settings.local")
|
"""
WSGI config for contributr project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from dj_static import Cling
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "contributr.settings.local")
# Cling is a simple way of serving static assets.
# http://www.kennethreitz.org/essays/introducing-dj-static
application = Cling(get_wsgi_application())
Move production Cling settings into production-only block
The Cling import and call are moved into an if-block that is only
executed when the server is run with production settings. This
makes the server run locally again.
Resolves: #42"""
WSGI config for contributr project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
if os.environ.get("DJANGO_SETTINGS_MODULE") == "contributr.settings.production":
# Cling is a simple way of serving static assets.
# http://www.kennethreitz.org/essays/introducing-dj-static
from dj_static import Cling
application = Cling(get_wsgi_application())
else:
application = get_wsgi_application()
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "contributr.settings.local")
|
<commit_before>"""
WSGI config for contributr project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from dj_static import Cling
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "contributr.settings.local")
# Cling is a simple way of serving static assets.
# http://www.kennethreitz.org/essays/introducing-dj-static
application = Cling(get_wsgi_application())
<commit_msg>Move production Cling settings into production-only block
The Cling import and call are moved into an if-block that is only
executed when the server is run with production settings. This
makes the server run locally again.
Resolves: #42<commit_after>"""
WSGI config for contributr project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
if os.environ.get("DJANGO_SETTINGS_MODULE") == "contributr.settings.production":
# Cling is a simple way of serving static assets.
# http://www.kennethreitz.org/essays/introducing-dj-static
from dj_static import Cling
application = Cling(get_wsgi_application())
else:
application = get_wsgi_application()
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "contributr.settings.local")
|
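A runnable sketch of the environment-driven branch introduced above, with the Django and dj_static pieces stubbed out so it executes without either package installed; only the DJANGO_SETTINGS_MODULE comparison is taken from the commit.

import os

def get_wsgi_application():
    # Stub for django.core.wsgi.get_wsgi_application.
    return "plain-wsgi-app"

def cling(app):
    # Stub for dj_static.Cling, which would wrap the app to serve static files.
    return ("cling-wrapped", app)

def build_application():
    if os.environ.get("DJANGO_SETTINGS_MODULE") == "contributr.settings.production":
        return cling(get_wsgi_application())
    return get_wsgi_application()

os.environ["DJANGO_SETTINGS_MODULE"] = "contributr.settings.local"
print(build_application())  # the plain app, so local development works again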
83169cc6e342cd510a97a5e9574eb4701d50d2d8
|
salt/__init__.py
|
salt/__init__.py
|
'''
Make me some salt!
'''
# Import python libs
import os
import optparse
# Import salt libs
import salt.master
import salt.minion
import salt.utils
class Master(object):
'''
Creates a master server
'''
class Minion(object):
'''
Create a minion server
'''
def __init__(self):
self.cli = self.__parse_cli()
self.opts = salt.utils.minion_config(self.cli)
def __parse_cli(self):
'''
Parse the cli input
'''
parser = optparse.OptionParser()
parser.add_option('-f',
'--foreground',
dest='foreground',
default=False,
action='store_true',
help='Run the minion in the foreground')
parser.add_option('-c',
'--config',
dest='config',
default='/etc/salt/minion',
help='Pass in an alternative configuration file')
options, args = parser.parse_args()
cli = {'foreground': options.foreground,
'config': options.config}
return cli
|
Set up the initial calling of the minion routines
|
Set up the initial calling of the minion routines
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
Set up the initial calling of the minion routines
|
'''
Make me some salt!
'''
# Import python libs
import os
import optparse
# Import salt libs
import salt.master
import salt.minion
import salt.utils
class Master(object):
'''
Creates a master server
'''
class Minion(object):
'''
Create a minion server
'''
def __init__(self):
self.cli = self.__parse_cli()
self.opts = salt.utils.minion_config(self.cli)
def __parse_cli(self):
'''
Parse the cli input
'''
parser = optparse.OptionParser()
parser.add_option('-f',
'--foreground',
dest='foreground',
default=False,
action='store_true',
help='Run the minion in the foreground')
parser.add_option('-c',
'--config',
dest='config',
default='/etc/salt/minion',
help='Pass in an alternative configuration file')
options, args = parser.parse_args()
cli = {'foreground': options.foreground,
'config': options.config}
return cli
|
<commit_before><commit_msg>Set up the initial calling of the minion routines<commit_after>
|
'''
Make me some salt!
'''
# Import python libs
import os
import optparse
# Import salt libs
import salt.master
import salt.minion
import salt.utils
class Master(object):
'''
Creates a master server
'''
class Minion(object):
'''
Create a minion server
'''
def __init__(self):
self.cli = self.__parse_cli()
self.opts = salt.utils.minion_config(self.cli)
def __parse_cli(self):
'''
Parse the cli input
'''
parser = optparse.OptionParser()
parser.add_option('-f',
'--foreground',
dest='foreground',
default=False,
action='store_true',
help='Run the minion in the foreground')
parser.add_option('-c',
'--config',
dest='config',
default='/etc/salt/minion',
help='Pass in an alternative configuration file')
options, args = parser.parse_args()
cli = {'foreground': options.foreground,
'config': options.config}
return cli
|
Set up the initial calling of the minion routines'''
Make me some salt!
'''
# Import python libs
import os
import optparse
# Import salt libs
import salt.master
import salt.minion
import salt.utils
class Master(object):
'''
Creates a master server
'''
class Minion(object):
'''
Create a minion server
'''
def __init__(self):
self.cli = self.__parse_cli()
self.opts = salt.utils.minion_config(self.cli)
def __parse_cli(self):
'''
Parse the cli input
'''
parser = optparse.OptionParser()
parser.add_option('-f',
'--foreground',
dest='foreground',
default=False,
action='store_true',
help='Run the minion in the foreground')
parser.add_option('-c',
'--config',
dest='config',
default='/etc/salt/minion',
help='Pass in an alternative configuration file')
options, args = parser.parse_args()
cli = {'foreground': options.foreground,
'config': options.config}
return cli
|
<commit_before><commit_msg>Set up the initial calling of the minion routines<commit_after>'''
Make me some salt!
'''
# Import python libs
import os
import optparse
# Import salt libs
import salt.master
import salt.minion
import salt.utils
class Master(object):
'''
Creates a master server
'''
class Minion(object):
'''
Create a minion server
'''
def __init__(self):
self.cli = self.__parse_cli()
self.opts = salt.utils.minion_config(self.cli)
def __parse_cli(self):
'''
Parse the cli input
'''
parser = optparse.OptionParser()
parser.add_option('-f',
'--foreground',
dest='foreground',
default=False,
action='store_true',
help='Run the minion in the foreground')
parser.add_option('-c',
'--config',
dest='config',
default='/etc/salt/minion',
help='Pass in an alternative configuration file')
options, args = parser.parse_args()
cli = {'foreground': options.foreground,
'config': options.config}
return cli
|
|
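The option parsing above can be exercised on its own; this sketch feeds optparse an explicit argv list instead of reading sys.argv, which is the only change from the commit's __parse_cli().

import optparse

def parse_cli(argv):
    parser = optparse.OptionParser()
    parser.add_option('-f', '--foreground', dest='foreground',
                      default=False, action='store_true',
                      help='Run the minion in the foreground')
    parser.add_option('-c', '--config', dest='config',
                      default='/etc/salt/minion',
                      help='Pass in an alternative configuration file')
    options, args = parser.parse_args(argv)
    return {'foreground': options.foreground, 'config': options.config}

print(parse_cli(['-f', '-c', '/tmp/minion']))
# {'foreground': True, 'config': '/tmp/minion'}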
e63a9430d9ad4d6bbfd6af66b1de617e71490c2c
|
countylimits/views.py
|
countylimits/views.py
|
from django.shortcuts import render
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
from countylimits.models import CountyLimit
@api_view(['GET'])
def county_limits(request):
""" Return all counties with their limits per state. """
if request.method == 'GET':
package = {'request': {}, 'data': []}
if 'state' in request.GET:
state = package['request']['state'] = request.GET['state']
package['data'] = CountyLimit.county_limits_by_state(state)
return Response(package)
else:
return Response({'detail': 'Required parameter state is missing'}, status=status.HTTP_400_BAD_REQUEST)
|
from django.shortcuts import render
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
from countylimits.models import CountyLimit
@api_view(['GET'])
def county_limits(request):
""" Return all counties with their limits per state. """
if request.method == 'GET':
package = {'request': {}, 'data': []}
if 'state' in request.GET:
state = request.GET['state']
data = CountyLimit.county_limits_by_state(state)
if data:
package['request']['state'] = request.GET['state']
package['data'] = data
return Response(package)
else:
return Response({'state': 'Invalid state'}, status=status.HTTP_400_BAD_REQUEST)
else:
return Response({'detail': 'Required parameter state is missing'}, status=status.HTTP_400_BAD_REQUEST)
|
Return 400 when state is invalid
|
Return 400 when state is invalid
|
Python
|
cc0-1.0
|
amymok/owning-a-home-api,cfpb/owning-a-home-api,fna/owning-a-home-api
|
from django.shortcuts import render
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
from countylimits.models import CountyLimit
@api_view(['GET'])
def county_limits(request):
""" Return all counties with their limits per state. """
if request.method == 'GET':
package = {'request': {}, 'data': []}
if 'state' in request.GET:
state = package['request']['state'] = request.GET['state']
package['data'] = CountyLimit.county_limits_by_state(state)
return Response(package)
else:
return Response({'detail': 'Required parameter state is missing'}, status=status.HTTP_400_BAD_REQUEST)
Return 400 when state is invalid
|
from django.shortcuts import render
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
from countylimits.models import CountyLimit
@api_view(['GET'])
def county_limits(request):
""" Return all counties with their limits per state. """
if request.method == 'GET':
package = {'request': {}, 'data': []}
if 'state' in request.GET:
state = request.GET['state']
data = CountyLimit.county_limits_by_state(state)
if data:
package['request']['state'] = request.GET['state']
package['data'] = data
return Response(package)
else:
return Response({'state': 'Invalid state'}, status=status.HTTP_400_BAD_REQUEST)
else:
return Response({'detail': 'Required parameter state is missing'}, status=status.HTTP_400_BAD_REQUEST)
|
<commit_before>from django.shortcuts import render
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
from countylimits.models import CountyLimit
@api_view(['GET'])
def county_limits(request):
""" Return all counties with their limits per state. """
if request.method == 'GET':
package = {'request': {}, 'data': []}
if 'state' in request.GET:
state = package['request']['state'] = request.GET['state']
package['data'] = CountyLimit.county_limits_by_state(state)
return Response(package)
else:
return Response({'detail': 'Required parameter state is missing'}, status=status.HTTP_400_BAD_REQUEST)
<commit_msg>Return 400 when state is invalid<commit_after>
|
from django.shortcuts import render
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
from countylimits.models import CountyLimit
@api_view(['GET'])
def county_limits(request):
""" Return all counties with their limits per state. """
if request.method == 'GET':
package = {'request': {}, 'data': []}
if 'state' in request.GET:
state = request.GET['state']
data = CountyLimit.county_limits_by_state(state)
if data:
package['request']['state'] = request.GET['state']
package['data'] = data
return Response(package)
else:
return Response({'state': 'Invalid state'}, status=status.HTTP_400_BAD_REQUEST)
else:
return Response({'detail': 'Required parameter state is missing'}, status=status.HTTP_400_BAD_REQUEST)
|
from django.shortcuts import render
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
from countylimits.models import CountyLimit
@api_view(['GET'])
def county_limits(request):
""" Return all counties with their limits per state. """
if request.method == 'GET':
package = {'request': {}, 'data': []}
if 'state' in request.GET:
state = package['request']['state'] = request.GET['state']
package['data'] = CountyLimit.county_limits_by_state(state)
return Response(package)
else:
return Response({'detail': 'Required parameter state is missing'}, status=status.HTTP_400_BAD_REQUEST)
Return 400 when state is invalidfrom django.shortcuts import render
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
from countylimits.models import CountyLimit
@api_view(['GET'])
def county_limits(request):
""" Return all counties with their limits per state. """
if request.method == 'GET':
package = {'request': {}, 'data': []}
if 'state' in request.GET:
state = request.GET['state']
data = CountyLimit.county_limits_by_state(state)
if data:
package['request']['state'] = request.GET['state']
package['data'] = data
return Response(package)
else:
return Response({'state': 'Invalid state'}, status=status.HTTP_400_BAD_REQUEST)
else:
return Response({'detail': 'Required parameter state is missing'}, status=status.HTTP_400_BAD_REQUEST)
|
<commit_before>from django.shortcuts import render
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
from countylimits.models import CountyLimit
@api_view(['GET'])
def county_limits(request):
""" Return all counties with their limits per state. """
if request.method == 'GET':
package = {'request': {}, 'data': []}
if 'state' in request.GET:
state = package['request']['state'] = request.GET['state']
package['data'] = CountyLimit.county_limits_by_state(state)
return Response(package)
else:
return Response({'detail': 'Required parameter state is missing'}, status=status.HTTP_400_BAD_REQUEST)
<commit_msg>Return 400 when state is invalid<commit_after>from django.shortcuts import render
from rest_framework import status
from rest_framework.decorators import api_view
from rest_framework.response import Response
from countylimits.models import CountyLimit
@api_view(['GET'])
def county_limits(request):
""" Return all counties with their limits per state. """
if request.method == 'GET':
package = {'request': {}, 'data': []}
if 'state' in request.GET:
state = request.GET['state']
data = CountyLimit.county_limits_by_state(state)
if data:
package['request']['state'] = request.GET['state']
package['data'] = data
return Response(package)
else:
return Response({'state': 'Invalid state'}, status=status.HTTP_400_BAD_REQUEST)
else:
return Response({'detail': 'Required parameter state is missing'}, status=status.HTTP_400_BAD_REQUEST)
|
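A framework-free sketch of the behaviour change in this commit: an unknown state now yields a 400 with an 'Invalid state' payload instead of a 200 with empty data. The SAMPLE_LIMITS lookup table is made up for illustration.

SAMPLE_LIMITS = {'CA': [{'county': 'Alameda', 'limit': 625500}]}

def county_limits(params):
    if 'state' not in params:
        return 400, {'detail': 'Required parameter state is missing'}
    data = SAMPLE_LIMITS.get(params['state'], [])
    if not data:
        # Empty lookup result now maps to a client error.
        return 400, {'state': 'Invalid state'}
    return 200, {'request': {'state': params['state']}, 'data': data}

print(county_limits({}))                  # (400, {'detail': 'Required parameter state is missing'})
print(county_limits({'state': 'ZZ'}))     # (400, {'state': 'Invalid state'})
print(county_limits({'state': 'CA'})[0])  # 200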
c8de08c451943f8fc428a611575f1329024e001a
|
webmention_plugin.py
|
webmention_plugin.py
|
from webmentiontools.send import WebmentionSend
from webmentiontools.urlinfo import UrlInfo
def handle_new_or_edit(post):
url = post.permalink_url
in_reply_to = post.in_reply_to
if url and in_reply_to:
print "Sending webmention {} to {}".format(url, in_reply_to)
sender = WebmentionSend(url, in_reply_to)
sender.send(verify=False)
print "Finished sending webmention"
|
from webmentiontools.send import WebmentionSend
from webmentiontools.urlinfo import UrlInfo
def handle_new_or_edit(post):
url = post.permalink_url
in_reply_to = post.in_reply_to
if url and in_reply_to:
print "Sending webmention {} to {}".format(url, in_reply_to)
sender = WebmentionSend(url, in_reply_to)
success = sender.send(verify=False)
print "Finished sending webmention: ", success
if success:
print sender.response
else:
print sender.error
|
Add some debugging output to webmentions
|
Add some debugging output to webmentions
|
Python
|
bsd-2-clause
|
Lancey6/redwind,thedod/redwind,thedod/redwind,Lancey6/redwind,Lancey6/redwind
|
from webmentiontools.send import WebmentionSend
from webmentiontools.urlinfo import UrlInfo
def handle_new_or_edit(post):
url = post.permalink_url
in_reply_to = post.in_reply_to
if url and in_reply_to:
print "Sending webmention {} to {}".format(url, in_reply_to)
sender = WebmentionSend(url, in_reply_to)
sender.send(verify=False)
print "Finished sending webmention"
Add some debugging output to webmentions
|
from webmentiontools.send import WebmentionSend
from webmentiontools.urlinfo import UrlInfo
def handle_new_or_edit(post):
url = post.permalink_url
in_reply_to = post.in_reply_to
if url and in_reply_to:
print "Sending webmention {} to {}".format(url, in_reply_to)
sender = WebmentionSend(url, in_reply_to)
success = sender.send(verify=False)
print "Finished sending webmention: ", success
if success:
print sender.response
else:
print sender.error
|
<commit_before>from webmentiontools.send import WebmentionSend
from webmentiontools.urlinfo import UrlInfo
def handle_new_or_edit(post):
url = post.permalink_url
in_reply_to = post.in_reply_to
if url and in_reply_to:
print "Sending webmention {} to {}".format(url, in_reply_to)
sender = WebmentionSend(url, in_reply_to)
sender.send(verify=False)
print "Finished sending webmention"
<commit_msg>Add some debugging output to webmentions<commit_after>
|
from webmentiontools.send import WebmentionSend
from webmentiontools.urlinfo import UrlInfo
def handle_new_or_edit(post):
url = post.permalink_url
in_reply_to = post.in_reply_to
if url and in_reply_to:
print "Sending webmention {} to {}".format(url, in_reply_to)
sender = WebmentionSend(url, in_reply_to)
success = sender.send(verify=False)
print "Finished sending webmention: ", success
if success:
print sender.response
else:
print sender.error
|
from webmentiontools.send import WebmentionSend
from webmentiontools.urlinfo import UrlInfo
def handle_new_or_edit(post):
url = post.permalink_url
in_reply_to = post.in_reply_to
if url and in_reply_to:
print "Sending webmention {} to {}".format(url, in_reply_to)
sender = WebmentionSend(url, in_reply_to)
sender.send(verify=False)
print "Finished sending webmention"
Add some debugging output to webmentionsfrom webmentiontools.send import WebmentionSend
from webmentiontools.urlinfo import UrlInfo
def handle_new_or_edit(post):
url = post.permalink_url
in_reply_to = post.in_reply_to
if url and in_reply_to:
print "Sending webmention {} to {}".format(url, in_reply_to)
sender = WebmentionSend(url, in_reply_to)
success = sender.send(verify=False)
print "Finished sending webmention: ", success
if success:
print sender.response
else:
print sender.error
|
<commit_before>from webmentiontools.send import WebmentionSend
from webmentiontools.urlinfo import UrlInfo
def handle_new_or_edit(post):
url = post.permalink_url
in_reply_to = post.in_reply_to
if url and in_reply_to:
print "Sending webmention {} to {}".format(url, in_reply_to)
sender = WebmentionSend(url, in_reply_to)
sender.send(verify=False)
print "Finished sending webmention"
<commit_msg>Add some debugging output to webmentions<commit_after>from webmentiontools.send import WebmentionSend
from webmentiontools.urlinfo import UrlInfo
def handle_new_or_edit(post):
url = post.permalink_url
in_reply_to = post.in_reply_to
if url and in_reply_to:
print "Sending webmention {} to {}".format(url, in_reply_to)
sender = WebmentionSend(url, in_reply_to)
success = sender.send(verify=False)
print "Finished sending webmention: ", success
if success:
print sender.response
else:
print sender.error
|
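A small sketch of the success/error branching this commit adds around send(); WebmentionSend is replaced by a fake so the flow can be run without network access, and the response/error payloads are invented.

class FakeSender:
    def __init__(self, ok):
        self._ok = ok
        self.response = {'status': 'accepted'}   # hypothetical payload
        self.error = {'code': 'no_link_found'}   # hypothetical payload

    def send(self, verify=False):
        return self._ok

def report(sender):
    success = sender.send(verify=False)
    print("Finished sending webmention:", success)
    print(sender.response if success else sender.error)

report(FakeSender(True))   # prints the response on success
report(FakeSender(False))  # prints the error otherwise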
1046d952415fe48da1d2c06a5abb6e8e31074fab
|
arc_distance.py
|
arc_distance.py
|
from math import cos,sqrt
import numpy as np
from scipy.optimize import fsolve
def func(p, *data):
x,y = p
i,j,r = data
return ((x+r)*(i-x)+(j-y)*y,(x+r)**2+y**2-r**2)
data = (-7,10,5)
x,y = fsolve(func, [1,1], args=data)
print x,y
d = sqrt((data[0]-x)**2+(data[1]-y)**2)
print d
#make a change
|
from math import cos,sqrt
import numpy as np
from scipy.optimize import fsolve
def func(p, *data):
x,y = p
i,j,r = data
return ((x+r)*(i-x)+(j-y)*y,(x+r)**2+y**2-r**2)
data = (-7,10,5)
x,y = fsolve(func, [1,1], args=data)
print x,y
d = sqrt((data[0]-x)**2+(data[1]-y)**2)
print d
|
Revert "Added files via upload"
|
Revert "Added files via upload"
This reverts commit d93c66fc467a9899343eacdd85e90bdfe8a0dbd3.
|
Python
|
apache-2.0
|
limatthewk/UAVs
|
from math import cos,sqrt
import numpy as np
from scipy.optimize import fsolve
def func(p, *data):
x,y = p
i,j,r = data
return ((x+r)*(i-x)+(j-y)*y,(x+r)**2+y**2-r**2)
data = (-7,10,5)
x,y = fsolve(func, [1,1], args=data)
print x,y
d = sqrt((data[0]-x)**2+(data[1]-y)**2)
print d
#make a changeRevert "Added files via upload"
This reverts commit d93c66fc467a9899343eacdd85e90bdfe8a0dbd3.
|
from math import cos,sqrt
import numpy as np
from scipy.optimize import fsolve
def func(p, *data):
x,y = p
i,j,r = data
return ((x+r)*(i-x)+(j-y)*y,(x+r)**2+y**2-r**2)
data = (-7,10,5)
x,y = fsolve(func, [1,1], args=data)
print x,y
d = sqrt((data[0]-x)**2+(data[1]-y)**2)
print d
|
<commit_before>from math import cos,sqrt
import numpy as np
from scipy.optimize import fsolve
def func(p, *data):
x,y = p
i,j,r = data
return ((x+r)*(i-x)+(j-y)*y,(x+r)**2+y**2-r**2)
data = (-7,10,5)
x,y = fsolve(func, [1,1], args=data)
print x,y
d = sqrt((data[0]-x)**2+(data[1]-y)**2)
print d
#make a change<commit_msg>Revert "Added files via upload"
This reverts commit d93c66fc467a9899343eacdd85e90bdfe8a0dbd3.<commit_after>
|
from math import cos,sqrt
import numpy as np
from scipy.optimize import fsolve
def func(p, *data):
x,y = p
i,j,r = data
return ((x+r)*(i-x)+(j-y)*y,(x+r)**2+y**2-r**2)
data = (-7,10,5)
x,y = fsolve(func, [1,1], args=data)
print x,y
d = sqrt((data[0]-x)**2+(data[1]-y)**2)
print d
|
from math import cos,sqrt
import numpy as np
from scipy.optimize import fsolve
def func(p, *data):
x,y = p
i,j,r = data
return ((x+r)*(i-x)+(j-y)*y,(x+r)**2+y**2-r**2)
data = (-7,10,5)
x,y = fsolve(func, [1,1], args=data)
print x,y
d = sqrt((data[0]-x)**2+(data[1]-y)**2)
print d
#make a changeRevert "Added files via upload"
This reverts commit d93c66fc467a9899343eacdd85e90bdfe8a0dbd3.from math import cos,sqrt
import numpy as np
from scipy.optimize import fsolve
def func(p, *data):
x,y = p
i,j,r = data
return ((x+r)*(i-x)+(j-y)*y,(x+r)**2+y**2-r**2)
data = (-7,10,5)
x,y = fsolve(func, [1,1], args=data)
print x,y
d = sqrt((data[0]-x)**2+(data[1]-y)**2)
print d
|
<commit_before>from math import cos,sqrt
import numpy as np
from scipy.optimize import fsolve
def func(p, *data):
x,y = p
i,j,r = data
return ((x+r)*(i-x)+(j-y)*y,(x+r)**2+y**2-r**2)
data = (-7,10,5)
x,y = fsolve(func, [1,1], args=data)
print x,y
d = sqrt((data[0]-x)**2+(data[1]-y)**2)
print d
#make a change<commit_msg>Revert "Added files via upload"
This reverts commit d93c66fc467a9899343eacdd85e90bdfe8a0dbd3.<commit_after>from math import cos,sqrt
import numpy as np
from scipy.optimize import fsolve
def func(p, *data):
x,y = p
i,j,r = data
return ((x+r)*(i-x)+(j-y)*y,(x+r)**2+y**2-r**2)
data = (-7,10,5)
x,y = fsolve(func, [1,1], args=data)
print x,y
d = sqrt((data[0]-x)**2+(data[1]-y)**2)
print d
|
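For the reverted script above, one way to sanity-check the fsolve result is to plug it back into the two equations being solved (the perpendicularity condition and the circle constraint); both residuals should come out near zero. This assumes SciPy is available, as in the original script.

from math import sqrt
from scipy.optimize import fsolve

def func(p, *data):
    x, y = p
    i, j, r = data
    return ((x + r) * (i - x) + (j - y) * y, (x + r) ** 2 + y ** 2 - r ** 2)

data = (-7, 10, 5)
x, y = fsolve(func, [1, 1], args=data)
print(func((x, y), *data))                            # residuals, both ~0
print(sqrt((data[0] - x) ** 2 + (data[1] - y) ** 2))  # distance to the solved point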
a58cbb8b3c2c2648098cdd3af3f7316f659902c9
|
pythonic_rules.example/upload/__init__.py
|
pythonic_rules.example/upload/__init__.py
|
#!/usr/bin/python
from config import INTERFACES
from built_in_classes import RootHTBClass
from .upload import Interactive, TCPACK, SSH, HTTP, Default
def apply_qos():
public_if = INTERFACES["public_if"]
root_class = RootHTBClass(
interface=public_if["name"],
rate=public_if["speed"],
burst=public_if["speed"]/8,
qdisc_prefix_id="1:",
default=1500
)
# root_class.add_child(Interactive())
# root_class.add_child(TCPACK())
# root_class.add_child(SSH())
# root_class.add_child(HTTP())
root_class.add_child(Default())
root_class.apply_qos()
|
#!/usr/bin/python
from config import INTERFACES
from built_in_classes import RootHTBClass
from .upload import Interactive, TCPACK, SSH, HTTP, Default
def apply_qos():
public_if = INTERFACES["public_if"]
root_class = RootHTBClass(
interface=public_if["name"],
rate=public_if["speed"],
burst=public_if["speed"]/8,
qdisc_prefix_id="1:",
default=1500
)
root_class.add_child(Interactive())
root_class.add_child(TCPACK())
root_class.add_child(SSH())
root_class.add_child(HTTP())
root_class.add_child(Default())
root_class.apply_qos()
|
Enable all rules in pythonic_rules
|
Enable all rules in pythonic_rules
Has been disabled to avoid errors until the new design was nos finished.
|
Python
|
bsd-2-clause
|
Anthony25/python_tc_qos
|
#!/usr/bin/python
from config import INTERFACES
from built_in_classes import RootHTBClass
from .upload import Interactive, TCPACK, SSH, HTTP, Default
def apply_qos():
public_if = INTERFACES["public_if"]
root_class = RootHTBClass(
interface=public_if["name"],
rate=public_if["speed"],
burst=public_if["speed"]/8,
qdisc_prefix_id="1:",
default=1500
)
# root_class.add_child(Interactive())
# root_class.add_child(TCPACK())
# root_class.add_child(SSH())
# root_class.add_child(HTTP())
root_class.add_child(Default())
root_class.apply_qos()
Enable all rules in pythonic_rules
Has been disabled to avoid errors until the new design was nos finished.
|
#!/usr/bin/python
from config import INTERFACES
from built_in_classes import RootHTBClass
from .upload import Interactive, TCPACK, SSH, HTTP, Default
def apply_qos():
public_if = INTERFACES["public_if"]
root_class = RootHTBClass(
interface=public_if["name"],
rate=public_if["speed"],
burst=public_if["speed"]/8,
qdisc_prefix_id="1:",
default=1500
)
root_class.add_child(Interactive())
root_class.add_child(TCPACK())
root_class.add_child(SSH())
root_class.add_child(HTTP())
root_class.add_child(Default())
root_class.apply_qos()
|
<commit_before>#!/usr/bin/python
from config import INTERFACES
from built_in_classes import RootHTBClass
from .upload import Interactive, TCPACK, SSH, HTTP, Default
def apply_qos():
public_if = INTERFACES["public_if"]
root_class = RootHTBClass(
interface=public_if["name"],
rate=public_if["speed"],
burst=public_if["speed"]/8,
qdisc_prefix_id="1:",
default=1500
)
# root_class.add_child(Interactive())
# root_class.add_child(TCPACK())
# root_class.add_child(SSH())
# root_class.add_child(HTTP())
root_class.add_child(Default())
root_class.apply_qos()
<commit_msg>Enable all rules in pythonic_rules
Has been disabled to avoid errors until the new design was not finished.<commit_after>
|
#!/usr/bin/python
from config import INTERFACES
from built_in_classes import RootHTBClass
from .upload import Interactive, TCPACK, SSH, HTTP, Default
def apply_qos():
public_if = INTERFACES["public_if"]
root_class = RootHTBClass(
interface=public_if["name"],
rate=public_if["speed"],
burst=public_if["speed"]/8,
qdisc_prefix_id="1:",
default=1500
)
root_class.add_child(Interactive())
root_class.add_child(TCPACK())
root_class.add_child(SSH())
root_class.add_child(HTTP())
root_class.add_child(Default())
root_class.apply_qos()
|
#!/usr/bin/python
from config import INTERFACES
from built_in_classes import RootHTBClass
from .upload import Interactive, TCPACK, SSH, HTTP, Default
def apply_qos():
public_if = INTERFACES["public_if"]
root_class = RootHTBClass(
interface=public_if["name"],
rate=public_if["speed"],
burst=public_if["speed"]/8,
qdisc_prefix_id="1:",
default=1500
)
# root_class.add_child(Interactive())
# root_class.add_child(TCPACK())
# root_class.add_child(SSH())
# root_class.add_child(HTTP())
root_class.add_child(Default())
root_class.apply_qos()
Enable all rules in pythonic_rules
Has been disabled to avoid errors until the new design was not finished.#!/usr/bin/python
from config import INTERFACES
from built_in_classes import RootHTBClass
from .upload import Interactive, TCPACK, SSH, HTTP, Default
def apply_qos():
public_if = INTERFACES["public_if"]
root_class = RootHTBClass(
interface=public_if["name"],
rate=public_if["speed"],
burst=public_if["speed"]/8,
qdisc_prefix_id="1:",
default=1500
)
root_class.add_child(Interactive())
root_class.add_child(TCPACK())
root_class.add_child(SSH())
root_class.add_child(HTTP())
root_class.add_child(Default())
root_class.apply_qos()
|
<commit_before>#!/usr/bin/python
from config import INTERFACES
from built_in_classes import RootHTBClass
from .upload import Interactive, TCPACK, SSH, HTTP, Default
def apply_qos():
public_if = INTERFACES["public_if"]
root_class = RootHTBClass(
interface=public_if["name"],
rate=public_if["speed"],
burst=public_if["speed"]/8,
qdisc_prefix_id="1:",
default=1500
)
# root_class.add_child(Interactive())
# root_class.add_child(TCPACK())
# root_class.add_child(SSH())
# root_class.add_child(HTTP())
root_class.add_child(Default())
root_class.apply_qos()
<commit_msg>Enable all rules in pythonic_rules
Has been disabled to avoid errors until the new design was not finished.<commit_after>#!/usr/bin/python
from config import INTERFACES
from built_in_classes import RootHTBClass
from .upload import Interactive, TCPACK, SSH, HTTP, Default
def apply_qos():
public_if = INTERFACES["public_if"]
root_class = RootHTBClass(
interface=public_if["name"],
rate=public_if["speed"],
burst=public_if["speed"]/8,
qdisc_prefix_id="1:",
default=1500
)
root_class.add_child(Interactive())
root_class.add_child(TCPACK())
root_class.add_child(SSH())
root_class.add_child(HTTP())
root_class.add_child(Default())
root_class.apply_qos()
|
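A toy sketch of the root/child wiring pattern that the re-enabled lines rely on; the classes here are stand-ins, not the real RootHTBClass/built_in_classes API.

class Node:
    def __init__(self, name):
        self.name = name
        self.children = []

    def add_child(self, child):
        self.children.append(child)

    def apply_qos(self):
        # A real HTB class would emit tc commands here; the stub just shows order.
        print("applying", self.name)
        for child in self.children:
            child.apply_qos()

root = Node("root-htb")
for leaf in ("interactive", "tcp-ack", "ssh", "http", "default"):
    root.add_child(Node(leaf))
root.apply_qos()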
6d0b2b5787be4d3a23fa74eccebb4935cb85d48b
|
salt/runners/state.py
|
salt/runners/state.py
|
'''
Execute overstate functions
'''
# Import salt libs
import salt.overstate
import salt.output
def over(env='base', os_fn=None):
'''
Execute an overstate sequence to orchestrate the executing of states
over a group of systems
'''
stage_num = 0
overstate = salt.overstate.OverState(__opts__, env, os_fn)
for stage in overstate.stages_iter():
if isinstance(stage, dict):
# This is highstate data
print('Stage execution results:')
for key, val in stage.items():
salt.output.display_output(
{key: val},
'highstate',
opts=__opts__)
elif isinstance(stage, list):
# This is a stage
if stage_num == 0:
print('Executing the following Over State:')
else:
print('Executed Stage:')
salt.output.display_output(stage, 'overstatestage', opts=__opts__)
stage_num += 1
return overstate.over_run
def show_stages(env='base', os_fn=None):
'''
Display the stage data to be executed
'''
overstate = salt.overstate.OverState(__opts__, env, os_fn)
salt.output.display_output(
overstate.over,
'overstatestage',
opts=__opts__)
return overstate.over
|
'''
Execute overstate functions
'''
# Import salt libs
import salt.overstate
import salt.output
def over(env='base', os_fn=None):
'''
Execute an overstate sequence to orchestrate the executing of states
over a group of systems
'''
stage_num = 0
overstate = salt.overstate.OverState(__opts__, env, os_fn)
for stage in overstate.stages_iter():
if isinstance(stage, dict):
# This is highstate data
print('Stage execution results:')
for key, val in stage.items():
salt.output.display_output(
{'local': {key: val}},
'highstate',
opts=__opts__)
elif isinstance(stage, list):
# This is a stage
if stage_num == 0:
print('Executing the following Over State:')
else:
print('Executed Stage:')
salt.output.display_output(stage, 'overstatestage', opts=__opts__)
stage_num += 1
return overstate.over_run
def show_stages(env='base', os_fn=None):
'''
Display the stage data to be executed
'''
overstate = salt.overstate.OverState(__opts__, env, os_fn)
salt.output.display_output(
overstate.over,
'overstatestage',
opts=__opts__)
return overstate.over
|
Fix traceback because outputter expects data in {'host', data.. } format
|
Fix traceback because outputter expects data in {'host', data.. } format
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
'''
Execute overstate functions
'''
# Import salt libs
import salt.overstate
import salt.output
def over(env='base', os_fn=None):
'''
Execute an overstate sequence to orchestrate the executing of states
over a group of systems
'''
stage_num = 0
overstate = salt.overstate.OverState(__opts__, env, os_fn)
for stage in overstate.stages_iter():
if isinstance(stage, dict):
# This is highstate data
print('Stage execution results:')
for key, val in stage.items():
salt.output.display_output(
{key: val},
'highstate',
opts=__opts__)
elif isinstance(stage, list):
# This is a stage
if stage_num == 0:
print('Executing the following Over State:')
else:
print('Executed Stage:')
salt.output.display_output(stage, 'overstatestage', opts=__opts__)
stage_num += 1
return overstate.over_run
def show_stages(env='base', os_fn=None):
'''
Display the stage data to be executed
'''
overstate = salt.overstate.OverState(__opts__, env, os_fn)
salt.output.display_output(
overstate.over,
'overstatestage',
opts=__opts__)
return overstate.over
Fix traceback because outputter expects data in {'host', data.. } format
|
'''
Execute overstate functions
'''
# Import salt libs
import salt.overstate
import salt.output
def over(env='base', os_fn=None):
'''
Execute an overstate sequence to orchestrate the executing of states
over a group of systems
'''
stage_num = 0
overstate = salt.overstate.OverState(__opts__, env, os_fn)
for stage in overstate.stages_iter():
if isinstance(stage, dict):
# This is highstate data
print('Stage execution results:')
for key, val in stage.items():
salt.output.display_output(
{'local': {key: val}},
'highstate',
opts=__opts__)
elif isinstance(stage, list):
# This is a stage
if stage_num == 0:
print('Executing the following Over State:')
else:
print('Executed Stage:')
salt.output.display_output(stage, 'overstatestage', opts=__opts__)
stage_num += 1
return overstate.over_run
def show_stages(env='base', os_fn=None):
'''
Display the stage data to be executed
'''
overstate = salt.overstate.OverState(__opts__, env, os_fn)
salt.output.display_output(
overstate.over,
'overstatestage',
opts=__opts__)
return overstate.over
|
<commit_before>'''
Execute overstate functions
'''
# Import salt libs
import salt.overstate
import salt.output
def over(env='base', os_fn=None):
'''
Execute an overstate sequence to orchestrate the executing of states
over a group of systems
'''
stage_num = 0
overstate = salt.overstate.OverState(__opts__, env, os_fn)
for stage in overstate.stages_iter():
if isinstance(stage, dict):
# This is highstate data
print('Stage execution results:')
for key, val in stage.items():
salt.output.display_output(
{key: val},
'highstate',
opts=__opts__)
elif isinstance(stage, list):
# This is a stage
if stage_num == 0:
print('Executing the following Over State:')
else:
print('Executed Stage:')
salt.output.display_output(stage, 'overstatestage', opts=__opts__)
stage_num += 1
return overstate.over_run
def show_stages(env='base', os_fn=None):
'''
Display the stage data to be executed
'''
overstate = salt.overstate.OverState(__opts__, env, os_fn)
salt.output.display_output(
overstate.over,
'overstatestage',
opts=__opts__)
return overstate.over
<commit_msg>Fix traceback because outputter expects data in {'host', data.. } format<commit_after>
|
'''
Execute overstate functions
'''
# Import salt libs
import salt.overstate
import salt.output
def over(env='base', os_fn=None):
'''
Execute an overstate sequence to orchestrate the executing of states
over a group of systems
'''
stage_num = 0
overstate = salt.overstate.OverState(__opts__, env, os_fn)
for stage in overstate.stages_iter():
if isinstance(stage, dict):
# This is highstate data
print('Stage execution results:')
for key, val in stage.items():
salt.output.display_output(
{'local': {key: val}},
'highstate',
opts=__opts__)
elif isinstance(stage, list):
# This is a stage
if stage_num == 0:
print('Executing the following Over State:')
else:
print('Executed Stage:')
salt.output.display_output(stage, 'overstatestage', opts=__opts__)
stage_num += 1
return overstate.over_run
def show_stages(env='base', os_fn=None):
'''
Display the stage data to be executed
'''
overstate = salt.overstate.OverState(__opts__, env, os_fn)
salt.output.display_output(
overstate.over,
'overstatestage',
opts=__opts__)
return overstate.over
|
'''
Execute overstate functions
'''
# Import salt libs
import salt.overstate
import salt.output
def over(env='base', os_fn=None):
'''
Execute an overstate sequence to orchestrate the executing of states
over a group of systems
'''
stage_num = 0
overstate = salt.overstate.OverState(__opts__, env, os_fn)
for stage in overstate.stages_iter():
if isinstance(stage, dict):
# This is highstate data
print('Stage execution results:')
for key, val in stage.items():
salt.output.display_output(
{key: val},
'highstate',
opts=__opts__)
elif isinstance(stage, list):
# This is a stage
if stage_num == 0:
print('Executing the following Over State:')
else:
print('Executed Stage:')
salt.output.display_output(stage, 'overstatestage', opts=__opts__)
stage_num += 1
return overstate.over_run
def show_stages(env='base', os_fn=None):
'''
Display the stage data to be executed
'''
overstate = salt.overstate.OverState(__opts__, env, os_fn)
salt.output.display_output(
overstate.over,
'overstatestage',
opts=__opts__)
return overstate.over
Fix traceback because outputter expects data in {'host', data.. } format'''
Execute overstate functions
'''
# Import salt libs
import salt.overstate
import salt.output
def over(env='base', os_fn=None):
'''
Execute an overstate sequence to orchestrate the executing of states
over a group of systems
'''
stage_num = 0
overstate = salt.overstate.OverState(__opts__, env, os_fn)
for stage in overstate.stages_iter():
if isinstance(stage, dict):
# This is highstate data
print('Stage execution results:')
for key, val in stage.items():
salt.output.display_output(
{'local': {key: val}},
'highstate',
opts=__opts__)
elif isinstance(stage, list):
# This is a stage
if stage_num == 0:
print('Executing the following Over State:')
else:
print('Executed Stage:')
salt.output.display_output(stage, 'overstatestage', opts=__opts__)
stage_num += 1
return overstate.over_run
def show_stages(env='base', os_fn=None):
'''
Display the stage data to be executed
'''
overstate = salt.overstate.OverState(__opts__, env, os_fn)
salt.output.display_output(
overstate.over,
'overstatestage',
opts=__opts__)
return overstate.over
|
<commit_before>'''
Execute overstate functions
'''
# Import salt libs
import salt.overstate
import salt.output
def over(env='base', os_fn=None):
'''
Execute an overstate sequence to orchestrate the executing of states
over a group of systems
'''
stage_num = 0
overstate = salt.overstate.OverState(__opts__, env, os_fn)
for stage in overstate.stages_iter():
if isinstance(stage, dict):
# This is highstate data
print('Stage execution results:')
for key, val in stage.items():
salt.output.display_output(
{key: val},
'highstate',
opts=__opts__)
elif isinstance(stage, list):
# This is a stage
if stage_num == 0:
print('Executing the following Over State:')
else:
print('Executed Stage:')
salt.output.display_output(stage, 'overstatestage', opts=__opts__)
stage_num += 1
return overstate.over_run
def show_stages(env='base', os_fn=None):
'''
Display the stage data to be executed
'''
overstate = salt.overstate.OverState(__opts__, env, os_fn)
salt.output.display_output(
overstate.over,
'overstatestage',
opts=__opts__)
return overstate.over
<commit_msg>Fix traceback because outputter expects data in {'host', data.. } format<commit_after>'''
Execute overstate functions
'''
# Import salt libs
import salt.overstate
import salt.output
def over(env='base', os_fn=None):
'''
Execute an overstate sequence to orchestrate the executing of states
over a group of systems
'''
stage_num = 0
overstate = salt.overstate.OverState(__opts__, env, os_fn)
for stage in overstate.stages_iter():
if isinstance(stage, dict):
# This is highstate data
print('Stage execution results:')
for key, val in stage.items():
salt.output.display_output(
{'local': {key: val}},
'highstate',
opts=__opts__)
elif isinstance(stage, list):
# This is a stage
if stage_num == 0:
print('Executing the following Over State:')
else:
print('Executed Stage:')
salt.output.display_output(stage, 'overstatestage', opts=__opts__)
stage_num += 1
return overstate.over_run
def show_stages(env='base', os_fn=None):
'''
Display the stage data to be executed
'''
overstate = salt.overstate.OverState(__opts__, env, os_fn)
salt.output.display_output(
overstate.over,
'overstatestage',
opts=__opts__)
return overstate.over
|
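A minimal illustration of the shape change that removes the traceback: the highstate outputter expects results keyed by host, so each stage result is wrapped under a host key ('local' in the commit) before display. display_output and the stage data are stand-ins; only the nesting is the point.

def display_output(data, outputter):
    # Stand-in for salt.output.display_output.
    print(outputter, data)

stage = {'web1': {'result': True}}   # made-up stage result
for key, val in stage.items():
    display_output({'local': {key: val}}, 'highstate')  # host-keyed, as the fix requires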
9f091fcc572eb6a65592f828818b34d3e1269083
|
alg_bellman_ford_shortest_path.py
|
alg_bellman_ford_shortest_path.py
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def bellman_ford(w_graph_d, start_vertex):
"""Bellman-Ford algorithm for weighted / negative graph.
"""
pass
def main():
w_graph_d = {
's': {'a': 2, 'b': 6},
'a': {'b': 3},
'b': {'a': -5}
}
start_vertex = 's'
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def bellman_ford(w_graph_d, start_vertex):
"""Bellman-Ford algorithm for weighted / negative graph.
"""
pass
def main():
w_graph_d = {
's': {'a': 2, 'b': 6},
'a': {'b': 3, 'c': 1},
'b': {'a': -5, 'd': 2},
'c': {'b': 1, 'e': 4, 'f': 2},
'd': {'c': 3, 'f': 2},
'e': {},
'f': {'e': 1}
}
start_vertex = 's'
if __name__ == '__main__':
main()
|
Revise main()'s weighted negative graph
|
Revise main()'s weighted negative graph
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def bellman_ford(w_graph_d, start_vertex):
"""Bellman-Ford algorithm for weighted / negative graph.
"""
pass
def main():
w_graph_d = {
's': {'a': 2, 'b': 6},
'a': {'b': 3},
'b': {'a': -5}
}
start_vertex = 's'
if __name__ == '__main__':
main()
Revise main()'s weighted negative graph
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def bellman_ford(w_graph_d, start_vertex):
"""Bellman-Ford algorithm for weighted / negative graph.
"""
pass
def main():
w_graph_d = {
's': {'a': 2, 'b': 6},
'a': {'b': 3, 'c': 1},
'b': {'a': -5, 'd': 2},
'c': {'b': 1, 'e': 4, 'f': 2},
'd': {'c': 3, 'f': 2},
'e': {},
'f': {'e': 1}
}
start_vertex = 's'
if __name__ == '__main__':
main()
|
<commit_before>from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def bellman_ford(w_graph_d, start_vertex):
"""Bellman-Ford algorithm for weighted / negative graph.
"""
pass
def main():
w_graph_d = {
's': {'a': 2, 'b': 6},
'a': {'b': 3},
'b': {'a': -5}
}
start_vertex = 's'
if __name__ == '__main__':
main()
<commit_msg>Revise main()'s weighted negative graph<commit_after>
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def bellman_ford(w_graph_d, start_vertex):
"""Bellman-Ford algorithm for weighted / negative graph.
"""
pass
def main():
w_graph_d = {
's': {'a': 2, 'b': 6},
'a': {'b': 3, 'c': 1},
'b': {'a': -5, 'd': 2},
'c': {'b': 1, 'e': 4, 'f': 2},
'd': {'c': 3, 'f': 2},
'e': {},
'f': {'e': 1}
}
start_vertex = 's'
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def bellman_ford(w_graph_d, start_vertex):
"""Bellman-Ford algorithm for weighted / negative graph.
"""
pass
def main():
w_graph_d = {
's': {'a': 2, 'b': 6},
'a': {'b': 3},
'b': {'a': -5}
}
start_vertex = 's'
if __name__ == '__main__':
main()
Revise main()'s weighted negative graphfrom __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def bellman_ford(w_graph_d, start_vertex):
"""Bellman-Ford algorithm for weighted / negative graph.
"""
pass
def main():
w_graph_d = {
's': {'a': 2, 'b': 6},
'a': {'b': 3, 'c': 1},
'b': {'a': -5, 'd': 2},
'c': {'b': 1, 'e': 4, 'f': 2},
'd': {'c': 3, 'f': 2},
'e': {},
'f': {'e': 1}
}
start_vertex = 's'
if __name__ == '__main__':
main()
|
<commit_before>from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def bellman_ford(w_graph_d, start_vertex):
"""Bellman-Ford algorithm for weighted / negative graph.
"""
pass
def main():
w_graph_d = {
's': {'a': 2, 'b': 6},
'a': {'b': 3},
'b': {'a': -5}
}
start_vertex = 's'
if __name__ == '__main__':
main()
<commit_msg>Revise main()'s weighted negative graph<commit_after>from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def bellman_ford(w_graph_d, start_vertex):
"""Bellman-Ford algorithm for weighted / negative graph.
"""
pass
def main():
w_graph_d = {
's': {'a': 2, 'b': 6},
'a': {'b': 3, 'c': 1},
'b': {'a': -5, 'd': 2},
'c': {'b': 1, 'e': 4, 'f': 2},
'd': {'c': 3, 'f': 2},
'e': {},
'f': {'e': 1}
}
start_vertex = 's'
if __name__ == '__main__':
main()
|
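The bellman_ford() stub above leaves the algorithm out; this is one possible sketch of the standard relaxation loop over the same adjacency-dict shape, with a final pass for negative-cycle detection, not the repository's eventual implementation. Note that in the revised graph the a/b pair forms a cycle of weight 3 + (-5) = -2, so the detection pass fires.

def bellman_ford_sketch(w_graph_d, start_vertex):
    dist = {v: float('inf') for v in w_graph_d}
    dist[start_vertex] = 0
    # Relax every edge |V| - 1 times.
    for _ in range(len(w_graph_d) - 1):
        for u, neighbors in w_graph_d.items():
            for v, w in neighbors.items():
                if dist[u] + w < dist[v]:
                    dist[v] = dist[u] + w
    # One extra pass: any further improvement means a reachable negative cycle.
    for u, neighbors in w_graph_d.items():
        for v, w in neighbors.items():
            if dist[u] + w < dist[v]:
                raise ValueError('negative cycle detected')
    return dist

w_graph_d = {
    's': {'a': 2, 'b': 6},
    'a': {'b': 3, 'c': 1},
    'b': {'a': -5, 'd': 2},
    'c': {'b': 1, 'e': 4, 'f': 2},
    'd': {'c': 3, 'f': 2},
    'e': {},
    'f': {'e': 1}
}
try:
    print(bellman_ford_sketch(w_graph_d, 's'))
except ValueError as err:
    print(err)  # the a/b loop sums to -2, so a negative cycle is reported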
31a4253288637070f50a398cd80250176e785a19
|
rnacentral_pipeline/cli/genes.py
|
rnacentral_pipeline/cli/genes.py
|
# -*- coding: utf-8 -*-
"""
Copyright [2009-2020] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import click
from rnacentral_pipeline.rnacentral.genes import build, write
@click.group("genes")
def cli():
"""
A group of commands dealing with building genes.
"""
pass
@cli.command("build")
@click.option("--format", type=click.Choice(write.Format.names(), case_sensitive=False))
@click.argument("data_file", type=click.File("r"))
@click.argument("output", type=click.File("w"))
def build_genes(data_file, output, format=None):
"""
Build the genes for the given data file. This assumes that the file is
already split into reasonable chunks.
"""
data = build.from_json(data_file)
write.write(data, write.Format.from_name(format), output)
|
# -*- coding: utf-8 -*-
"""
Copyright [2009-2020] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import click
from rnacentral_pipeline.rnacentral.genes import build, write
@click.group("genes")
def cli():
"""
A group of commands dealing with building genes.
"""
pass
@cli.command("build")
@click.option(
"--format",
default="csv",
type=click.Choice(write.Format.names(), case_sensitive=False),
)
@click.argument("data_file", type=click.File("r"))
@click.argument("output", type=click.File("w"))
def build_genes(data_file, output, format=None):
"""
Build the genes for the given data file. The file can contain all data for a
specific assembly.
"""
data = build.from_json(data_file)
write.write(data, write.Format.from_name(format), output)
|
Clean up CLI a bit
|
Clean up CLI a bit
Default arguments are useful.
|
Python
|
apache-2.0
|
RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline
|
# -*- coding: utf-8 -*-
"""
Copyright [2009-2020] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import click
from rnacentral_pipeline.rnacentral.genes import build, write
@click.group("genes")
def cli():
"""
A group of commands dealing with building genes.
"""
pass
@cli.command("build")
@click.option("--format", type=click.Choice(write.Format.names(), case_sensitive=False))
@click.argument("data_file", type=click.File("r"))
@click.argument("output", type=click.File("w"))
def build_genes(data_file, output, format=None):
"""
Build the genes for the given data file. This assumes that the file is
already split into reasonable chunks.
"""
data = build.from_json(data_file)
write.write(data, write.Format.from_name(format), output)
Clean up CLI a bit
Default arguments are useful.
|
# -*- coding: utf-8 -*-
"""
Copyright [2009-2020] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import click
from rnacentral_pipeline.rnacentral.genes import build, write
@click.group("genes")
def cli():
"""
A group of commands dealing with building genes.
"""
pass
@cli.command("build")
@click.option(
"--format",
default="csv",
type=click.Choice(write.Format.names(), case_sensitive=False),
)
@click.argument("data_file", type=click.File("r"))
@click.argument("output", type=click.File("w"))
def build_genes(data_file, output, format=None):
"""
Build the genes for the given data file. The file can contain all data for a
specific assembly.
"""
data = build.from_json(data_file)
write.write(data, write.Format.from_name(format), output)
|
<commit_before># -*- coding: utf-8 -*-
"""
Copyright [2009-2020] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import click
from rnacentral_pipeline.rnacentral.genes import build, write
@click.group("genes")
def cli():
"""
A group of commands dealing with building genes.
"""
pass
@cli.command("build")
@click.option("--format", type=click.Choice(write.Format.names(), case_sensitive=False))
@click.argument("data_file", type=click.File("r"))
@click.argument("output", type=click.File("w"))
def build_genes(data_file, output, format=None):
"""
Build the genes for the given data file. This assumes that the file is
already split into reasonable chunks.
"""
data = build.from_json(data_file)
write.write(data, write.Format.from_name(format), output)
<commit_msg>Clean up CLI a bit
Default arguments are useful.<commit_after>
|
# -*- coding: utf-8 -*-
"""
Copyright [2009-2020] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import click
from rnacentral_pipeline.rnacentral.genes import build, write
@click.group("genes")
def cli():
"""
A group of commands dealing with building genes.
"""
pass
@cli.command("build")
@click.option(
"--format",
default="csv",
type=click.Choice(write.Format.names(), case_sensitive=False),
)
@click.argument("data_file", type=click.File("r"))
@click.argument("output", type=click.File("w"))
def build_genes(data_file, output, format=None):
"""
Build the genes for the given data file. The file can contain all data for a
specific assembly.
"""
data = build.from_json(data_file)
write.write(data, write.Format.from_name(format), output)
|
# -*- coding: utf-8 -*-
"""
Copyright [2009-2020] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import click
from rnacentral_pipeline.rnacentral.genes import build, write
@click.group("genes")
def cli():
"""
A group of commands dealing with building genes.
"""
pass
@cli.command("build")
@click.option("--format", type=click.Choice(write.Format.names(), case_sensitive=False))
@click.argument("data_file", type=click.File("r"))
@click.argument("output", type=click.File("w"))
def build_genes(data_file, output, format=None):
"""
Build the genes for the given data file. This assumes that the file is
already split into reasonable chunks.
"""
data = build.from_json(data_file)
write.write(data, write.Format.from_name(format), output)
Clean up CLI a bit
Default arguments are useful.
# -*- coding: utf-8 -*-
"""
Copyright [2009-2020] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import click
from rnacentral_pipeline.rnacentral.genes import build, write
@click.group("genes")
def cli():
"""
A group of commands dealing with building genes.
"""
pass
@cli.command("build")
@click.option(
"--format",
default="csv",
type=click.Choice(write.Format.names(), case_sensitive=False),
)
@click.argument("data_file", type=click.File("r"))
@click.argument("output", type=click.File("w"))
def build_genes(data_file, output, format=None):
"""
Build the genes for the given data file. The file can contain all data for a
specific assembly.
"""
data = build.from_json(data_file)
write.write(data, write.Format.from_name(format), output)
|
<commit_before># -*- coding: utf-8 -*-
"""
Copyright [2009-2020] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import click
from rnacentral_pipeline.rnacentral.genes import build, write
@click.group("genes")
def cli():
"""
A group of commands dealing with building genes.
"""
pass
@cli.command("build")
@click.option("--format", type=click.Choice(write.Format.names(), case_sensitive=False))
@click.argument("data_file", type=click.File("r"))
@click.argument("output", type=click.File("w"))
def build_genes(data_file, output, format=None):
"""
Build the genes for the given data file. This assumes that the file is
already split into reasonable chunks.
"""
data = build.from_json(data_file)
write.write(data, write.Format.from_name(format), output)
<commit_msg>Clean up CLI a bit
Default arguments are useful.<commit_after># -*- coding: utf-8 -*-
"""
Copyright [2009-2020] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import click
from rnacentral_pipeline.rnacentral.genes import build, write
@click.group("genes")
def cli():
"""
A group of commands dealing with building genes.
"""
pass
@cli.command("build")
@click.option(
"--format",
default="csv",
type=click.Choice(write.Format.names(), case_sensitive=False),
)
@click.argument("data_file", type=click.File("r"))
@click.argument("output", type=click.File("w"))
def build_genes(data_file, output, format=None):
"""
Build the genes for the given data file. The file can contain all data for a
specific assembly.
"""
data = build.from_json(data_file)
write.write(data, write.Format.from_name(format), output)
|
5ce78ab69c74f6f3be52fe4afad310952f6a1245
|
scheduler/schedule.py
|
scheduler/schedule.py
|
import sys
import time
import logging
logging.basicConfig(level=logging.DEBUG)
from redis import StrictRedis
from rq import Queue
from apscheduler.schedulers.blocking import BlockingScheduler
from d1lod import jobs
conn = StrictRedis(host='redis', port='6379')
q = Queue(connection=conn)
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=1)
def queue_update_job():
q.enqueue(jobs.update_graph)
@sched.scheduled_job('interval', minutes=1)
def queue_stats_job():
q.enqueue(jobs.calculate_stats)
@sched.scheduled_job('interval', minutes=1)
def queue_export_job():
q.enqueue(jobs.export_graph)
@sched.scheduled_job('interval', minutes=1)
def print_jobs_job():
sched.print_jobs()
# Wait a bit for Sesame to start
time.sleep(10)
# Queue the stats job first. This creates the repository before any other
# jobs are run.
q.enequeue(jobs.calculate_stats)
# Start the scheduler
sched.start()
|
import sys
import time
import logging
logging.basicConfig(level=logging.DEBUG)
from redis import StrictRedis
from rq import Queue
from apscheduler.schedulers.blocking import BlockingScheduler
from d1lod import jobs
conn = StrictRedis(host='redis', port='6379')
q = Queue(connection=conn)
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=1)
def queue_update_job():
q.enqueue(jobs.update_graph)
@sched.scheduled_job('interval', minutes=1)
def queue_stats_job():
q.enqueue(jobs.calculate_stats)
@sched.scheduled_job('interval', minutes=1)
def queue_export_job():
q.enqueue(jobs.export_graph)
@sched.scheduled_job('interval', minutes=1)
def print_jobs_job():
sched.print_jobs()
# Wait a bit for Sesame to start
time.sleep(10)
# Queue the stats job first. This creates the repository before any other
# jobs are run.
q.enqueue(jobs.calculate_stats)
# Start the scheduler
sched.start()
|
Fix typo on job module
|
Fix typo on job module
|
Python
|
apache-2.0
|
ec-geolink/d1lod,ec-geolink/d1lod,ec-geolink/d1lod,ec-geolink/d1lod
|
import sys
import time
import logging
logging.basicConfig(level=logging.DEBUG)
from redis import StrictRedis
from rq import Queue
from apscheduler.schedulers.blocking import BlockingScheduler
from d1lod import jobs
conn = StrictRedis(host='redis', port='6379')
q = Queue(connection=conn)
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=1)
def queue_update_job():
q.enqueue(jobs.update_graph)
@sched.scheduled_job('interval', minutes=1)
def queue_stats_job():
q.enqueue(jobs.calculate_stats)
@sched.scheduled_job('interval', minutes=1)
def queue_export_job():
q.enqueue(jobs.export_graph)
@sched.scheduled_job('interval', minutes=1)
def print_jobs_job():
sched.print_jobs()
# Wait a bit for Sesame to start
time.sleep(10)
# Queue the stats job first. This creates the repository before any other
# jobs are run.
q.enequeue(jobs.calculate_stats)
# Start the scheduler
sched.start()
Fix typo on job module
|
import sys
import time
import logging
logging.basicConfig(level=logging.DEBUG)
from redis import StrictRedis
from rq import Queue
from apscheduler.schedulers.blocking import BlockingScheduler
from d1lod import jobs
conn = StrictRedis(host='redis', port='6379')
q = Queue(connection=conn)
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=1)
def queue_update_job():
q.enqueue(jobs.update_graph)
@sched.scheduled_job('interval', minutes=1)
def queue_stats_job():
q.enqueue(jobs.calculate_stats)
@sched.scheduled_job('interval', minutes=1)
def queue_export_job():
q.enqueue(jobs.export_graph)
@sched.scheduled_job('interval', minutes=1)
def print_jobs_job():
sched.print_jobs()
# Wait a bit for Sesame to start
time.sleep(10)
# Queue the stats job first. This creates the repository before any other
# jobs are run.
q.enqueue(jobs.calculate_stats)
# Start the scheduler
sched.start()
|
<commit_before>import sys
import time
import logging
logging.basicConfig(level=logging.DEBUG)
from redis import StrictRedis
from rq import Queue
from apscheduler.schedulers.blocking import BlockingScheduler
from d1lod import jobs
conn = StrictRedis(host='redis', port='6379')
q = Queue(connection=conn)
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=1)
def queue_update_job():
q.enqueue(jobs.update_graph)
@sched.scheduled_job('interval', minutes=1)
def queue_stats_job():
q.enqueue(jobs.calculate_stats)
@sched.scheduled_job('interval', minutes=1)
def queue_export_job():
q.enqueue(jobs.export_graph)
@sched.scheduled_job('interval', minutes=1)
def print_jobs_job():
sched.print_jobs()
# Wait a bit for Sesame to start
time.sleep(10)
# Queue the stats job first. This creates the repository before any other
# jobs are run.
q.enequeue(jobs.calculate_stats)
# Start the scheduler
sched.start()
<commit_msg>Fix typo on job module<commit_after>
|
import sys
import time
import logging
logging.basicConfig(level=logging.DEBUG)
from redis import StrictRedis
from rq import Queue
from apscheduler.schedulers.blocking import BlockingScheduler
from d1lod import jobs
conn = StrictRedis(host='redis', port='6379')
q = Queue(connection=conn)
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=1)
def queue_update_job():
q.enqueue(jobs.update_graph)
@sched.scheduled_job('interval', minutes=1)
def queue_stats_job():
q.enqueue(jobs.calculate_stats)
@sched.scheduled_job('interval', minutes=1)
def queue_export_job():
q.enqueue(jobs.export_graph)
@sched.scheduled_job('interval', minutes=1)
def print_jobs_job():
sched.print_jobs()
# Wait a bit for Sesame to start
time.sleep(10)
# Queue the stats job first. This creates the repository before any other
# jobs are run.
q.enqueue(jobs.calculate_stats)
# Start the scheduler
sched.start()
|
import sys
import time
import logging
logging.basicConfig(level=logging.DEBUG)
from redis import StrictRedis
from rq import Queue
from apscheduler.schedulers.blocking import BlockingScheduler
from d1lod import jobs
conn = StrictRedis(host='redis', port='6379')
q = Queue(connection=conn)
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=1)
def queue_update_job():
q.enqueue(jobs.update_graph)
@sched.scheduled_job('interval', minutes=1)
def queue_stats_job():
q.enqueue(jobs.calculate_stats)
@sched.scheduled_job('interval', minutes=1)
def queue_export_job():
q.enqueue(jobs.export_graph)
@sched.scheduled_job('interval', minutes=1)
def print_jobs_job():
sched.print_jobs()
# Wait a bit for Sesame to start
time.sleep(10)
# Queue the stats job first. This creates the repository before any other
# jobs are run.
q.enequeue(jobs.calculate_stats)
# Start the scheduler
sched.start()
Fix typo on job module
import sys
import time
import logging
logging.basicConfig(level=logging.DEBUG)
from redis import StrictRedis
from rq import Queue
from apscheduler.schedulers.blocking import BlockingScheduler
from d1lod import jobs
conn = StrictRedis(host='redis', port='6379')
q = Queue(connection=conn)
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=1)
def queue_update_job():
q.enqueue(jobs.update_graph)
@sched.scheduled_job('interval', minutes=1)
def queue_stats_job():
q.enqueue(jobs.calculate_stats)
@sched.scheduled_job('interval', minutes=1)
def queue_export_job():
q.enqueue(jobs.export_graph)
@sched.scheduled_job('interval', minutes=1)
def print_jobs_job():
sched.print_jobs()
# Wait a bit for Sesame to start
time.sleep(10)
# Queue the stats job first. This creates the repository before any other
# jobs are run.
q.enqueue(jobs.calculate_stats)
# Start the scheduler
sched.start()
|
<commit_before>import sys
import time
import logging
logging.basicConfig(level=logging.DEBUG)
from redis import StrictRedis
from rq import Queue
from apscheduler.schedulers.blocking import BlockingScheduler
from d1lod import jobs
conn = StrictRedis(host='redis', port='6379')
q = Queue(connection=conn)
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=1)
def queue_update_job():
q.enqueue(jobs.update_graph)
@sched.scheduled_job('interval', minutes=1)
def queue_stats_job():
q.enqueue(jobs.calculate_stats)
@sched.scheduled_job('interval', minutes=1)
def queue_export_job():
q.enqueue(jobs.export_graph)
@sched.scheduled_job('interval', minutes=1)
def print_jobs_job():
sched.print_jobs()
# Wait a bit for Sesame to start
time.sleep(10)
# Queue the stats job first. This creates the repository before any other
# jobs are run.
q.enequeue(jobs.calculate_stats)
# Start the scheduler
sched.start()
<commit_msg>Fix typo on job module<commit_after>import sys
import time
import logging
logging.basicConfig(level=logging.DEBUG)
from redis import StrictRedis
from rq import Queue
from apscheduler.schedulers.blocking import BlockingScheduler
from d1lod import jobs
conn = StrictRedis(host='redis', port='6379')
q = Queue(connection=conn)
sched = BlockingScheduler()
@sched.scheduled_job('interval', minutes=1)
def queue_update_job():
q.enqueue(jobs.update_graph)
@sched.scheduled_job('interval', minutes=1)
def queue_stats_job():
q.enqueue(jobs.calculate_stats)
@sched.scheduled_job('interval', minutes=1)
def queue_export_job():
q.enqueue(jobs.export_graph)
@sched.scheduled_job('interval', minutes=1)
def print_jobs_job():
sched.print_jobs()
# Wait a bit for Sesame to start
time.sleep(10)
# Queue the stats job first. This creates the repository before any other
# jobs are run.
q.enqueue(jobs.calculate_stats)
# Start the scheduler
sched.start()
|
845acca37f929eeea98cb7fd1690cf5ca1570aeb
|
Lib/test/test_symtable.py
|
Lib/test/test_symtable.py
|
from test.test_support import vereq, TestFailed
import _symtable
symbols = _symtable.symtable("def f(x): return x", "?", "exec")
## XXX
## Test disabled because symtable module needs to be rewritten for new compiler
##vereq(symbols[0].name, "global")
##vereq(len([ste for ste in symbols.values() if ste.name == "f"]), 1)
### Bug tickler: SyntaxError file name correct whether error raised
### while parsing or building symbol table.
##def checkfilename(brokencode):
## try:
## _symtable.symtable(brokencode, "spam", "exec")
## except SyntaxError, e:
## vereq(e.filename, "spam")
## else:
## raise TestFailed("no SyntaxError for %r" % (brokencode,))
##checkfilename("def f(x): foo)(") # parse-time
##checkfilename("def f(x): global x") # symtable-build-time
|
from test.test_support import vereq, TestFailed
import symtable
symbols = symtable.symtable("def f(x): return x", "?", "exec")
## XXX
## Test disabled because symtable module needs to be rewritten for new compiler
##vereq(symbols[0].name, "global")
##vereq(len([ste for ste in symbols.values() if ste.name == "f"]), 1)
### Bug tickler: SyntaxError file name correct whether error raised
### while parsing or building symbol table.
##def checkfilename(brokencode):
## try:
## _symtable.symtable(brokencode, "spam", "exec")
## except SyntaxError, e:
## vereq(e.filename, "spam")
## else:
## raise TestFailed("no SyntaxError for %r" % (brokencode,))
##checkfilename("def f(x): foo)(") # parse-time
##checkfilename("def f(x): global x") # symtable-build-time
|
Test is still disabled, but access through public module
|
Test is still disabled, but access through public module
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
from test.test_support import vereq, TestFailed
import _symtable
symbols = _symtable.symtable("def f(x): return x", "?", "exec")
## XXX
## Test disabled because symtable module needs to be rewritten for new compiler
##vereq(symbols[0].name, "global")
##vereq(len([ste for ste in symbols.values() if ste.name == "f"]), 1)
### Bug tickler: SyntaxError file name correct whether error raised
### while parsing or building symbol table.
##def checkfilename(brokencode):
## try:
## _symtable.symtable(brokencode, "spam", "exec")
## except SyntaxError, e:
## vereq(e.filename, "spam")
## else:
## raise TestFailed("no SyntaxError for %r" % (brokencode,))
##checkfilename("def f(x): foo)(") # parse-time
##checkfilename("def f(x): global x") # symtable-build-time
Test is still disabled, but access through public module
|
from test.test_support import vereq, TestFailed
import symtable
symbols = symtable.symtable("def f(x): return x", "?", "exec")
## XXX
## Test disabled because symtable module needs to be rewritten for new compiler
##vereq(symbols[0].name, "global")
##vereq(len([ste for ste in symbols.values() if ste.name == "f"]), 1)
### Bug tickler: SyntaxError file name correct whether error raised
### while parsing or building symbol table.
##def checkfilename(brokencode):
## try:
## _symtable.symtable(brokencode, "spam", "exec")
## except SyntaxError, e:
## vereq(e.filename, "spam")
## else:
## raise TestFailed("no SyntaxError for %r" % (brokencode,))
##checkfilename("def f(x): foo)(") # parse-time
##checkfilename("def f(x): global x") # symtable-build-time
|
<commit_before>from test.test_support import vereq, TestFailed
import _symtable
symbols = _symtable.symtable("def f(x): return x", "?", "exec")
## XXX
## Test disabled because symtable module needs to be rewritten for new compiler
##vereq(symbols[0].name, "global")
##vereq(len([ste for ste in symbols.values() if ste.name == "f"]), 1)
### Bug tickler: SyntaxError file name correct whether error raised
### while parsing or building symbol table.
##def checkfilename(brokencode):
## try:
## _symtable.symtable(brokencode, "spam", "exec")
## except SyntaxError, e:
## vereq(e.filename, "spam")
## else:
## raise TestFailed("no SyntaxError for %r" % (brokencode,))
##checkfilename("def f(x): foo)(") # parse-time
##checkfilename("def f(x): global x") # symtable-build-time
<commit_msg>Test is still disabled, but access through public module<commit_after>
|
from test.test_support import vereq, TestFailed
import symtable
symbols = symtable.symtable("def f(x): return x", "?", "exec")
## XXX
## Test disabled because symtable module needs to be rewritten for new compiler
##vereq(symbols[0].name, "global")
##vereq(len([ste for ste in symbols.values() if ste.name == "f"]), 1)
### Bug tickler: SyntaxError file name correct whether error raised
### while parsing or building symbol table.
##def checkfilename(brokencode):
## try:
## _symtable.symtable(brokencode, "spam", "exec")
## except SyntaxError, e:
## vereq(e.filename, "spam")
## else:
## raise TestFailed("no SyntaxError for %r" % (brokencode,))
##checkfilename("def f(x): foo)(") # parse-time
##checkfilename("def f(x): global x") # symtable-build-time
|
from test.test_support import vereq, TestFailed
import _symtable
symbols = _symtable.symtable("def f(x): return x", "?", "exec")
## XXX
## Test disabled because symtable module needs to be rewritten for new compiler
##vereq(symbols[0].name, "global")
##vereq(len([ste for ste in symbols.values() if ste.name == "f"]), 1)
### Bug tickler: SyntaxError file name correct whether error raised
### while parsing or building symbol table.
##def checkfilename(brokencode):
## try:
## _symtable.symtable(brokencode, "spam", "exec")
## except SyntaxError, e:
## vereq(e.filename, "spam")
## else:
## raise TestFailed("no SyntaxError for %r" % (brokencode,))
##checkfilename("def f(x): foo)(") # parse-time
##checkfilename("def f(x): global x") # symtable-build-time
Test is still disabled, but access through public module
from test.test_support import vereq, TestFailed
import symtable
symbols = symtable.symtable("def f(x): return x", "?", "exec")
## XXX
## Test disabled because symtable module needs to be rewritten for new compiler
##vereq(symbols[0].name, "global")
##vereq(len([ste for ste in symbols.values() if ste.name == "f"]), 1)
### Bug tickler: SyntaxError file name correct whether error raised
### while parsing or building symbol table.
##def checkfilename(brokencode):
## try:
## _symtable.symtable(brokencode, "spam", "exec")
## except SyntaxError, e:
## vereq(e.filename, "spam")
## else:
## raise TestFailed("no SyntaxError for %r" % (brokencode,))
##checkfilename("def f(x): foo)(") # parse-time
##checkfilename("def f(x): global x") # symtable-build-time
|
<commit_before>from test.test_support import vereq, TestFailed
import _symtable
symbols = _symtable.symtable("def f(x): return x", "?", "exec")
## XXX
## Test disabled because symtable module needs to be rewritten for new compiler
##vereq(symbols[0].name, "global")
##vereq(len([ste for ste in symbols.values() if ste.name == "f"]), 1)
### Bug tickler: SyntaxError file name correct whether error raised
### while parsing or building symbol table.
##def checkfilename(brokencode):
## try:
## _symtable.symtable(brokencode, "spam", "exec")
## except SyntaxError, e:
## vereq(e.filename, "spam")
## else:
## raise TestFailed("no SyntaxError for %r" % (brokencode,))
##checkfilename("def f(x): foo)(") # parse-time
##checkfilename("def f(x): global x") # symtable-build-time
<commit_msg>Test is still disabled, but access through public module<commit_after>from test.test_support import vereq, TestFailed
import symtable
symbols = symtable.symtable("def f(x): return x", "?", "exec")
## XXX
## Test disabled because symtable module needs to be rewritten for new compiler
##vereq(symbols[0].name, "global")
##vereq(len([ste for ste in symbols.values() if ste.name == "f"]), 1)
### Bug tickler: SyntaxError file name correct whether error raised
### while parsing or building symbol table.
##def checkfilename(brokencode):
## try:
## _symtable.symtable(brokencode, "spam", "exec")
## except SyntaxError, e:
## vereq(e.filename, "spam")
## else:
## raise TestFailed("no SyntaxError for %r" % (brokencode,))
##checkfilename("def f(x): foo)(") # parse-time
##checkfilename("def f(x): global x") # symtable-build-time
|
9b2999d64b02cc65dc62434a29d0fe841b3d1886
|
tests/commands/test_test.py
|
tests/commands/test_test.py
|
# Copyright 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from os.path import join
from platformio.commands.test import cli as cli_test
def test_local_env(clirunner, validate_cliresult):
result = clirunner.invoke(
cli_test,
["-d", join("examples", "unit-testing", "calculator"), "-e", "local"])
validate_cliresult(result)
assert all(
[s in result.output for s in ("[PASSED]", "[IGNORED]", "[FAILED]")])
|
# Copyright 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from os.path import join
import pytest
from platformio import util
def test_local_env():
result = util.exec_command(["platformio", "test", "-d",
join("examples", "unit-testing", "calculator"),
"-e", "local"])
if result['returncode'] != 0:
pytest.fail(result)
assert all(
[s in result['out'] for s in ("[PASSED]", "[IGNORED]", "[FAILED]")])
|
Fix test for "pio test"
|
Fix test for "pio test"
|
Python
|
apache-2.0
|
platformio/platformio,platformio/platformio-core,platformio/platformio-core
|
# Copyright 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from os.path import join
from platformio.commands.test import cli as cli_test
def test_local_env(clirunner, validate_cliresult):
result = clirunner.invoke(
cli_test,
["-d", join("examples", "unit-testing", "calculator"), "-e", "local"])
validate_cliresult(result)
assert all(
[s in result.output for s in ("[PASSED]", "[IGNORED]", "[FAILED]")])
Fix test for "pio test"
|
# Copyright 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from os.path import join
import pytest
from platformio import util
def test_local_env():
result = util.exec_command(["platformio", "test", "-d",
join("examples", "unit-testing", "calculator"),
"-e", "local"])
if result['returncode'] != 0:
pytest.fail(result)
assert all(
[s in result['out'] for s in ("[PASSED]", "[IGNORED]", "[FAILED]")])
|
<commit_before># Copyright 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from os.path import join
from platformio.commands.test import cli as cli_test
def test_local_env(clirunner, validate_cliresult):
result = clirunner.invoke(
cli_test,
["-d", join("examples", "unit-testing", "calculator"), "-e", "local"])
validate_cliresult(result)
assert all(
[s in result.output for s in ("[PASSED]", "[IGNORED]", "[FAILED]")])
<commit_msg>Fix test for "pio test"<commit_after>
|
# Copyright 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from os.path import join
import pytest
from platformio import util
def test_local_env():
result = util.exec_command(["platformio", "test", "-d",
join("examples", "unit-testing", "calculator"),
"-e", "local"])
if result['returncode'] != 0:
pytest.fail(result)
assert all(
[s in result['out'] for s in ("[PASSED]", "[IGNORED]", "[FAILED]")])
|
# Copyright 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from os.path import join
from platformio.commands.test import cli as cli_test
def test_local_env(clirunner, validate_cliresult):
result = clirunner.invoke(
cli_test,
["-d", join("examples", "unit-testing", "calculator"), "-e", "local"])
validate_cliresult(result)
assert all(
[s in result.output for s in ("[PASSED]", "[IGNORED]", "[FAILED]")])
Fix test for "pio test"# Copyright 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from os.path import join
import pytest
from platformio import util
def test_local_env():
result = util.exec_command(["platformio", "test", "-d",
join("examples", "unit-testing", "calculator"),
"-e", "local"])
if result['returncode'] != 0:
pytest.fail(result)
assert all(
[s in result['out'] for s in ("[PASSED]", "[IGNORED]", "[FAILED]")])
|
<commit_before># Copyright 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from os.path import join
from platformio.commands.test import cli as cli_test
def test_local_env(clirunner, validate_cliresult):
result = clirunner.invoke(
cli_test,
["-d", join("examples", "unit-testing", "calculator"), "-e", "local"])
validate_cliresult(result)
assert all(
[s in result.output for s in ("[PASSED]", "[IGNORED]", "[FAILED]")])
<commit_msg>Fix test for "pio test"<commit_after># Copyright 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from os.path import join
import pytest
from platformio import util
def test_local_env():
result = util.exec_command(["platformio", "test", "-d",
join("examples", "unit-testing", "calculator"),
"-e", "local"])
if result['returncode'] != 0:
pytest.fail(result)
assert all(
[s in result['out'] for s in ("[PASSED]", "[IGNORED]", "[FAILED]")])
|
5c117433e1216acbc0e2f53104fab66ae05e86c5
|
instance/config.py
|
instance/config.py
|
import os
class Config(object):
"""Parent configuration class."""
DEBUG = False
CSRF_ENABLED = True
SECRET_KEY = os.getenv('SECRET_KEY')
SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_URL')
class DevelopmentConfig(Config):
"""Configurations for Development."""
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'postgresql://localhost/flask_db'
class TestingConfig(Config):
"""Configurations for Testing, with a separate test database."""
TESTING = True
SQLALCHEMY_DATABASE_URI = 'postgresql://localhost/test_db'
DEBUG = True
class StagingConfig(Config):
"""Configurations for Staging."""
DEBUG = True
class ProductionConfig(Config):
"""Configurations for Production."""
DEBUG = False
TESTING = False
app_config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'staging': StagingConfig,
'production': ProductionConfig,
}
|
import os
class Config(object):
"""Parent configuration class."""
DEBUG = False
CSRF_ENABLED = True
SECRET_KEY = os.getenv('SECRET_KEY')
SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_URL')
class DevelopmentConfig(Config):
"""Configurations for Development."""
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'postgresql://localhost/flask_db'
class TestingConfig(Config):
"""Configurations for Testing, with a separate test database."""
TESTING = True
SQLALCHEMY_DATABASE_URI = 'sqlite:///test_db.db'
DEBUG = True
class StagingConfig(Config):
"""Configurations for Staging."""
DEBUG = True
class ProductionConfig(Config):
"""Configurations for Production."""
DEBUG = False
TESTING = False
app_config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'staging': StagingConfig,
'production': ProductionConfig,
}
|
Change testing database to sqlite
|
Change testing database to sqlite
|
Python
|
mit
|
Alweezy/cp2-bucketlist-api,Alweezy/cp2-bucketlist-api,Alweezy/cp2-bucketlist-api
|
import os
class Config(object):
"""Parent configuration class."""
DEBUG = False
CSRF_ENABLED = True
SECRET_KEY = os.getenv('SECRET_KEY')
SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_URL')
class DevelopmentConfig(Config):
"""Configurations for Development."""
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'postgresql://localhost/flask_db'
class TestingConfig(Config):
"""Configurations for Testing, with a separate test database."""
TESTING = True
SQLALCHEMY_DATABASE_URI = 'postgresql://localhost/test_db'
DEBUG = True
class StagingConfig(Config):
"""Configurations for Staging."""
DEBUG = True
class ProductionConfig(Config):
"""Configurations for Production."""
DEBUG = False
TESTING = False
app_config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'staging': StagingConfig,
'production': ProductionConfig,
}
Change testing database to sqlite
|
import os
class Config(object):
"""Parent configuration class."""
DEBUG = False
CSRF_ENABLED = True
SECRET_KEY = os.getenv('SECRET_KEY')
SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_URL')
class DevelopmentConfig(Config):
"""Configurations for Development."""
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'postgresql://localhost/flask_db'
class TestingConfig(Config):
"""Configurations for Testing, with a separate test database."""
TESTING = True
SQLALCHEMY_DATABASE_URI = 'sqlite:///test_db.db'
DEBUG = True
class StagingConfig(Config):
"""Configurations for Staging."""
DEBUG = True
class ProductionConfig(Config):
"""Configurations for Production."""
DEBUG = False
TESTING = False
app_config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'staging': StagingConfig,
'production': ProductionConfig,
}
|
<commit_before>import os
class Config(object):
"""Parent configuration class."""
DEBUG = False
CSRF_ENABLED = True
SECRET_KEY = os.getenv('SECRET_KEY')
SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_URL')
class DevelopmentConfig(Config):
"""Configurations for Development."""
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'postgresql://localhost/flask_db'
class TestingConfig(Config):
"""Configurations for Testing, with a separate test database."""
TESTING = True
SQLALCHEMY_DATABASE_URI = 'postgresql://localhost/test_db'
DEBUG = True
class StagingConfig(Config):
"""Configurations for Staging."""
DEBUG = True
class ProductionConfig(Config):
"""Configurations for Production."""
DEBUG = False
TESTING = False
app_config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'staging': StagingConfig,
'production': ProductionConfig,
}
<commit_msg>Change testing database to sqlite<commit_after>
|
import os
class Config(object):
"""Parent configuration class."""
DEBUG = False
CSRF_ENABLED = True
SECRET_KEY = os.getenv('SECRET_KEY')
SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_URL')
class DevelopmentConfig(Config):
"""Configurations for Development."""
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'postgresql://localhost/flask_db'
class TestingConfig(Config):
"""Configurations for Testing, with a separate test database."""
TESTING = True
SQLALCHEMY_DATABASE_URI = 'sqlite:///test_db.db'
DEBUG = True
class StagingConfig(Config):
"""Configurations for Staging."""
DEBUG = True
class ProductionConfig(Config):
"""Configurations for Production."""
DEBUG = False
TESTING = False
app_config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'staging': StagingConfig,
'production': ProductionConfig,
}
|
import os
class Config(object):
"""Parent configuration class."""
DEBUG = False
CSRF_ENABLED = True
SECRET_KEY = os.getenv('SECRET_KEY')
SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_URL')
class DevelopmentConfig(Config):
"""Configurations for Development."""
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'postgresql://localhost/flask_db'
class TestingConfig(Config):
"""Configurations for Testing, with a separate test database."""
TESTING = True
SQLALCHEMY_DATABASE_URI = 'postgresql://localhost/test_db'
DEBUG = True
class StagingConfig(Config):
"""Configurations for Staging."""
DEBUG = True
class ProductionConfig(Config):
"""Configurations for Production."""
DEBUG = False
TESTING = False
app_config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'staging': StagingConfig,
'production': ProductionConfig,
}
Change testing database to sqlite
import os
class Config(object):
"""Parent configuration class."""
DEBUG = False
CSRF_ENABLED = True
SECRET_KEY = os.getenv('SECRET_KEY')
SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_URL')
class DevelopmentConfig(Config):
"""Configurations for Development."""
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'postgresql://localhost/flask_db'
class TestingConfig(Config):
"""Configurations for Testing, with a separate test database."""
TESTING = True
SQLALCHEMY_DATABASE_URI = 'sqlite:///test_db.db'
DEBUG = True
class StagingConfig(Config):
"""Configurations for Staging."""
DEBUG = True
class ProductionConfig(Config):
"""Configurations for Production."""
DEBUG = False
TESTING = False
app_config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'staging': StagingConfig,
'production': ProductionConfig,
}
|
<commit_before>import os
class Config(object):
"""Parent configuration class."""
DEBUG = False
CSRF_ENABLED = True
SECRET_KEY = os.getenv('SECRET_KEY')
SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_URL')
class DevelopmentConfig(Config):
"""Configurations for Development."""
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'postgresql://localhost/flask_db'
class TestingConfig(Config):
"""Configurations for Testing, with a separate test database."""
TESTING = True
SQLALCHEMY_DATABASE_URI = 'postgresql://localhost/test_db'
DEBUG = True
class StagingConfig(Config):
"""Configurations for Staging."""
DEBUG = True
class ProductionConfig(Config):
"""Configurations for Production."""
DEBUG = False
TESTING = False
app_config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'staging': StagingConfig,
'production': ProductionConfig,
}
<commit_msg>Change testing database to sqlite<commit_after>import os
class Config(object):
"""Parent configuration class."""
DEBUG = False
CSRF_ENABLED = True
SECRET_KEY = os.getenv('SECRET_KEY')
SQLALCHEMY_DATABASE_URI = os.getenv('DATABASE_URL')
class DevelopmentConfig(Config):
"""Configurations for Development."""
DEBUG = True
SQLALCHEMY_DATABASE_URI = 'postgresql://localhost/flask_db'
class TestingConfig(Config):
"""Configurations for Testing, with a separate test database."""
TESTING = True
SQLALCHEMY_DATABASE_URI = 'sqlite:///test_db.db'
DEBUG = True
class StagingConfig(Config):
"""Configurations for Staging."""
DEBUG = True
class ProductionConfig(Config):
"""Configurations for Production."""
DEBUG = False
TESTING = False
app_config = {
'development': DevelopmentConfig,
'testing': TestingConfig,
'staging': StagingConfig,
'production': ProductionConfig,
}
|
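For context on how a mapping like app_config is typically consumed: the sketch below is a hypothetical Flask application factory, not code from the repository above. The create_app name, the instance.config import path, and the Flask / Flask-SQLAlchemy calls are assumptions for illustration only.
from flask import Flask
from flask_sqlalchemy import SQLAlchemy

from instance.config import app_config  # the mapping defined above (import path assumed)

db = SQLAlchemy()

def create_app(config_name):
    # config_name is one of 'development', 'testing', 'staging', 'production'
    app = Flask(__name__, instance_relative_config=True)
    app.config.from_object(app_config[config_name])  # loads the uppercase settings
    db.init_app(app)
    return app
With TestingConfig now pointing at sqlite:///test_db.db, a call like create_app('testing') no longer needs a running PostgreSQL server, which is presumably the point of the commit.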