commit stringlengths 40 40 | old_file stringlengths 4 150 | new_file stringlengths 4 150 | old_contents stringlengths 0 3.26k | new_contents stringlengths 1 4.43k | subject stringlengths 15 501 | message stringlengths 15 4.06k | lang stringclasses 4 values | license stringclasses 13 values | repos stringlengths 5 91.5k | diff stringlengths 0 4.35k |
|---|---|---|---|---|---|---|---|---|---|---|
54ab75918503ccaa991f9298c01d54a99b830c9b | games.py | games.py | """
This module is for generating fake game data for use with the API.
An example of some game data::
{
"id": 1,
"logURL": "http://derp.nope/",
"updates": [
{
"status": "complete",
"time": "today"
}
],
"players": [
{
"name": "Team Awesome",
"score": 1
},
{
"name": "Team Less Awesome",
"score": 0
},
]
}
"""
import datetime
import random
def game_data(players, count):
now = datetime.datetime.now()
for id in xrange(count):
yield {
"id": id,
"logURL": "about:blank",
"updates": [
{
"status": "complete",
"time": str(now + datetime.timedelta(minutes=id))
}
],
"players": dict(zip(random.sample(players, 2), (0, 1))),
}
| """
This module is for generating fake game data for use with the API.
An example of some game data::
{
"id": 1,
"logURL": "http://derp.nope/",
"updates": [
{
"status": "complete",
"time": "today"
}
],
"players": [
{
"name": "Team Awesome",
"score": 1
},
{
"name": "Team Less Awesome",
"score": 0
},
]
}
"""
import datetime
import random
def game_data(players, count):
now = datetime.datetime.now()
for id in xrange(1, count+1):
yield {
"id": id,
"logURL": "about:blank",
"updates": [
{
"status": "complete",
"time": str(now + datetime.timedelta(minutes=id))
}
],
"players": dict(zip(random.sample(players, 2), (0, 1))),
}
| Use 1-indexed counting for game IDs | Use 1-indexed counting for game IDs
| Python | bsd-3-clause | siggame/ng-games,siggame/ng-games,siggame/ng-games | ---
+++
@@ -30,7 +30,7 @@
def game_data(players, count):
now = datetime.datetime.now()
- for id in xrange(count):
+ for id in xrange(1, count+1):
yield {
"id": id,
"logURL": "about:blank", |
7681cfa394cdc91f77389a77ed8379cd5335f2c9 | setup.py | setup.py | from setuptools import setup, find_packages
GITHUB_ALERT = """**NOTE**: These are the docs for the version of envbuilder in git. For
documentation on the last release, see the `pypi_page <http://pypi.python.org/pypi/envbuilder/>`_."""
readme = open('README.rst', 'r')
unsplit_readme_text = readme.read()
split_text = [x for x in unsplit_readme_text.split('.. split here')
if x]
README_TEXT = split_text[-1]
readme.close()
setup(
name='envbuilder',
author='Jason Baker',
author_email='amnorvend@gmail.com',
version='0.2.0rc',
packages=find_packages(),
setup_requires=['nose'],
install_requires=['ConfigObj>=4.7.0', 'argparse', 'pip', 'virtualenv'],
zip_safe=False,
include_package_data=True,
entry_points = {
'console_scripts' : [
'envb = envbuilder.run:main',
'envbuilder = envbuilder.run:main'
]
},
description = "A package for automatic generation of virtualenvs",
long_description = README_TEXT,
url='http://github.com/jasonbaker/envbuilder',
)
| from setuptools import setup, find_packages
GITHUB_ALERT = """**NOTE**: These are the docs for the version of envbuilder in git. For
documentation on the last release, see the `pypi_page <http://pypi.python.org/pypi/envbuilder/>`_."""
readme = open('README.rst', 'r')
unsplit_readme_text = readme.read()
split_text = [x for x in unsplit_readme_text.split('.. split here')
if x]
README_TEXT = split_text[-1]
readme.close()
setup(
name='envbuilder',
author='Jason Baker',
author_email='amnorvend@gmail.com',
version='0.2.0b2',
packages=find_packages(),
setup_requires=['nose'],
install_requires=['ConfigObj>=4.7.0', 'argparse', 'pip', 'virtualenv'],
zip_safe=False,
include_package_data=True,
entry_points = {
'console_scripts' : [
'envb = envbuilder.run:main',
'envbuilder = envbuilder.run:main'
]
},
description = "A package for automatic generation of virtualenvs",
long_description = README_TEXT,
url='http://github.com/jasonbaker/envbuilder',
)
| Revert "Changing this to a release candidate." | Revert "Changing this to a release candidate."
This reverts commit a9f8afbc1c5a40d0a35e3a9757f8c96da494d35a.
| Python | bsd-3-clause | jasonbaker/envbuilder,jasonbaker/envbuilder | ---
+++
@@ -14,7 +14,7 @@
name='envbuilder',
author='Jason Baker',
author_email='amnorvend@gmail.com',
- version='0.2.0rc',
+ version='0.2.0b2',
packages=find_packages(),
setup_requires=['nose'],
install_requires=['ConfigObj>=4.7.0', 'argparse', 'pip', 'virtualenv'], |
55a87e2fa74af95cbfa86ccde427b16bf1512690 | setup.py | setup.py | #!/usr/bin/env python
import os
from setuptools import setup, find_packages
import skypipe
setup(
name='skypipe',
version=skypipe.VERSION,
author='Jeff Lindsay',
author_email='progrium@gmail.com',
description='Magic pipe in the sky',
long_description=open(os.path.join(os.path.dirname(__file__),
"README.md")).read().replace(':', '::'),
license='MIT',
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: MIT License",
],
url="http://github.com/progrium/skypipe",
packages=find_packages(),
install_requires=['pyzmq', 'dotcloud', 'argparse'],
zip_safe=False,
package_data={
'skypipe': ['satellite/*']},
entry_points={
'console_scripts': [
'skypipe = skypipe.cli:run',]}
)
| #!/usr/bin/env python
import os
from setuptools import setup, find_packages
import skypipe
setup(
name='skypipe',
version=skypipe.VERSION,
author='Jeff Lindsay',
author_email='progrium@gmail.com',
description='Magic pipe in the sky',
long_description=open(os.path.join(os.path.dirname(__file__),
"README.md")).read().replace(':', '::'),
license='MIT',
classifiers=[
"Development Status :: 3 - Alpha",
"Topic :: Utilities",
"License :: OSI Approved :: MIT License",
],
url="http://github.com/progrium/skypipe",
packages=find_packages(),
install_requires=['pyzmq', 'dotcloud>=0.7', 'argparse'],
zip_safe=False,
package_data={
'skypipe': ['satellite/*']},
entry_points={
'console_scripts': [
'skypipe = skypipe.cli:run',]}
)
| Add dotcloud version requirement (I had 0.4.2 and that didn't work). | Add dotcloud version requirement (I had 0.4.2 and that didn't work).
| Python | mit | progrium/skypipe,progrium/skypipe | ---
+++
@@ -20,7 +20,7 @@
],
url="http://github.com/progrium/skypipe",
packages=find_packages(),
- install_requires=['pyzmq', 'dotcloud', 'argparse'],
+ install_requires=['pyzmq', 'dotcloud>=0.7', 'argparse'],
zip_safe=False,
package_data={
'skypipe': ['satellite/*']}, |
73b195cc952ade2b2e0a2c11d32616dfa3d8baa1 | yasibo/__init__.py | yasibo/__init__.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
def botcmd(*args, **kwargs):
"""
Decorator to declare a function as a bot command
Parameters:
name = Name of the command
admin = Sets the command to be an admin command
Admin commands do not work in public channels user must msg
bot directly.
"""
def create_botcmd(func, name=None, admin=False):
if not hasattr(func, '_botcmd'):
setattr(func, '_botcmd', True)
setattr(func, '_botcmd_name', name or func.__name__)
setattr(func, '_botcmd_admin', admin)
return func
if len(args):
return create_botcmd(args[0], **kwargs)
else:
return lambda func: create_botcmd(func, **kwargs)
| Add fucntion to create bot commands | Add fucntion to create bot commands
| Python | mpl-2.0 | Yasibo/yasibo | ---
+++
@@ -0,0 +1,29 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+def botcmd(*args, **kwargs):
+ """
+ Decorator to declare a function as a bot command
+
+ Parameters:
+ name = Name of the command
+ admin = Sets the command to be an admin command
+ Admin commands do not work in public channels user must msg
+ bot directly.
+ """
+
+ def create_botcmd(func, name=None, admin=False):
+ if not hasattr(func, '_botcmd'):
+ setattr(func, '_botcmd', True)
+ setattr(func, '_botcmd_name', name or func.__name__)
+ setattr(func, '_botcmd_admin', admin)
+ return func
+
+ if len(args):
+ return create_botcmd(args[0], **kwargs)
+ else:
+ return lambda func: create_botcmd(func, **kwargs) | |
e3765f9d109a6eacfce30a14ff01bd00a56c4c21 | setup.py | setup.py | from setuptools import setup
import jasinja, sys
requires = ['Jinja2']
if sys.version_info < (2, 6):
requires += ['simplejson']
setup(
name='jasinja',
version=jasinja.__version__,
url='http://bitbucket.org/djc/jasinja',
license='BSD',
author='Dirkjan Ochtman',
author_email='dirkjan@ochtman.nl',
description='A JavaScript code generator for Jinja templates',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
packages=['jasinja', 'jasinja.tests'],
package_data={
'jasinja': ['*.js']
},
install_requires=requires,
test_suite='jasinja.tests.run.suite',
entry_points={
'console_scripts': ['jasinja-compile = jasinja.compile:main'],
},
)
| from setuptools import setup
import jasinja, sys
requires = ['Jinja2']
if sys.version_info < (2, 6):
requires += ['simplejson']
setup(
name='jasinja',
version=jasinja.__version__,
url='http://bitbucket.org/djc/jasinja',
license='BSD',
author='Dirkjan Ochtman',
author_email='dirkjan@ochtman.nl',
description='A JavaScript code generator for Jinja templates',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
],
packages=['jasinja', 'jasinja.tests'],
package_data={
'jasinja': ['*.js']
},
install_requires=requires,
test_suite='jasinja.tests.run.suite',
test_requires=['python-spidermonkey'],
entry_points={
'console_scripts': ['jasinja-compile = jasinja.compile:main'],
},
)
| Add test_requires dependency on python-spidermonkey. | Add test_requires dependency on python-spidermonkey.
| Python | bsd-3-clause | djc/jasinja,djc/jasinja | ---
+++
@@ -29,6 +29,7 @@
},
install_requires=requires,
test_suite='jasinja.tests.run.suite',
+ test_requires=['python-spidermonkey'],
entry_points={
'console_scripts': ['jasinja-compile = jasinja.compile:main'],
}, |
07c9b76d63714c431a983f0506ff71f19face3bd | astroquery/alma/tests/setup_package.py | astroquery/alma/tests/setup_package.py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
import os
def get_package_data():
paths = [os.path.join('data', '*.txt')]
return {'astroquery.alma.tests': paths}
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
import os
def get_package_data():
paths = [os.path.join('data', '*.txt'), os.path.join('data', '*.xml')]
return {'astroquery.alma.tests': paths}
| Include xml datafile for alma tests | Include xml datafile for alma tests
| Python | bsd-3-clause | imbasimba/astroquery,imbasimba/astroquery,ceb8/astroquery,ceb8/astroquery | ---
+++
@@ -5,5 +5,5 @@
def get_package_data():
- paths = [os.path.join('data', '*.txt')]
+ paths = [os.path.join('data', '*.txt'), os.path.join('data', '*.xml')]
return {'astroquery.alma.tests': paths} |
c0ee0f27b21ed7c6eb97ad6b1fc1c7d72127c772 | audio_pipeline/tb_ui/util/Resources.py | audio_pipeline/tb_ui/util/Resources.py | import uuid
import os
from audio_pipeline.util.AudioFileFactory import AudioFileFactory
from audio_pipeline.util import Exceptions
mbid_directory = "Ready To Filewalk"
picard_directory = "Picard Me!"
cache_limit = 30
cancel = -1
checked = 1
unchecked = 0
def has_mbid(track):
"""
Check whether or not the given track has an MBID.
"""
if track.mbid.value:
try:
id = uuid.UUID(track.mbid.value)
good = True
except ValueError as e:
good = False
else:
good = False
return good
def is_release(directory):
d = os.path.split(directory)[1]
track = False
# we'll set this to a DBPOWERAMP config later
#if InputPatterns.release_pattern.match(d):
for f in os.scandir(directory):
if f.is_file:
file_name = f.name
try:
track = AudioFileFactory.get(f.path)
except IOError:
track = False
continue
except Exceptions.UnsupportedFiletypeError:
track = False
continue
break
return track
| import uuid
import os
from audio_pipeline.util.AudioFileFactory import AudioFileFactory
from audio_pipeline.util import Exceptions
mbid_directory = "Ready To Filewalk"
picard_directory = "Picard Me!"
cache_limit = 30
cancel = -1
checked = 1
unchecked = 0
def has_mbid(track):
"""
Check whether or not the given track has an MBID.
"""
if track.mbid.value:
try:
id = uuid.UUID(track.mbid.value)
good = True
except ValueError as e:
good = False
else:
good = False
return good
def is_release(directory):
track = False
# we'll set this to a DBPOWERAMP config later
#if InputPatterns.release_pattern.match(d):
for f in os.listdir(directory):
file_path = os.path.join(directory, f)
if os.path.isfile(file_path):
try:
track = AudioFileFactory.get(file_path)
except IOError:
track = False
continue
except Exceptions.UnsupportedFiletypeError:
track = False
continue
break
return track
| Remove os.scandir usage (not in python 3.4) | Remove os.scandir usage (not in python 3.4)
| Python | mit | hidat/audio_pipeline | ---
+++
@@ -29,19 +29,19 @@
return good
+
def is_release(directory):
- d = os.path.split(directory)[1]
track = False
# we'll set this to a DBPOWERAMP config later
#if InputPatterns.release_pattern.match(d):
- for f in os.scandir(directory):
- if f.is_file:
- file_name = f.name
+ for f in os.listdir(directory):
+ file_path = os.path.join(directory, f)
+ if os.path.isfile(file_path):
try:
- track = AudioFileFactory.get(f.path)
+ track = AudioFileFactory.get(file_path)
except IOError:
track = False
continue |
dd55baacdda9e0a88ec6924bbe29ad3f4bb30d21 | ghost.py | ghost.py | # creating file
| # ghost.py
# globals
g_words = dict()
_END = "_END"
def addWord(word):
# check if subset is already in the dictionary
cur = g_words
for char in word:
if char in cur:
cur = cur[char]
elif _END in cur:
return
else
break
# add word to dictionary
cur = g_words
for char in word:
cur = cur.setdefault(char, dict())
cur[_END] = _END
def main(argv):
# fill dictionary
with open("./words.txt") as wordfile:
for word in wordfile:
addWord(word)
if len(argv) > 0 and argv[0] == "-c":
# solo mode random
elif len(argv) > 1 and argv[0] == "-l":
firstletter = argv[1][0]
# solo mode with letter decided
else:
# normal human vs computer
if __name__ == "__main__":
main(sys.argv[1:])
| Set up framework, need to work on strategy | Set up framework, need to work on strategy
| Python | cc0-1.0 | tobiaselder/ghost | ---
+++
@@ -1 +1,43 @@
-# creating file
+# ghost.py
+
+# globals
+g_words = dict()
+_END = "_END"
+
+def addWord(word):
+ # check if subset is already in the dictionary
+ cur = g_words
+ for char in word:
+ if char in cur:
+ cur = cur[char]
+ elif _END in cur:
+ return
+ else
+ break
+
+ # add word to dictionary
+ cur = g_words
+ for char in word:
+ cur = cur.setdefault(char, dict())
+ cur[_END] = _END
+
+def main(argv):
+ # fill dictionary
+ with open("./words.txt") as wordfile:
+ for word in wordfile:
+ addWord(word)
+
+ if len(argv) > 0 and argv[0] == "-c":
+ # solo mode random
+ elif len(argv) > 1 and argv[0] == "-l":
+ firstletter = argv[1][0]
+ # solo mode with letter decided
+ else:
+ # normal human vs computer
+
+
+
+
+
+if __name__ == "__main__":
+ main(sys.argv[1:]) |
0e6ec8edc9502918b8b9ae01e14c7f485d6b261d | Init.py | Init.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# ***************************************************************************
# * *
# * Copyright (c) 2016 execuc *
# * *
# * This file is part of LCInterlocking module. *
# * LCInterlocking module is free software; you can redistribute it and/or*
# * modify it under the terms of the GNU Lesser General Public *
# * License as published by the Free Software Foundation; either *
# * version 2.1 of the License, or (at your option) any later version. *
# * *
# * This module is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU *
# * Lesser General Public License for more details. *
# * *
# * You should have received a copy of the GNU Lesser General Public *
# * License along with this library; if not, write to the Free Software *
# * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, *
# * MA 02110-1301 USA *
# * *
# ***************************************************************************
print "Interlocking laser cut workbench loaded"
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# ***************************************************************************
# * *
# * Copyright (c) 2016 execuc *
# * *
# * This file is part of LCInterlocking module. *
# * LCInterlocking module is free software; you can redistribute it and/or*
# * modify it under the terms of the GNU Lesser General Public *
# * License as published by the Free Software Foundation; either *
# * version 2.1 of the License, or (at your option) any later version. *
# * *
# * This module is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU *
# * Lesser General Public License for more details. *
# * *
# * You should have received a copy of the GNU Lesser General Public *
# * License along with this library; if not, write to the Free Software *
# * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, *
# * MA 02110-1301 USA *
# * *
# ***************************************************************************
print("Interlocking laser cut workbench loaded")
| Fix missing parentheses initialization error | Fix missing parentheses initialization error | Python | lgpl-2.1 | execuc/LCInterlocking | ---
+++
@@ -23,4 +23,4 @@
# * *
# ***************************************************************************
-print "Interlocking laser cut workbench loaded"
+print("Interlocking laser cut workbench loaded") |
8ecd9c4605e92946941fe29a837b0b1855232135 | setup.py | setup.py | from setuptools import setup, find_packages
setup(
name = "idea-collection",
version = "0.1",
#url = "TBD",
license = "public domain",
description = "An idea collection tool for django",
author = "Jui Dai, Jennifer Ehlers, David Kennedy, Shashank Khandelwal, CM Lubinski",
packages = find_packages('src'),
package_dir = {'':'src'},
install_requires = ['setuptools'],
)
| from setuptools import setup, find_packages
setup(
name = "idea-collection",
version = "0.1.0",
#url = "TBD",
license = "public domain",
description = "An idea collection tool for django",
author = "Jui Dai, Jennifer Ehlers, David Kennedy, Shashank Khandelwal, CM Lubinski",
packages = find_packages('src'),
package_dir = {'':'src'},
install_requires = ['setuptools'],
)
| Use 3-point version number as per semver.org | Use 3-point version number as per semver.org
| Python | cc0-1.0 | cfpb/idea-box,18F/idea-box,cfpb/idea-box,m3brown/idea-box,geomapdev/idea-box,geomapdev/idea-box,CapeSepias/idea-box,CapeSepias/idea-box,CapeSepias/idea-box,cmc333333/idea-box,cmc333333/idea-box,geomapdev/idea-box,m3brown/idea-box,18F/idea-box,cfpb/idea-box,18F/idea-box,cmc333333/idea-box | ---
+++
@@ -2,7 +2,7 @@
setup(
name = "idea-collection",
- version = "0.1",
+ version = "0.1.0",
#url = "TBD",
license = "public domain",
description = "An idea collection tool for django", |
9bd2a3951dca5e014799f31da30b4814af2b7a3b | setup.py | setup.py | from setuptools import find_packages, setup
setup(
name="clipspy",
version="0.0.1",
author="Matteo Cafasso",
author_email="noxdafox@gmail.com",
description=("CLIPS Python bindings."),
packages=find_packages(),
ext_package="clips",
setup_requires=["cffi>=1.0.0"],
install_requires=["cffi>=1.0.0"],
cffi_modules=["clips_build.py:ffibuilder"],
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License"
]
)
| from setuptools import find_packages, setup
setup(
name="clipspy",
version="0.0.1",
author="Matteo Cafasso",
author_email="noxdafox@gmail.com",
description=("CLIPS Python bindings."),
packages=find_packages(),
ext_package="clips",
setup_requires=["cffi>=1.0.0"],
install_requires=["cffi>=1.0.0", "enum34"],
cffi_modules=["clips_build.py:ffibuilder"],
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License"
]
)
| Add Enum backport dependency for python 2 | Add Enum backport dependency for python 2
Signed-off-by: Matteo Cafasso <43c758619f0ced9dc5c51a50803f3466c86e4c3d@gmail.com>
| Python | bsd-3-clause | noxdafox/clipspy,noxdafox/clipspy | ---
+++
@@ -9,7 +9,7 @@
packages=find_packages(),
ext_package="clips",
setup_requires=["cffi>=1.0.0"],
- install_requires=["cffi>=1.0.0"],
+ install_requires=["cffi>=1.0.0", "enum34"],
cffi_modules=["clips_build.py:ffibuilder"],
classifiers=[
"Programming Language :: Python", |
86c3859838c93df1bbe90bce8abc9971f0affe1f | setup.py | setup.py | from setuptools import setup
setup(name='turtleart',
version='0.2',
description='Library to facilitate Turtle Art',
author='Ava and Teo Lisitza',
author_email='mlisitza+github@gmail.com',
packages=['turtleart', 'turtleart.scripts'],
entry_points={
'console_scripts': [
'qturtle = turtleart.scripts.qturtle:main'
]
},
)
| from setuptools import setup
setup(name='turtleart',
version='0.2',
description='Library to facilitate Turtle Art',
author='Ava and Teo Lisitza',
author_email='mlisitza+github@gmail.com',
packages=['turtleart', 'turtleart.scripts'],
entry_points={
'console_scripts': [
'qturtle = turtleart.scripts.qturtle:main'
]
},
install_requires=[
'ipython',
]
)
| Make turtleart package require ipython | Make turtleart package require ipython
| Python | apache-2.0 | teolisitza/turtleart | ---
+++
@@ -11,4 +11,7 @@
'qturtle = turtleart.scripts.qturtle:main'
]
},
+ install_requires=[
+ 'ipython',
+ ]
) |
e751cb4f4805aed079fc025b9b1655f30cf5e69a | watson/html/entities.py | watson/html/entities.py | # -*- coding: utf-8 -*-
import re
from html import _escape_map_full
from html.entities import codepoint2name
html_entities = {_ord: '&{0};'.format(value)
for _ord, value in codepoint2name.items()}
html_entities.update(_escape_map_full)
entities_html = {value: _ord for _ord, value in html_entities.items()}
def encode(string):
"""Encodes html entities.
This is a little more full featured than html.escape, as it will
replace all charactes from codepoint2name.
Returns:
string with replaced html entities.
"""
return string.translate(html_entities)
def decode(string):
"""Decodes html entities.
Returns:
string with html entities decoded.
"""
return (
re.sub(
'&(?:[#a-z][a-z0-9]+);',
lambda m: chr(entities_html[m.group()]),
string)
)
| # -*- coding: utf-8 -*-
import re
from html.entities import codepoint2name
try:
from html import _escape_map_full
except:
# taken from the 3.3 standard lib, as it's removed in 3.4
_escape_map_full = {ord('&'): '&', ord('<'): '<', ord('>'): '>',
ord('"'): '"', ord('\''): '''}
html_entities = {_ord: '&{0};'.format(value)
for _ord, value in codepoint2name.items()}
html_entities.update(_escape_map_full)
entities_html = {value: _ord for _ord, value in html_entities.items()}
def encode(string):
"""Encodes html entities.
This is a little more full featured than html.escape, as it will
replace all charactes from codepoint2name.
Returns:
string with replaced html entities.
"""
return string.translate(html_entities)
def decode(string):
"""Decodes html entities.
Returns:
string with html entities decoded.
"""
return (
re.sub(
'&(?:[#a-z][a-z0-9]+);',
lambda m: chr(entities_html[m.group()]),
string)
)
| Fix for Python 3.4 html module not containing _escape_map_full | Fix for Python 3.4 html module not containing _escape_map_full
| Python | bsd-3-clause | watsonpy/watson-html | ---
+++
@@ -1,7 +1,13 @@
# -*- coding: utf-8 -*-
import re
-from html import _escape_map_full
from html.entities import codepoint2name
+
+try:
+ from html import _escape_map_full
+except:
+ # taken from the 3.3 standard lib, as it's removed in 3.4
+ _escape_map_full = {ord('&'): '&', ord('<'): '<', ord('>'): '>',
+ ord('"'): '"', ord('\''): '''}
html_entities = {_ord: '&{0};'.format(value)
for _ord, value in codepoint2name.items()} |
9a003165301e60ee4486b0a8bdde79e84eef65d8 | rparse.py | rparse.py | #!/usr/bin/env python
# Copyright 2015, Dmitry Veselov
from plyplus import Grammar, ParseError
try:
# Python 2.x and pypy
from itertools import imap as map
except ImportError:
# Python 3.x already have lazy map
pass
__all__ = [
"parse"
]
grammar = Grammar(r"""
start : package ;
package: name vspec? ;
name : string ;
vspec : comparison version (',' comparison version)* ;
comparison : '<' | '<=' | '!=' | '==' | '>=' | '>' | '~=' | '===' ;
version : string ;
@string : '[-A-Za-z0-9_\.]+' ;
SPACES: '[ \t\n]+' (%ignore) (%newline);
""")
def _parse(requirement, g=grammar):
try:
return g.parse(requirement)
except ParseError:
message = "Invalid requirements line: '{0}'" \
.format(requirement.strip())
raise ValueError(message)
def parse(requirements):
"""
Parses given requirements line-by-line.
"""
return map(_parse, filter(None, requirements.splitlines()))
| #!/usr/bin/env python
# Copyright 2015, Dmitry Veselov
from plyplus import Grammar, STransformer, ParseError
try:
# Python 2.x and pypy
from itertools import imap as map
except ImportError:
# Python 3.x already have lazy map
pass
__all__ = [
"parse"
]
grammar = Grammar(r"""
@start : package ;
package: name vspec? ;
name : string ;
vspec : comparison version (',' comparison version)* ;
comparison : '<' | '<=' | '!=' | '==' | '>=' | '>' | '~=' | '===' ;
version : string ;
@string : '[-A-Za-z0-9_\.]+' ;
SPACES: '[ \t\n]+' (%ignore) (%newline);
""")
class RTransformer(STransformer):
def package(self, node):
if len(node.tail) == 2:
name, vspec = node.tail
else:
name, vspec = node.tail[0], None
return name, vspec
def name(self, node):
return node.tail[0]
def vspec(self, node):
comparisons, versions = node.tail[0::2], node.tail[1::2]
return list(zip(comparisons, versions))
def comparison(self, node):
return node.tail[0]
def version(self, node):
return node.tail[0]
def _parse(requirement, g=grammar):
try:
return g.parse(requirement)
except ParseError:
message = "Invalid requirements line: '{0}'" \
.format(requirement.strip())
raise ValueError(message)
def parse(requirements):
"""
Parses given requirements line-by-line.
"""
transformer = RTransformer()
return map(transformer.transform, map(_parse, filter(None, requirements.splitlines())))
| Transform AST to python objects | Transform AST to python objects
| Python | mit | dveselov/rparse | ---
+++
@@ -1,7 +1,7 @@
#!/usr/bin/env python
# Copyright 2015, Dmitry Veselov
-from plyplus import Grammar, ParseError
+from plyplus import Grammar, STransformer, ParseError
try:
# Python 2.x and pypy
from itertools import imap as map
@@ -16,7 +16,7 @@
grammar = Grammar(r"""
-start : package ;
+@start : package ;
package: name vspec? ;
@@ -33,6 +33,29 @@
""")
+class RTransformer(STransformer):
+
+ def package(self, node):
+ if len(node.tail) == 2:
+ name, vspec = node.tail
+ else:
+ name, vspec = node.tail[0], None
+ return name, vspec
+
+ def name(self, node):
+ return node.tail[0]
+
+ def vspec(self, node):
+ comparisons, versions = node.tail[0::2], node.tail[1::2]
+ return list(zip(comparisons, versions))
+
+ def comparison(self, node):
+ return node.tail[0]
+
+ def version(self, node):
+ return node.tail[0]
+
+
def _parse(requirement, g=grammar):
try:
return g.parse(requirement)
@@ -46,4 +69,5 @@
"""
Parses given requirements line-by-line.
"""
- return map(_parse, filter(None, requirements.splitlines()))
+ transformer = RTransformer()
+ return map(transformer.transform, map(_parse, filter(None, requirements.splitlines()))) |
0e472a135a22470f87a90f941ddfbb0a1cfd3a70 | setup.py | setup.py | import os
import re
from pathlib import Path
from setuptools import setup
project_path = Path(__file__).parent
README = (project_path / 'README.md').read_text()
version_text = (project_path / 'django_admin_json_editor' / '__init__.py').read_text()
VERSION = re.compile(r'.*__version__ = \'(.*?)\'', re.S).match(version_text).group(1)
# allow setup.py to be run from any path
os.chdir(project_path)
setup(
name='django-admin-json-editor',
version=VERSION,
packages=['django_admin_json_editor'],
include_package_data=True,
license='MIT License',
description='A simple Django app to add JSON widget into Django Administration.',
long_description=README,
url='https://github.com/abogushov/django-admin-json-editor',
author='Alexander Bogushov',
author_email='abogushov@gmail.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
install_requires=['Django'],
)
| import os
import re
from setuptools import setup
project_path = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(project_path, 'README.md'), 'r') as fout:
README = fout.read()
with open(os.path.join(project_path, 'django_admin_json_editor', '__init__.py'), 'r') as fout:
version_text = fout.read()
VERSION = re.compile(r'.*__version__ = \'(.*?)\'', re.S).match(version_text).group(1)
# allow setup.py to be run from any path
os.chdir(project_path)
setup(
name='django-admin-json-editor',
version=VERSION,
packages=['django_admin_json_editor'],
include_package_data=True,
license='MIT License',
description='A simple Django app to add JSON widget into Django Administration.',
long_description=README,
url='https://github.com/abogushov/django-admin-json-editor',
author='Alexander Bogushov',
author_email='abogushov@gmail.com',
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
],
install_requires=['Django'],
)
| Make installation python 2.7 compatible | Make installation python 2.7 compatible
| Python | mit | abogushov/django-admin-json-editor,abogushov/django-admin-json-editor | ---
+++
@@ -1,14 +1,14 @@
import os
import re
-from pathlib import Path
from setuptools import setup
-project_path = Path(__file__).parent
+project_path = os.path.abspath(os.path.dirname(__file__))
+with open(os.path.join(project_path, 'README.md'), 'r') as fout:
+ README = fout.read()
+with open(os.path.join(project_path, 'django_admin_json_editor', '__init__.py'), 'r') as fout:
+ version_text = fout.read()
-README = (project_path / 'README.md').read_text()
-
-version_text = (project_path / 'django_admin_json_editor' / '__init__.py').read_text()
VERSION = re.compile(r'.*__version__ = \'(.*?)\'', re.S).match(version_text).group(1)
# allow setup.py to be run from any path |
2c4964026e07f572ccd2213b2093a39b99a8888a | setup.py | setup.py | from setuptools import setup, find_packages
setup(
name='panoptescli',
version='1.1.1',
url='https://github.com/zooniverse/panoptes-cli',
author='Adam McMaster',
author_email='adam@zooniverse.org',
description=(
'A command-line client for Panoptes, the API behind the Zooniverse'
),
packages=find_packages(),
include_package_data=True,
install_requires=[
'Click>=6.7,<7.1',
'PyYAML>=5.1,<5.3',
'panoptes-client>=1.0,<2.0',
'humanize>=0.5.1,<0.6',
'pathvalidate>=0.29.0,<0.30',
],
entry_points='''
[console_scripts]
panoptes=panoptes_cli.scripts.panoptes:cli
''',
)
| from setuptools import setup, find_packages
setup(
name='panoptescli',
version='1.1.1',
url='https://github.com/zooniverse/panoptes-cli',
author='Adam McMaster',
author_email='adam@zooniverse.org',
description=(
'A command-line client for Panoptes, the API behind the Zooniverse'
),
packages=find_packages(),
include_package_data=True,
install_requires=[
'Click>=6.7,<7.1',
'PyYAML>=5.1,<5.4',
'panoptes-client>=1.0,<2.0',
'humanize>=0.5.1,<0.6',
'pathvalidate>=0.29.0,<0.30',
],
entry_points='''
[console_scripts]
panoptes=panoptes_cli.scripts.panoptes:cli
''',
)
| Update pyyaml requirement from <5.3,>=5.1 to >=5.1,<5.4 | Update pyyaml requirement from <5.3,>=5.1 to >=5.1,<5.4
Updates the requirements on [pyyaml](https://github.com/yaml/pyyaml) to permit the latest version.
- [Release notes](https://github.com/yaml/pyyaml/releases)
- [Changelog](https://github.com/yaml/pyyaml/blob/master/CHANGES)
- [Commits](https://github.com/yaml/pyyaml/compare/5.1...5.3)
Signed-off-by: dependabot-preview[bot] <5bdcd3c0d4d24ae3e71b3b452a024c6324c7e4bb@dependabot.com> | Python | apache-2.0 | zooniverse/panoptes-cli | ---
+++
@@ -13,7 +13,7 @@
include_package_data=True,
install_requires=[
'Click>=6.7,<7.1',
- 'PyYAML>=5.1,<5.3',
+ 'PyYAML>=5.1,<5.4',
'panoptes-client>=1.0,<2.0',
'humanize>=0.5.1,<0.6',
'pathvalidate>=0.29.0,<0.30', |
82316ec166b4ab64f1890f33e8b9b75bd6733e53 | fabfile/eg.py | fabfile/eg.py | from fabric.api import task, local, run, lcd, cd, env
from os.path import exists as file_exists
from fabtools.python import virtualenv
from os import path
PWD = path.join(path.dirname(__file__), '..')
VENV_DIR = path.join(PWD, '.env')
@task
def mnist():
with virtualenv(VENV_DIR):
with lcd(PWD):
local('pip install -e .')
local('python examples/mnist.py')
@task
def basic_tagger():
with virtualenv(VENV_DIR):
with lcd(PWD):
local('pip install -e .')
local('mkdir data')
install_ancora()
local('python examples/basic_tagger.py')
| from fabric.api import task, local, run, lcd, cd, env
from os.path import exists as file_exists
from fabtools.python import virtualenv
from os import path
PWD = path.join(path.dirname(__file__), '..')
VENV_DIR = path.join(PWD, '.env')
@task
def mnist():
with virtualenv(VENV_DIR):
with lcd(PWD):
local('pip install -e .')
local('python examples/mnist.py')
@task
def basic_tagger():
with virtualenv(VENV_DIR):
with lcd(PWD):
local('pip install -e .')
local('python examples/basic_tagger.py')
| Remove dataset installation from fabfile | Remove dataset installation from fabfile
| Python | mit | spacy-io/thinc,spacy-io/thinc,explosion/thinc,explosion/thinc,explosion/thinc,spacy-io/thinc,explosion/thinc | ---
+++
@@ -21,6 +21,4 @@
with virtualenv(VENV_DIR):
with lcd(PWD):
local('pip install -e .')
- local('mkdir data')
- install_ancora()
local('python examples/basic_tagger.py') |
551335ba6cd219cd90bf7419bb73804bb5851c64 | typescript/commands/build.py | typescript/commands/build.py | import sublime_plugin
import sublime
import os
class TypescriptBuildCommand(sublime_plugin.WindowCommand):
def run(self):
file_name = self.window.active_view().file_name()
directory = os.path.dirname(file_name)
if "tsconfig.json" in os.listdir(directory):
self.window.run_command("exec", {
"shell_cmd": "tsc",
"file_regex": "^(.+?)\\((\\d+),(\\d+)\\): (.+)$"
})
else:
sublime.active_window().show_input_panel(
"Build parameters: ",
"", # initial text
self.compile_inferred_project,
None, # on change
None # on cancel
)
def compile_inferred_project(self, params=""):
file_name = self.window.active_view().file_name()
self.window.run_command("exec", {
"shell_cmd": "tsc {0} {1}".format(file_name, params),
"file_regex": "^(.+?)\\((\\d+),(\\d+)\\): (.+)$"
})
| import sublime_plugin
import sublime
import os
from ..libs.global_vars import IS_ST2
class TypescriptBuildCommand(sublime_plugin.WindowCommand):
def run(self):
file_name = self.window.active_view().file_name()
directory = os.path.dirname(file_name)
if "tsconfig.json" in os.listdir(directory):
self.window.run_command("exec", {
"cmd": "tsc",
"file_regex": "^(.+?)\\((\\d+),(\\d+)\\): (.+)$",
"shell": True
})
else:
sublime.active_window().show_input_panel(
"Build parameters: ",
"", # initial text
self.compile_inferred_project,
None, # on change
None # on cancel
)
def compile_inferred_project(self, params=""):
file_name = self.window.active_view().file_name()
if not IS_ST2:
cmd = "tsc {0} {1}".format(file_name, params)
self.window.run_command("exec", {
"shell_cmd": cmd,
"file_regex": "^(.+?)\\((\\d+),(\\d+)\\): (.+)$"
})
else:
cmd = "tsc {0} {1}".format(file_name, params)
self.window.run_command("exec", {
"cmd": [cmd],
"file_regex": "^(.+?)\\((\\d+),(\\d+)\\): (.+)$",
"shell": True
})
| Add support for sublime 2 | Add support for sublime 2
| Python | apache-2.0 | RyanCavanaugh/TypeScript-Sublime-Plugin,kungfusheep/TypeScript-Sublime-Plugin,Microsoft/TypeScript-Sublime-Plugin,zhengbli/TypeScript-Sublime-Plugin,fongandrew/TypeScript-Sublime-JSX-Plugin,Microsoft/TypeScript-Sublime-Plugin,zhengbli/TypeScript-Sublime-Plugin,hoanhtien/TypeScript-Sublime-Plugin,zhengbli/TypeScript-Sublime-Plugin,fongandrew/TypeScript-Sublime-JSX-Plugin,Microsoft/TypeScript-Sublime-Plugin,RyanCavanaugh/TypeScript-Sublime-Plugin,hoanhtien/TypeScript-Sublime-Plugin,fongandrew/TypeScript-Sublime-JSX-Plugin,kungfusheep/TypeScript-Sublime-Plugin,hoanhtien/TypeScript-Sublime-Plugin,kungfusheep/TypeScript-Sublime-Plugin,RyanCavanaugh/TypeScript-Sublime-Plugin | ---
+++
@@ -1,6 +1,7 @@
import sublime_plugin
import sublime
import os
+from ..libs.global_vars import IS_ST2
class TypescriptBuildCommand(sublime_plugin.WindowCommand):
@@ -9,8 +10,9 @@
directory = os.path.dirname(file_name)
if "tsconfig.json" in os.listdir(directory):
self.window.run_command("exec", {
- "shell_cmd": "tsc",
- "file_regex": "^(.+?)\\((\\d+),(\\d+)\\): (.+)$"
+ "cmd": "tsc",
+ "file_regex": "^(.+?)\\((\\d+),(\\d+)\\): (.+)$",
+ "shell": True
})
else:
sublime.active_window().show_input_panel(
@@ -23,7 +25,16 @@
def compile_inferred_project(self, params=""):
file_name = self.window.active_view().file_name()
- self.window.run_command("exec", {
- "shell_cmd": "tsc {0} {1}".format(file_name, params),
- "file_regex": "^(.+?)\\((\\d+),(\\d+)\\): (.+)$"
- })
+ if not IS_ST2:
+ cmd = "tsc {0} {1}".format(file_name, params)
+ self.window.run_command("exec", {
+ "shell_cmd": cmd,
+ "file_regex": "^(.+?)\\((\\d+),(\\d+)\\): (.+)$"
+ })
+ else:
+ cmd = "tsc {0} {1}".format(file_name, params)
+ self.window.run_command("exec", {
+ "cmd": [cmd],
+ "file_regex": "^(.+?)\\((\\d+),(\\d+)\\): (.+)$",
+ "shell": True
+ }) |
89a700b49d89a711f44a834d879c5edef959c195 | setup.py | setup.py | from setuptools import find_packages, setup
import sr.comp.scheduler.metadata as md
with open('README.rst') as f:
long_description = f.read()
setup(name='sr.comp.scheduler',
version=md.VERSION,
packages=find_packages(),
namespace_packages=['sr', 'sr.comp'],
description=md.DESCRIPTION,
long_description=long_description,
entry_points={
'console_scripts': ['sr-comp-schedule=sr.comp.scheduler.scheduler:cli_main'],
},
license='MIT',
author='Student Robotics Competition Software SIG',
author_email='srobo-devel@googlegroups.com',
install_requires=['PyYAML >=3.11, <4'],
setup_requires=[
'Sphinx >=1.3b, <2',
'sphinx-argparse >=0.1.13, <0.2'
],
zip_safe=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy'
])
| from setuptools import find_packages, setup
import sr.comp.scheduler.metadata as md
with open('README.rst') as f:
long_description = f.read()
setup(name='sr.comp.scheduler',
version=md.VERSION,
packages=find_packages(),
namespace_packages=['sr', 'sr.comp'],
description=md.DESCRIPTION,
long_description=long_description,
entry_points={
'console_scripts': ['sr-comp-schedule=sr.comp.scheduler.scheduler:cli_main'],
},
license='MIT',
author='Student Robotics Competition Software SIG',
author_email='srobo-devel@googlegroups.com',
install_requires=['PyYAML >=3.11, <4'],
setup_requires=[
'Sphinx >=1.2, <2',
'sphinx-argparse >=0.1.13, <0.2'
],
zip_safe=True,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy'
])
| Drop the lower-bound on the Sphinx version | Drop the lower-bound on the Sphinx version
| Python | mit | prophile/sr-scheduler-2015 | ---
+++
@@ -18,7 +18,7 @@
author_email='srobo-devel@googlegroups.com',
install_requires=['PyYAML >=3.11, <4'],
setup_requires=[
- 'Sphinx >=1.3b, <2',
+ 'Sphinx >=1.2, <2',
'sphinx-argparse >=0.1.13, <0.2'
],
zip_safe=True, |
43ad157b058693fd38280a802ed4da92301ef4f1 | setup.py | setup.py | import re
from setuptools import setup
with open('mashdown/__init__.py') as f:
version = re.search(
r'(?<=__version__ = \')\d\.\d\.\d(?=\')',
f.read()
).group()
with open('README.rst') as f:
readme = f.read()
setup(
name=u'mashdown',
version=version,
description=u'Splits a youtube mashup video in a list of tagged audio files',
long_description=readme,
author=u'Balthazar Rouberol',
author_email=u'brouberol@imap.cc',
license='License :: OSI Approved :: MIT License',
packages=['mashdown'],
install_requires=['pydub', 'pafy', 'mutagen'],
entry_points={
'console_scripts': ['mashdown=main:main']
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: TODO',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Multimedia :: Sound/Audio :: Conversion',
'Topic :: Multimedia :: Video :: Conversion'
],
zip_safe=False,
)
| import re
from setuptools import setup
with open('mashdown/__init__.py') as f:
version = re.search(
r'(?<=__version__ = \')\d\.\d\.\d(?=\')',
f.read()
).group()
with open('README.rst') as f:
readme = f.read()
setup(
name=u'mashdown',
version=version,
description=u'Splits a youtube mashup video in a list of tagged audio files',
long_description=readme,
author=u'Balthazar Rouberol',
author_email=u'brouberol@imap.cc',
license='License :: OSI Approved :: MIT License',
packages=['mashdown'],
install_requires=['pydub', 'pafy', 'mutagen'],
entry_points={
'console_scripts': ['mashdown=main:main']
},
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Multimedia :: Sound/Audio :: Conversion',
'Topic :: Multimedia :: Video :: Conversion'
],
zip_safe=False,
)
| Add a proper License PyPI classifier | Add a proper License PyPI classifier
| Python | mit | brouberol/mashdown | ---
+++
@@ -29,7 +29,7 @@
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
- 'License :: TODO',
+ 'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7', |
d0472714a97af951250f68c7c22e57448f689291 | setup.py | setup.py | from setuptools import setup
import os
data = list()
for d in os.walk('markdo/'):
if len(d[2]) > 0:
path_list = map(
lambda x: str.join('/', os.path.join(d[0], x).split('/')[1:]),
d[2]
)
data.extend(path_list)
setup(
name="MarkDo",
version="0.2",
author="Nitipit Nontasuwan",
author_email="nitipit@gmail.com",
url="http://nitipit.github.com/markdo/",
license="MIT",
description="Markdown editor for Gnome",
platforms=['linux'],
keywords=['editor', 'markdown'],
package_dir={'markdo': 'markdo'},
packages=['markdo'],
scripts=['markdo/markdo'],
package_data={'markdo': data},
install_requires=['appkit==0.2'],
)
| from setuptools import setup
import os
data = list()
for d in os.walk('markdo/'):
if len(d[2]) > 0:
path_list = map(
lambda x: str.join('/', os.path.join(d[0], x).split('/')[1:]),
d[2]
)
data.extend(path_list)
setup(
name="MarkDo",
version="0.2",
author="Nitipit Nontasuwan",
author_email="nitipit@gmail.com",
url="http://nitipit.github.com/markdo/",
license="MIT",
description="Markdown editor for Gnome",
platforms=['linux'],
keywords=['editor', 'markdown'],
package_dir={'markdo': 'markdo'},
packages=['markdo'],
scripts=['markdo/markdo'],
package_data={'markdo': data},
install_requires=['AppKit==0.2', 'Jinja2'],
)
| Add jinja2 as a required lib | Add jinja2 as a required lib
| Python | mit | nitipit/markdo,nitipit/markdo,nitipit/markdo | ---
+++
@@ -24,5 +24,5 @@
packages=['markdo'],
scripts=['markdo/markdo'],
package_data={'markdo': data},
- install_requires=['appkit==0.2'],
+ install_requires=['AppKit==0.2', 'Jinja2'],
) |
73c5ec2b477dc172dedf0b30dae16665babb14f6 | setup.py | setup.py | import sys
import os
from setuptools import setup
long_description = open('README.rst').read()
classifiers = [
'Development Status :: 4 - Beta',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
]
setup_kwargs = dict(
name='powershift-cluster',
version='1.1.5',
description='PowerShift command plugin for creating OpenShift clusters.',
long_description=long_description,
url='https://github.com/getwarped/powershift-cluster',
author='Graham Dumpleton',
author_email='Graham.Dumpleton@gmail.com',
license='BSD',
classifiers=classifiers,
keywords='openshift kubernetes',
packages=['powershift', 'powershift.cluster'],
package_dir={'powershift': 'src/powershift'},
install_requires=['passlib'],
extras_require={'cli': ['powershift-cli>=1.1.1']},
entry_points = {'powershift_cli_plugins': ['cluster = powershift.cluster']},
)
setup(**setup_kwargs)
| import sys
import os
from setuptools import setup
long_description = open('README.rst').read()
classifiers = [
'Development Status :: 4 - Beta',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
]
setup_kwargs = dict(
name='powershift-cluster',
version='1.1.5',
description='PowerShift command plugin for creating OpenShift clusters.',
long_description=long_description,
url='https://github.com/getwarped/powershift-cluster',
author='Graham Dumpleton',
author_email='Graham.Dumpleton@gmail.com',
license='BSD',
classifiers=classifiers,
keywords='openshift kubernetes',
packages=['powershift', 'powershift.cluster'],
package_dir={'powershift': 'src/powershift'},
install_requires=['passlib'],
extras_require={'cli': ['powershift-cli>=1.1.8']},
entry_points = {'powershift_cli_plugins': ['cluster = powershift.cluster']},
)
setup(**setup_kwargs)
| Update minimum version of cli package. | Update minimum version of cli package.
| Python | bsd-2-clause | getwarped/powershift-cluster,getwarped/powershift-cluster | ---
+++
@@ -31,7 +31,7 @@
packages=['powershift', 'powershift.cluster'],
package_dir={'powershift': 'src/powershift'},
install_requires=['passlib'],
- extras_require={'cli': ['powershift-cli>=1.1.1']},
+ extras_require={'cli': ['powershift-cli>=1.1.8']},
entry_points = {'powershift_cli_plugins': ['cluster = powershift.cluster']},
)
|
0ca332f81d8c5be094855f32971da40bed991540 | setup.py | setup.py | from setuptools import setup, find_packages
import populous
requirements = [
"click",
"cached-property",
"fake-factory",
]
setup(
name="populous",
version=populous.__version__,
url=populous.__url__,
description=populous.__doc__,
author=populous.__author__,
license=populous.__license__,
long_description="TODO",
packages=find_packages(),
install_requires=requirements,
entry_points={
'console_scripts': [
'populous = populous.__main__:cli'
]
},
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Utilities",
],
keywords='populous populate database',
)
| import sys
from setuptools import setup, find_packages
import populous
requirements = [
"click",
"cached-property",
"fake-factory",
]
if sys.version_info < (3, 2):
requirements.append('functools32')
setup(
name="populous",
version=populous.__version__,
url=populous.__url__,
description=populous.__doc__,
author=populous.__author__,
license=populous.__license__,
long_description="TODO",
packages=find_packages(),
install_requires=requirements,
entry_points={
'console_scripts': [
'populous = populous.__main__:cli'
]
},
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Utilities",
],
keywords='populous populate database',
)
| Add functools32 to the requirements for Python < 3.2 | Add functools32 to the requirements for Python < 3.2
| Python | mit | novafloss/populous | ---
+++
@@ -1,3 +1,5 @@
+import sys
+
from setuptools import setup, find_packages
import populous
@@ -7,6 +9,9 @@
"cached-property",
"fake-factory",
]
+
+if sys.version_info < (3, 2):
+ requirements.append('functools32')
setup(
name="populous", |
b49314c656db718c17ad8beadedf95b365ca7fc5 | setup.py | setup.py | # -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.19',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
| # -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.20',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
| Update the PyPI version to 0.2.20. | Update the PyPI version to 0.2.20.
| Python | mit | Doist/todoist-python | ---
+++
@@ -10,7 +10,7 @@
setup(
name='todoist-python',
- version='0.2.19',
+ version='0.2.20',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com', |
20fd827bdfb80e435065ada50174c6085780889a | setup.py | setup.py | from setuptools import setup
with open('README.md') as f:
readme = f.read()
setup(
name='watson',
version='1.0.0',
packages=['watson'],
author='TailorDev',
author_email='contact@tailordev.com',
license='MIT',
long_description=readme,
install_requires=[
'Click',
'arrow',
'requests',
],
entry_points={
'console_scripts': [
'watson = watson.__main__:cli',
]
},
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Customer Service",
"Intended Audience :: End Users/Desktop",
"Intended Audience :: Information Technology",
"Intended Audience :: Science/Research",
"Intended Audience :: Other Audience",
"License :: OSI Approved :: MIT License",
"Environment :: Console",
"Operating System :: MacOS",
"Operating System :: Microsoft :: Windows",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: Office/Business",
"Topic :: Utilities",
],
keywords='watson time-tracking time tracking monitoring report',
)
| from setuptools import setup
with open('README.md') as f:
readme = f.read()
setup(
name='td-watson',
version='1.0.0',
packages=['watson'],
author='TailorDev',
author_email='contact@tailordev.com',
license='MIT',
long_description=readme,
install_requires=[
'Click',
'arrow',
'requests',
],
entry_points={
'console_scripts': [
'watson = watson.__main__:cli',
]
},
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Customer Service",
"Intended Audience :: End Users/Desktop",
"Intended Audience :: Information Technology",
"Intended Audience :: Science/Research",
"Intended Audience :: Other Audience",
"License :: OSI Approved :: MIT License",
"Environment :: Console",
"Operating System :: MacOS",
"Operating System :: Microsoft :: Windows",
"Operating System :: POSIX",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: Office/Business",
"Topic :: Utilities",
],
keywords='watson time-tracking time tracking monitoring report',
)
| Rename the PyPI package to td-watson | Rename the PyPI package to td-watson
| Python | mit | TailorDev/Watson,wehlutyk/Watson,yloiseau/Watson,TailorDev/Watson | ---
+++
@@ -5,7 +5,7 @@
setup(
- name='watson',
+ name='td-watson',
version='1.0.0',
packages=['watson'],
author='TailorDev', |
e2b77a2c98cbd51c5c0546e4146dd60af0a64c86 | comics/accounts/admin.py | comics/accounts/admin.py | from django.contrib import admin
from comics.accounts import models
class SubscriptionInline(admin.StackedInline):
model = models.Subscription
extra = 1
def email(obj):
return obj.user.email
class UserProfileAdmin(admin.ModelAdmin):
list_display = ('user', email, 'secret_key')
inlines = [SubscriptionInline,]
readonly_fields = ('user',)
admin.site.register(models.UserProfile, UserProfileAdmin)
| from django.contrib import admin
from comics.accounts import models
class SubscriptionInline(admin.StackedInline):
model = models.Subscription
extra = 1
def email(obj):
return obj.user.email
def subscription_count(obj):
return obj.comics.count()
class UserProfileAdmin(admin.ModelAdmin):
list_display = ('user', email, 'secret_key', subscription_count)
inlines = [SubscriptionInline,]
readonly_fields = ('user',)
admin.site.register(models.UserProfile, UserProfileAdmin)
| Add subscription count to comics profile list | Add subscription count to comics profile list
| Python | agpl-3.0 | datagutten/comics,datagutten/comics,datagutten/comics,jodal/comics,datagutten/comics,jodal/comics,jodal/comics,jodal/comics | ---
+++
@@ -12,8 +12,12 @@
return obj.user.email
+def subscription_count(obj):
+ return obj.comics.count()
+
+
class UserProfileAdmin(admin.ModelAdmin):
- list_display = ('user', email, 'secret_key')
+ list_display = ('user', email, 'secret_key', subscription_count)
inlines = [SubscriptionInline,]
readonly_fields = ('user',)
|
1d5442aa70d2ed2569cc062d476129840d08a610 | oscar/apps/shipping/repository.py | oscar/apps/shipping/repository.py | from django.core.exceptions import ImproperlyConfigured
from oscar.apps.shipping.methods import Free, NoShippingRequired
class Repository(object):
"""
Repository class responsible for returning ShippingMethod
objects for a given user, basket etc
"""
def get_shipping_methods(self, user, basket, shipping_addr=None, **kwargs):
"""
Return a list of all applicable shipping method objects
for a given basket.
We default to returning the Method models that have been defined but
this behaviour can easily be overridden by subclassing this class
and overriding this method.
"""
methods = [Free()]
return self.add_basket_to_methods(basket, methods)
def get_default_shipping_method(self, user, basket, shipping_addr=None, **kwargs):
methods = self.get_shipping_methods(user, basket, shipping_addr, **kwargs)
if len(methods) == 0:
raise ImproperlyConfigured("You need to define some shipping methods")
return methods[0]
def add_basket_to_methods(self, basket, methods):
for method in methods:
method.set_basket(basket)
return methods
def find_by_code(self, code):
"""
Return the appropriate Method object for the given code
"""
known_methods = [Free, NoShippingRequired]
for klass in known_methods:
if code == getattr(klass, 'code'):
return klass()
return None
| from django.core.exceptions import ImproperlyConfigured
from oscar.apps.shipping.methods import Free, NoShippingRequired
class Repository(object):
"""
Repository class responsible for returning ShippingMethod
objects for a given user, basket etc
"""
def get_shipping_methods(self, user, basket, shipping_addr=None, **kwargs):
"""
Return a list of all applicable shipping method objects
for a given basket.
We default to returning the Method models that have been defined but
this behaviour can easily be overridden by subclassing this class
and overriding this method.
"""
methods = [Free()]
return self.add_basket_to_methods(basket, methods)
def get_default_shipping_method(self, user, basket, shipping_addr=None, **kwargs):
methods = self.get_shipping_methods(user, basket, shipping_addr, **kwargs)
if len(methods) == 0:
raise ImproperlyConfigured("You need to define some shipping methods")
return min(methods, key=lambda method: method.basket_charge_incl_tax())
def add_basket_to_methods(self, basket, methods):
for method in methods:
method.set_basket(basket)
return methods
def find_by_code(self, code):
"""
Return the appropriate Method object for the given code
"""
known_methods = [Free, NoShippingRequired]
for klass in known_methods:
if code == getattr(klass, 'code'):
return klass()
return None
| Make the cheapest shipping method the default one | Make the cheapest shipping method the default one
| Python | bsd-3-clause | mexeniz/django-oscar,kapari/django-oscar,makielab/django-oscar,eddiep1101/django-oscar,Jannes123/django-oscar,thechampanurag/django-oscar,rocopartners/django-oscar,john-parton/django-oscar,elliotthill/django-oscar,itbabu/django-oscar,mexeniz/django-oscar,jinnykoo/wuyisj.com,marcoantoniooliveira/labweb,sonofatailor/django-oscar,vovanbo/django-oscar,ka7eh/django-oscar,jlmadurga/django-oscar,saadatqadri/django-oscar,amirrpp/django-oscar,makielab/django-oscar,bnprk/django-oscar,okfish/django-oscar,ahmetdaglarbas/e-commerce,manevant/django-oscar,ka7eh/django-oscar,kapari/django-oscar,MatthewWilkes/django-oscar,lijoantony/django-oscar,jmt4/django-oscar,elliotthill/django-oscar,anentropic/django-oscar,django-oscar/django-oscar,pdonadeo/django-oscar,jlmadurga/django-oscar,pdonadeo/django-oscar,django-oscar/django-oscar,john-parton/django-oscar,pasqualguerrero/django-oscar,okfish/django-oscar,josesanch/django-oscar,nfletton/django-oscar,WadeYuChen/django-oscar,binarydud/django-oscar,faratro/django-oscar,Jannes123/django-oscar,Idematica/django-oscar,dongguangming/django-oscar,WillisXChen/django-oscar,WillisXChen/django-oscar,lijoantony/django-oscar,rocopartners/django-oscar,nickpack/django-oscar,jlmadurga/django-oscar,sasha0/django-oscar,nfletton/django-oscar,WillisXChen/django-oscar,solarissmoke/django-oscar,taedori81/django-oscar,manevant/django-oscar,dongguangming/django-oscar,adamend/django-oscar,kapt/django-oscar,lijoantony/django-oscar,okfish/django-oscar,nfletton/django-oscar,Idematica/django-oscar,adamend/django-oscar,marcoantoniooliveira/labweb,faratro/django-oscar,solarissmoke/django-oscar,nickpack/django-oscar,nickpack/django-oscar,django-oscar/django-oscar,spartonia/django-oscar,spartonia/django-oscar,okfish/django-oscar,sonofatailor/django-oscar,WadeYuChen/django-oscar,pasqualguerrero/django-oscar,DrOctogon/unwash_ecom,kapari/django-oscar,MatthewWilkes/django-oscar,jinnykoo/christmas,makielab/django-oscar,QLGu/django-oscar,adamend/django
-oscar,sasha0/django-oscar,michaelkuty/django-oscar,ademuk/django-oscar,john-parton/django-oscar,Bogh/django-oscar,bschuon/django-oscar,jmt4/django-oscar,pasqualguerrero/django-oscar,WillisXChen/django-oscar,makielab/django-oscar,django-oscar/django-oscar,vovanbo/django-oscar,saadatqadri/django-oscar,DrOctogon/unwash_ecom,dongguangming/django-oscar,thechampanurag/django-oscar,QLGu/django-oscar,jlmadurga/django-oscar,thechampanurag/django-oscar,monikasulik/django-oscar,binarydud/django-oscar,anentropic/django-oscar,monikasulik/django-oscar,jinnykoo/wuyisj.com,manevant/django-oscar,jinnykoo/wuyisj,rocopartners/django-oscar,MatthewWilkes/django-oscar,Jannes123/django-oscar,lijoantony/django-oscar,john-parton/django-oscar,kapari/django-oscar,anentropic/django-oscar,WadeYuChen/django-oscar,ahmetdaglarbas/e-commerce,monikasulik/django-oscar,marcoantoniooliveira/labweb,bschuon/django-oscar,monikasulik/django-oscar,michaelkuty/django-oscar,michaelkuty/django-oscar,Idematica/django-oscar,adamend/django-oscar,bschuon/django-oscar,taedori81/django-oscar,WillisXChen/django-oscar,pdonadeo/django-oscar,nfletton/django-oscar,thechampanurag/django-oscar,sasha0/django-oscar,ahmetdaglarbas/e-commerce,spartonia/django-oscar,eddiep1101/django-oscar,jinnykoo/wuyisj.com,taedori81/django-oscar,manevant/django-oscar,pdonadeo/django-oscar,kapt/django-oscar,MatthewWilkes/django-oscar,josesanch/django-oscar,jmt4/django-oscar,WadeYuChen/django-oscar,mexeniz/django-oscar,mexeniz/django-oscar,sonofatailor/django-oscar,binarydud/django-oscar,eddiep1101/django-oscar,vovanbo/django-oscar,ahmetdaglarbas/e-commerce,machtfit/django-oscar,itbabu/django-oscar,sonofatailor/django-oscar,Jannes123/django-oscar,faratro/django-oscar,saadatqadri/django-oscar,bnprk/django-oscar,elliotthill/django-oscar,taedori81/django-oscar,rocopartners/django-oscar,vovanbo/django-oscar,bnprk/django-oscar,michaelkuty/django-oscar,anentropic/django-oscar,machtfit/django-oscar,ka7eh/django-oscar,binarydud/django-oscar,itbabu/dj
ango-oscar,marcoantoniooliveira/labweb,eddiep1101/django-oscar,dongguangming/django-oscar,itbabu/django-oscar,jinnykoo/christmas,Bogh/django-oscar,jmt4/django-oscar,ademuk/django-oscar,amirrpp/django-oscar,sasha0/django-oscar,amirrpp/django-oscar,ademuk/django-oscar,pasqualguerrero/django-oscar,faratro/django-oscar,machtfit/django-oscar,jinnykoo/wuyisj,solarissmoke/django-oscar,solarissmoke/django-oscar,amirrpp/django-oscar,QLGu/django-oscar,josesanch/django-oscar,ka7eh/django-oscar,nickpack/django-oscar,jinnykoo/wuyisj,jinnykoo/wuyisj,jinnykoo/wuyisj.com,bschuon/django-oscar,kapt/django-oscar,WillisXChen/django-oscar,Bogh/django-oscar,saadatqadri/django-oscar,ademuk/django-oscar,bnprk/django-oscar,spartonia/django-oscar,QLGu/django-oscar,DrOctogon/unwash_ecom,jinnykoo/christmas,Bogh/django-oscar | ---
+++
@@ -7,16 +7,16 @@
Repository class responsible for returning ShippingMethod
objects for a given user, basket etc
"""
-
+
def get_shipping_methods(self, user, basket, shipping_addr=None, **kwargs):
"""
Return a list of all applicable shipping method objects
for a given basket.
-
+
We default to returning the Method models that have been defined but
this behaviour can easily be overridden by subclassing this class
and overriding this method.
- """
+ """
methods = [Free()]
return self.add_basket_to_methods(basket, methods)
@@ -24,7 +24,7 @@
methods = self.get_shipping_methods(user, basket, shipping_addr, **kwargs)
if len(methods) == 0:
raise ImproperlyConfigured("You need to define some shipping methods")
- return methods[0]
+ return min(methods, key=lambda method: method.basket_charge_incl_tax())
def add_basket_to_methods(self, basket, methods):
for method in methods: |
e337dc55aa5420506c1351d7000e903afdb4d4ef | account_analytic_invoice_line_menu/__openerp__.py | account_analytic_invoice_line_menu/__openerp__.py | # -*- coding: utf-8 -*-
# (c) 2016 Alfredo de la Fuente - AvanzOSC
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
"name": "Account Analytic Invoice Line Menu",
"version": "8.0.1.0.0",
"license": "AGPL-3",
"author": "AvanzOSC",
"website": "http://www.avanzosc.es",
"contributors": [
"Ana Juaristi <anajuaristi@avanzosc.es>",
"Alfredo de la Fuente <alfredodelafuente@avanzosc.es>",
],
"category": "Sales Management",
"depends": [
],
"data": [
"views/account_analytic_invoice_line_view.xml",
],
"installable": True,
}
| # -*- coding: utf-8 -*-
# (c) 2016 Alfredo de la Fuente - AvanzOSC
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
"name": "Account Analytic Invoice Line Menu",
"version": "8.0.1.0.0",
"license": "AGPL-3",
"author": "AvanzOSC",
"website": "http://www.avanzosc.es",
"contributors": [
"Ana Juaristi <anajuaristi@avanzosc.es>",
"Alfredo de la Fuente <alfredodelafuente@avanzosc.es>",
],
"category": "Sales Management",
"depends": [
"account_analytic_analysis"
],
"data": [
"views/account_analytic_invoice_line_view.xml",
],
"installable": True,
}
| Put dependency to "account_analytic_analysis" module. | [FIX] account_analytic_invoice_line_menu: Put dependency to "account_analytic_analysis" module.
| Python | agpl-3.0 | esthermm/odoo-addons,Daniel-CA/odoo-addons,mikelarre/hr-addons,Daniel-CA/odoo-addons,esthermm/odoo-addons,Daniel-CA/odoo-addons,esthermm/odoo-addons | ---
+++
@@ -13,6 +13,7 @@
],
"category": "Sales Management",
"depends": [
+ "account_analytic_analysis"
],
"data": [
"views/account_analytic_invoice_line_view.xml", |
477364a4d2895fc79af2a57ace35ededf0281911 | mistral/db/sqlalchemy/migration/alembic_migrations/versions/003_cron_trigger_constraints.py | mistral/db/sqlalchemy/migration/alembic_migrations/versions/003_cron_trigger_constraints.py | # Copyright 2015 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""cron_trigger_constraints
Revision ID: 003
Revises: 002
Create Date: 2015-05-25 13:09:50.190136
"""
# revision identifiers, used by Alembic.
revision = '003'
down_revision = '002'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column(
'cron_triggers_v2',
sa.Column('first_execution_time', sa.DateTime(), nullable=True)
)
op.drop_index('workflow_input_hash', table_name='cron_triggers_v2')
op.drop_index('workflow_input_hash_2', table_name='cron_triggers_v2')
op.create_unique_constraint(
None,
'cron_triggers_v2', [
'workflow_input_hash', 'workflow_name', 'pattern',
'project_id', 'workflow_params_hash', 'remaining_executions',
'first_execution_time'
]
)
| # Copyright 2015 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""cron_trigger_constraints
Revision ID: 003
Revises: 002
Create Date: 2015-05-25 13:09:50.190136
"""
# revision identifiers, used by Alembic.
revision = '003'
down_revision = '002'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column(
'cron_triggers_v2',
sa.Column('first_execution_time', sa.DateTime(), nullable=True)
)
op.create_unique_constraint(
None,
'cron_triggers_v2', [
'workflow_input_hash', 'workflow_name', 'pattern',
'project_id', 'workflow_params_hash', 'remaining_executions',
'first_execution_time'
]
)
| Fix database upgrade from a new database | Fix database upgrade from a new database
This fixes the problem where running "mistral-db-manage upgrade heads" on a
new database result in error with workflow_input_hash index does not exist.
Change-Id: I560b2b78d11cd3fd4ae9c8606e4336e87b22ef27
Closes-Bug: #1519929
| Python | apache-2.0 | openstack/mistral,StackStorm/mistral,openstack/mistral,StackStorm/mistral | ---
+++
@@ -34,8 +34,7 @@
'cron_triggers_v2',
sa.Column('first_execution_time', sa.DateTime(), nullable=True)
)
- op.drop_index('workflow_input_hash', table_name='cron_triggers_v2')
- op.drop_index('workflow_input_hash_2', table_name='cron_triggers_v2')
+
op.create_unique_constraint(
None,
'cron_triggers_v2', [ |
9f1d4788c5f3751b978da97434b5f6c2e22105b5 | django_inbound_email/__init__.py | django_inbound_email/__init__.py | """An inbound email handler for Django."""
__title__ = 'django-inbound-email'
__version__ = '0.3.3'
__author__ = 'YunoJuno Ltd'
__license__ = 'MIT'
__copyright__ = 'Copyright 2014 YunoJuno'
__description__ = (
"A Django app to make it easy to receive inbound emails from "
"a hosted transactional email service (e.g. SendGrid, Postmark, "
"Mandrill, etc.)."
)
| """An inbound email handler for Django."""
__title__ = 'django-inbound-email'
__version__ = '0.3.3'
__author__ = 'YunoJuno Ltd'
__license__ = 'MIT'
__copyright__ = 'Copyright 2014 YunoJuno'
__description__ = 'A Django app for receiving inbound emails.'
| Update package description so it displays correctly on PyPI. | Update package description so it displays correctly on PyPI.
The description was wrapping, so it appeared with a single '(' character
on PyPI. I've updated it so that it's now all on a single line.
| Python | mit | yunojuno/django-inbound-email | ---
+++
@@ -5,8 +5,4 @@
__author__ = 'YunoJuno Ltd'
__license__ = 'MIT'
__copyright__ = 'Copyright 2014 YunoJuno'
-__description__ = (
- "A Django app to make it easy to receive inbound emails from "
- "a hosted transactional email service (e.g. SendGrid, Postmark, "
- "Mandrill, etc.)."
-)
+__description__ = 'A Django app for receiving inbound emails.' |
d0a87e354cd75429591fe860f070652b2438950e | bisnode/models.py | bisnode/models.py | from django.db import models
from .constants import COMPANY_RATING_REPORT
from .bisnode import get_bisnode_company_report
class BisnodeRatingReport(models.Model):
organization_number = models.CharField(max_length=10, null=True,
blank=True)
rating_code = models.CharField(max_length=3, null=True, blank=True)
date_of_rating = models.DateField(blank=True, null=True)
def get(self):
rating_report = get_bisnode_company_report(
report_type=COMPANY_RATING_REPORT,
organization_number=self.organization_number)
company_data = rating_report.generalCompanyData[0]
self.rating_code = company_data['ratingCode']
self.date_of_rating = company_data['dateOfRating']
self.save()
| Add Bisnode Rating Report model | Add Bisnode Rating Report model
| Python | mit | FundedByMe/django-bisnode | ---
+++
@@ -0,0 +1,21 @@
+from django.db import models
+
+from .constants import COMPANY_RATING_REPORT
+
+from .bisnode import get_bisnode_company_report
+
+
+class BisnodeRatingReport(models.Model):
+ organization_number = models.CharField(max_length=10, null=True,
+ blank=True)
+ rating_code = models.CharField(max_length=3, null=True, blank=True)
+ date_of_rating = models.DateField(blank=True, null=True)
+
+ def get(self):
+ rating_report = get_bisnode_company_report(
+ report_type=COMPANY_RATING_REPORT,
+ organization_number=self.organization_number)
+ company_data = rating_report.generalCompanyData[0]
+ self.rating_code = company_data['ratingCode']
+ self.date_of_rating = company_data['dateOfRating']
+ self.save() | |
f4014dea504293f3630205092d71ac272763c3b3 | blist/__init__.py | blist/__init__.py | from blist._blist import *
import collections
if hasattr(collections, 'MutableSet'): # Only supported in Python 2.6+
from blist._sortedlist import sortedlist, sortedset, weaksortedlist, weaksortedset
from blist._sorteddict import sorteddict
from blist._btuple import btuple
collections.MutableSequence.register(blist)
del collections
| from blist._blist import *
import collections
if hasattr(collections, 'MutableSet'): # Only supported in Python 2.6+
from blist._sortedlist import sortedlist, sortedset, weaksortedlist, weaksortedset
from blist._sorteddict import sorteddict
from blist._btuple import btuple
collections.MutableSequence.register(blist)
del _sortedlist, _sorteddict, _btuple
del collections
| Clean up the blist namespace. | Clean up the blist namespace.
| Python | bsd-3-clause | DanielStutzbach/blist,DanielStutzbach/blist,pfmoore/blist,pfmoore/blist | ---
+++
@@ -5,4 +5,5 @@
from blist._sorteddict import sorteddict
from blist._btuple import btuple
collections.MutableSequence.register(blist)
+ del _sortedlist, _sorteddict, _btuple
del collections |
8c3e3ec6076d8b9ee858fca00d92717d77c67ade | time_lapse.py | time_lapse.py | #!/usr/bin/env python
import sys
import time
import picamera
import settings
from settings import IMAGE, SNAP
import uploader
def main():
with picamera.PiCamera() as camera:
camera.resolution = (IMAGE.resolution_x, IMAGE.resolution_y)
time.sleep(2)
output_file = settings.IMAGES_DIRECTORY + '/img{counter:03d}.jpg'
capture = camera.capture_continuous(output_file, quality=IMAGE.quality)
for i, _ in enumerate(capture):
if i == SNAP.total - 1:
break
time.sleep(SNAP.interval)
if __name__ == '__main__':
while True:
main()
| #!/usr/bin/env python
import time
import picamera
from settings import Job, IMAGES_DIRECTORY
def main():
job = Job()
if job.exists():
resolution_x = job.image_settings.resolution_x
resolution_y = job.image_settings.resolution_y
image_quality = job.image_settings.quality
snap_interval = job.snap_settings.interval
snap_total = job.snap_settings.total
with picamera.PiCamera() as camera:
camera.resolution = (resolution_x, resolution_y)
time.sleep(2)
output_file = IMAGES_DIRECTORY + '/img{counter:03d}.jpg'
capture = camera.capture_continuous(output_file, quality=image_quality)
for i, _ in enumerate(capture):
if i == snap_total - 1:
job.archive()
break
time.sleep(snap_interval)
if __name__ == '__main__':
while True:
main()
| Check for job in main loop | Check for job in main loop
| Python | mit | projectweekend/Pi-Camera-Time-Lapse,projectweekend/Pi-Camera-Time-Lapse | ---
+++
@@ -1,23 +1,28 @@
#!/usr/bin/env python
-import sys
import time
import picamera
-import settings
-from settings import IMAGE, SNAP
-import uploader
+from settings import Job, IMAGES_DIRECTORY
def main():
- with picamera.PiCamera() as camera:
- camera.resolution = (IMAGE.resolution_x, IMAGE.resolution_y)
- time.sleep(2)
- output_file = settings.IMAGES_DIRECTORY + '/img{counter:03d}.jpg'
- capture = camera.capture_continuous(output_file, quality=IMAGE.quality)
- for i, _ in enumerate(capture):
- if i == SNAP.total - 1:
- break
- time.sleep(SNAP.interval)
+ job = Job()
+ if job.exists():
+ resolution_x = job.image_settings.resolution_x
+ resolution_y = job.image_settings.resolution_y
+ image_quality = job.image_settings.quality
+ snap_interval = job.snap_settings.interval
+ snap_total = job.snap_settings.total
+ with picamera.PiCamera() as camera:
+ camera.resolution = (resolution_x, resolution_y)
+ time.sleep(2)
+ output_file = IMAGES_DIRECTORY + '/img{counter:03d}.jpg'
+ capture = camera.capture_continuous(output_file, quality=image_quality)
+ for i, _ in enumerate(capture):
+ if i == snap_total - 1:
+ job.archive()
+ break
+ time.sleep(snap_interval)
if __name__ == '__main__': |
7f1a58f9faacb0bb0e95c2527a348195742eb866 | tornado/test/autoreload_test.py | tornado/test/autoreload_test.py | from __future__ import absolute_import, division, print_function
import os
import subprocess
from subprocess import Popen
import sys
from tempfile import mkdtemp
from tornado.test.util import unittest
MAIN = """\
import os
import sys
from tornado import autoreload
# This import will fail if path is not set up correctly
import testapp
print('Starting')
if 'TESTAPP_STARTED' not in os.environ:
os.environ['TESTAPP_STARTED'] = '1'
sys.stdout.flush()
autoreload._reload()
"""
class AutoreloadTest(unittest.TestCase):
def test_reload_module(self):
# Create temporary test application
path = mkdtemp()
os.mkdir(os.path.join(path, 'testapp'))
open(os.path.join(path, 'testapp/__init__.py'), 'w').close()
with open(os.path.join(path, 'testapp/__main__.py'), 'w') as f:
f.write(MAIN)
# Make sure the tornado module under test is available to the test
# application
pythonpath = os.getcwd()
if 'PYTHONPATH' in os.environ:
pythonpath += os.pathsep + os.environ['PYTHONPATH']
p = Popen([sys.executable, '-m', 'testapp'], stdout=subprocess.PIPE,
cwd=path, env=dict(os.environ, PYTHONPATH=pythonpath))
out = p.communicate()[0].decode()
self.assertEqual(out, 'Starting\nStarting\n')
| from __future__ import absolute_import, division, print_function
import os
import subprocess
from subprocess import Popen
import sys
from tempfile import mkdtemp
from tornado.test.util import unittest
MAIN = """\
import os
import sys
from tornado import autoreload
# This import will fail if path is not set up correctly
import testapp
print('Starting')
if 'TESTAPP_STARTED' not in os.environ:
os.environ['TESTAPP_STARTED'] = '1'
sys.stdout.flush()
autoreload._reload()
"""
class AutoreloadTest(unittest.TestCase):
def test_reload_module(self):
# Create temporary test application
path = mkdtemp()
os.mkdir(os.path.join(path, 'testapp'))
open(os.path.join(path, 'testapp/__init__.py'), 'w').close()
with open(os.path.join(path, 'testapp/__main__.py'), 'w') as f:
f.write(MAIN)
# Make sure the tornado module under test is available to the test
# application
pythonpath = os.getcwd()
if 'PYTHONPATH' in os.environ:
pythonpath += os.pathsep + os.environ['PYTHONPATH']
p = Popen(
[sys.executable, '-m', 'testapp'], stdout=subprocess.PIPE,
cwd=path, env=dict(os.environ, PYTHONPATH=pythonpath),
universal_newlines=True)
out = p.communicate()[0]
self.assertEqual(out, 'Starting\nStarting\n')
| Fix newline handling in autoreload test | Fix newline handling in autoreload test
| Python | apache-2.0 | SuminAndrew/tornado,mivade/tornado,legnaleurc/tornado,tornadoweb/tornado,ifduyue/tornado,bdarnell/tornado,NoyaInRain/tornado,bdarnell/tornado,ajdavis/tornado,NoyaInRain/tornado,bdarnell/tornado,eklitzke/tornado,wujuguang/tornado,allenl203/tornado,SuminAndrew/tornado,Lancher/tornado,Lancher/tornado,NoyaInRain/tornado,NoyaInRain/tornado,lilydjwg/tornado,allenl203/tornado,ajdavis/tornado,lilydjwg/tornado,hhru/tornado,SuminAndrew/tornado,ajdavis/tornado,wujuguang/tornado,wujuguang/tornado,dongpinglai/my_tornado,SuminAndrew/tornado,NoyaInRain/tornado,lilydjwg/tornado,allenl203/tornado,Lancher/tornado,mivade/tornado,dongpinglai/my_tornado,eklitzke/tornado,legnaleurc/tornado,hhru/tornado,wujuguang/tornado,bdarnell/tornado,mivade/tornado,dongpinglai/my_tornado,ifduyue/tornado,NoyaInRain/tornado,allenl203/tornado,mivade/tornado,hhru/tornado,dongpinglai/my_tornado,hhru/tornado,tornadoweb/tornado,ifduyue/tornado,mivade/tornado,lilydjwg/tornado,Lancher/tornado,dongpinglai/my_tornado,bdarnell/tornado,ifduyue/tornado,eklitzke/tornado,Lancher/tornado,allenl203/tornado,tornadoweb/tornado,eklitzke/tornado,ajdavis/tornado,ajdavis/tornado,tornadoweb/tornado,legnaleurc/tornado,legnaleurc/tornado,SuminAndrew/tornado,ifduyue/tornado,legnaleurc/tornado,hhru/tornado,eklitzke/tornado,wujuguang/tornado,dongpinglai/my_tornado | ---
+++
@@ -40,7 +40,9 @@
if 'PYTHONPATH' in os.environ:
pythonpath += os.pathsep + os.environ['PYTHONPATH']
- p = Popen([sys.executable, '-m', 'testapp'], stdout=subprocess.PIPE,
- cwd=path, env=dict(os.environ, PYTHONPATH=pythonpath))
- out = p.communicate()[0].decode()
+ p = Popen(
+ [sys.executable, '-m', 'testapp'], stdout=subprocess.PIPE,
+ cwd=path, env=dict(os.environ, PYTHONPATH=pythonpath),
+ universal_newlines=True)
+ out = p.communicate()[0]
self.assertEqual(out, 'Starting\nStarting\n') |
fd78a7c2cdff3aca182b12f7e9fa5e0819c68720 | tests/factory/makers/test_selections.py | tests/factory/makers/test_selections.py | from mongoframes.factory import makers
from mongoframes.factory.makers import selections as selection_makers
from tests.fixtures import *
def test_cycle():
"""
`Cycle` makers should return values from a list of items (python types of or
makers) one after another.
"""
# Configured to cycle through a list of python types
maker = selection_makers.Cycle(['foo', 'bar', 'zee'])
for i, value in enumerate(['foo', 'bar', 'zee']):
# Check the assembled result
assembled = maker._assemble()
assert assembled == [i, None]
# Check the finished result
finished = maker._finish(assembled)
assert finished == value
# Configured to cycle throguh a list of makers
maker = selection_makers.Cycle([
makers.Static('foo'),
makers.Static('bar'),
makers.Static('zee')
])
for i, value in enumerate(['foo', 'bar', 'zee']):
# Check the assembled result
assembled = maker._assemble()
assert assembled == [i, value]
# Check the finished result
finished = maker._finish(assembled)
assert finished == value
| Add test for `Cycle` maker. | Add test for `Cycle` maker.
| Python | mit | GetmeUK/MongoFrames | ---
+++
@@ -0,0 +1,39 @@
+from mongoframes.factory import makers
+from mongoframes.factory.makers import selections as selection_makers
+
+from tests.fixtures import *
+
+
+def test_cycle():
+ """
+ `Cycle` makers should return values from a list of items (python types of or
+ makers) one after another.
+ """
+
+ # Configured to cycle through a list of python types
+ maker = selection_makers.Cycle(['foo', 'bar', 'zee'])
+
+ for i, value in enumerate(['foo', 'bar', 'zee']):
+ # Check the assembled result
+ assembled = maker._assemble()
+ assert assembled == [i, None]
+
+ # Check the finished result
+ finished = maker._finish(assembled)
+ assert finished == value
+
+ # Configured to cycle throguh a list of makers
+ maker = selection_makers.Cycle([
+ makers.Static('foo'),
+ makers.Static('bar'),
+ makers.Static('zee')
+ ])
+
+ for i, value in enumerate(['foo', 'bar', 'zee']):
+ # Check the assembled result
+ assembled = maker._assemble()
+ assert assembled == [i, value]
+
+ # Check the finished result
+ finished = maker._finish(assembled)
+ assert finished == value | |
ace1997f5d1cab297ab68886501b45602b2d8e2d | cards/models.py | cards/models.py | from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from time import time
def card_image_filename(instance, filename):
timestamp = int(time())
return 'cards/%s%d.jpg' % (instance, timestamp)
@python_2_unicode_compatible
class Card(models.Model):
title = models.CharField(max_length=140, unique=True)
is_title_visible = models.BooleanField(default=True)
text = models.TextField()
secondary_text = models.TextField(null=True, blank=True)
author = models.CharField(max_length=100, null=True, blank=True)
image = models.ImageField(upload_to=card_image_filename, null=True, blank=True)
creation_datetime = models.DateTimeField(auto_now_add=True)
update_datetime = models.DateTimeField(auto_now=True)
is_active = models.BooleanField(default=True)
created_by = models.ForeignKey(User, null=True, blank=True, related_name='%(class)s_created_by')
updated_by = models.ForeignKey(User, null=True, blank=True, related_name='%(class)s_updated_by')
def __str__(self):
return self.title
| from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from time import time
def card_image_filename(instance):
timestamp = int(time())
return 'cards/%s%d.jpg' % (instance, timestamp)
@python_2_unicode_compatible
class Card(models.Model):
title = models.CharField(max_length=140, unique=True)
is_title_visible = models.BooleanField(default=True)
text = models.TextField()
secondary_text = models.TextField(null=True, blank=True)
author = models.CharField(max_length=100, null=True, blank=True)
image = models.ImageField(upload_to=card_image_filename, null=True, blank=True)
creation_datetime = models.DateTimeField(auto_now_add=True)
update_datetime = models.DateTimeField(auto_now=True)
is_active = models.BooleanField(default=True)
created_by = models.ForeignKey(User, null=True, blank=True, related_name='%(class)s_created_by')
updated_by = models.ForeignKey(User, null=True, blank=True, related_name='%(class)s_updated_by')
def __str__(self):
return self.title
| Remove unused filename parameter from card image filename function | Remove unused filename parameter from card image filename function
| Python | mit | neosergio/WisdomBox | ---
+++
@@ -6,7 +6,7 @@
from time import time
-def card_image_filename(instance, filename):
+def card_image_filename(instance):
timestamp = int(time())
return 'cards/%s%d.jpg' % (instance, timestamp)
|
1261777b6aaaea6947a32477e340ef1597045866 | nested_admin/urls.py | nested_admin/urls.py | try:
from django.conf.urls.defaults import patterns, url
except ImportError:
from django.conf.urls import patterns, url
urlpatterns = patterns('',
url(r'^server-data\.js$', 'nested_admin.views.server_data_js',
name="nesting_server_data"),
)
| from django.conf.urls import patterns, url
urlpatterns = patterns('',
url(r'^server-data\.js$', 'nested_admin.views.server_data_js',
name="nesting_server_data"),
)
| Fix DeprecationWarning in Django 1.5 | Fix DeprecationWarning in Django 1.5
| Python | bsd-2-clause | sbussetti/django-nested-admin,sbussetti/django-nested-admin,olivierdalang/django-nested-admin,sbussetti/django-nested-admin,olivierdalang/django-nested-admin,olivierdalang/django-nested-admin | ---
+++
@@ -1,7 +1,4 @@
-try:
- from django.conf.urls.defaults import patterns, url
-except ImportError:
- from django.conf.urls import patterns, url
+from django.conf.urls import patterns, url
urlpatterns = patterns('', |
2005e048f7342c011f4bc08899d5cb4d4a15357a | debugtools/middleware/xviewmiddleware.py | debugtools/middleware/xviewmiddleware.py | from debugtools.utils.xview import track_view_name, get_used_view_name, get_used_template
class XViewMiddleware(object):
"""
Adds an X-View header to requests.
If the request IP is internal or the user is a logged-in staff member,
add an ``X-View`` header to the response.
This is a variation of the default Django XViewMiddleware, which only works with HEAD requests
as it is specifically designed for the documentation system.
"""
def process_view(self, request, view_func, view_args, view_kwargs):
assert hasattr(request, 'user'), (
"The XView middleware requires authentication middleware to be "
"installed. Edit your MIDDLEWARE_CLASSES setting to insert "
"'django.contrib.auth.middleware.AuthenticationMiddleware'.")
track_view_name(request, view_func)
def process_response(self, request, response):
view_name = get_used_view_name(request)
if view_name:
response['X-View'] = view_name
template_name, choices = get_used_template(response)
if template_name:
if choices:
response['X-View-Template'] = '{0} (out of: {1})'.format(template_name, ', '.join(choices))
else:
response['X-View-Template'] = template_name
return response
| import django
from debugtools.utils.xview import track_view_name, get_used_view_name, get_used_template
if django.VERSION >= (1, 10):
from django.utils.deprecation import MiddlewareMixin
else:
MiddlewareMixin = object
class XViewMiddleware(MiddlewareMixin):
"""
Adds an X-View header to requests.
If the request IP is internal or the user is a logged-in staff member,
add an ``X-View`` header to the response.
This is a variation of the default Django XViewMiddleware, which only works with HEAD requests
as it is specifically designed for the documentation system.
"""
def process_view(self, request, view_func, view_args, view_kwargs):
assert hasattr(request, 'user'), (
"The XView middleware requires authentication middleware to be "
"installed. Edit your MIDDLEWARE_CLASSES setting to insert "
"'django.contrib.auth.middleware.AuthenticationMiddleware'.")
track_view_name(request, view_func)
def process_response(self, request, response):
view_name = get_used_view_name(request)
if view_name:
response['X-View'] = view_name
template_name, choices = get_used_template(response)
if template_name:
if choices:
response['X-View-Template'] = '{0} (out of: {1})'.format(template_name, ', '.join(choices))
else:
response['X-View-Template'] = template_name
return response
| Add MiddlewareMixin for Django 1.10 middleware compatibility | Add MiddlewareMixin for Django 1.10 middleware compatibility
| Python | apache-2.0 | edoburu/django-debugtools,edoburu/django-debugtools,edoburu/django-debugtools | ---
+++
@@ -1,7 +1,14 @@
+import django
from debugtools.utils.xview import track_view_name, get_used_view_name, get_used_template
-class XViewMiddleware(object):
+if django.VERSION >= (1, 10):
+ from django.utils.deprecation import MiddlewareMixin
+else:
+ MiddlewareMixin = object
+
+
+class XViewMiddleware(MiddlewareMixin):
"""
Adds an X-View header to requests.
|
1cb01d48246baf0ca84a23bd8718d46471b8105f | bin/mpy-tool-wrapper.py | bin/mpy-tool-wrapper.py | #!/usr/bin/env python
#
# Wrapper for mpy-cross.py.
#
from __future__ import print_function
import sys
import os
import subprocess
def main():
command = sys.argv[3:]
env = dict(os.environ, PYTHONPATH=sys.argv[2])
with open(sys.argv[1], 'w') as f:
f.write(subprocess.check_output(command, env=env))
if __name__ == '__main__':
main()
| #!/usr/bin/env python
#
# Wrapper for mpy-cross.py.
#
from __future__ import print_function
import sys
import os
import subprocess
def main():
command = sys.argv[3:]
env = dict(os.environ, PYTHONPATH=sys.argv[2])
with open(sys.argv[1], 'w') as f:
f.write(subprocess.check_output(command, env=env).decode("utf-8"))
if __name__ == '__main__':
main()
| Add support for Python 3 | Add support for Python 3 | Python | mit | eerimoq/pumba,eerimoq/pumbaa,eerimoq/pumbaa,eerimoq/pumbaa,eerimoq/pumbaa,eerimoq/pumba,eerimoq/pumba | ---
+++
@@ -14,8 +14,8 @@
command = sys.argv[3:]
env = dict(os.environ, PYTHONPATH=sys.argv[2])
with open(sys.argv[1], 'w') as f:
- f.write(subprocess.check_output(command, env=env))
-
+ f.write(subprocess.check_output(command, env=env).decode("utf-8"))
+
if __name__ == '__main__':
main() |
108a05b050383bca218cd02be499f1fad58065dc | test/test_refmanage.py | test/test_refmanage.py | # -*- coding: utf-8 -*-
import unittest
import pathlib2 as pathlib
import refmanage
class NoSpecifiedFunctionality(unittest.TestCase):
"""
Tests when no functionality has been specified on cli
"""
def test_no_args(self):
"""
`ref` without arguments should print the help text
"""
pass
def test_version(self):
"""
`ref --version` should return version string
"""
pass
class TestFunctionality(unittest.TestCase):
"""
Test "test" functionality
"""
def test_no_args(self):
"""
`ref test` without additonal arguments should print the help text
"""
pass
def test_default(self):
"""
`ref test *.bib` without flags should default to --unparseable and print list of unparseable files
"""
pass
def test_unparseable(self):
"""
`ref test -u *.bib` should print list of unparseable files
"""
pass
def test_unparseable_verbose(self):
"""
`ref test -uv *.bib` should print list of unparseable files with information about corresponding parsing message
"""
pass
def test_parseable(self):
"""
`ref test -p *.bib` should print list of parseable files
"""
pass
def test_parseable_verbose(self):
"""
`ref test -pv *.bib` should print list of parseable files and nothing more
"""
pass
def test_parseable_unparseable(self):
"""
`ref test -up *.bib` should exit with an error
"""
pass
| # -*- coding: utf-8 -*-
import unittest
import pathlib2 as pathlib
import refmanage
class NoSpecifiedFunctionality(unittest.TestCase):
"""
Tests when no functionality has been specified on cli
"""
def test_no_args(self):
"""
`ref` without arguments should print the help text
"""
self.fail()
def test_version(self):
"""
`ref --version` should return version string
"""
self.fail()
class TestFunctionality(unittest.TestCase):
"""
Test "test" functionality
"""
def test_no_args(self):
"""
`ref test` without additonal arguments should print the help text
"""
self.fail()
def test_default(self):
"""
`ref test *.bib` without flags should default to --unparseable and print list of unparseable files
"""
self.fail()
def test_unparseable(self):
"""
`ref test -u *.bib` should print list of unparseable files
"""
self.fail()
def test_unparseable_verbose(self):
"""
`ref test -uv *.bib` should print list of unparseable files with information about corresponding parsing message
"""
self.fail()
def test_parseable(self):
"""
`ref test -p *.bib` should print list of parseable files
"""
self.fail()
def test_parseable_verbose(self):
"""
`ref test -pv *.bib` should print list of parseable files and nothing more
"""
self.fail()
def test_parseable_unparseable(self):
"""
`ref test -up *.bib` should exit with an error
"""
self.fail()
| Replace "pass" with "self.fail()" in tests | Replace "pass" with "self.fail()" in tests
In this way, tests that haven't been written will run noisily instead of
silently, encouraging completion of writing tests.
| Python | mit | jrsmith3/refmanage | ---
+++
@@ -11,13 +11,13 @@
"""
`ref` without arguments should print the help text
"""
- pass
+ self.fail()
def test_version(self):
"""
`ref --version` should return version string
"""
- pass
+ self.fail()
class TestFunctionality(unittest.TestCase):
"""
@@ -27,41 +27,41 @@
"""
`ref test` without additonal arguments should print the help text
"""
- pass
+ self.fail()
def test_default(self):
"""
`ref test *.bib` without flags should default to --unparseable and print list of unparseable files
"""
- pass
+ self.fail()
def test_unparseable(self):
"""
`ref test -u *.bib` should print list of unparseable files
"""
- pass
+ self.fail()
def test_unparseable_verbose(self):
"""
`ref test -uv *.bib` should print list of unparseable files with information about corresponding parsing message
"""
- pass
+ self.fail()
def test_parseable(self):
"""
`ref test -p *.bib` should print list of parseable files
"""
- pass
+ self.fail()
def test_parseable_verbose(self):
"""
`ref test -pv *.bib` should print list of parseable files and nothing more
"""
- pass
+ self.fail()
def test_parseable_unparseable(self):
"""
`ref test -up *.bib` should exit with an error
"""
- pass
+ self.fail() |
5156af5576d5663555bc04f5960e7e4cdd861166 | objectrocket/util.py | objectrocket/util.py | """Utility code for the objectrocket package."""
import types
def register_extension_class(ext, base, *args, **kwargs):
"""Instantiate the given extension class and register as a public attribute of the given base.
README: The expected protocol here to instantiate the given extension and pass the base object
as the first positional argument, then unpack args and kwargs as additional arguments to the
extension's constructor.
"""
ext_instance = ext.plugin(base, *args, **kwargs)
setattr(base, ext.name.lstrip('_'), ext_instance)
def register_extension_method(ext, base, *args, **kwargs):
"""Register the given extension method as a public attribute of the given base."""
bound_method = types.MethodType(ext.plugin, base, base.__class__)
setattr(base, ext.name.lstrip('_'), bound_method)
| """Utility code for the objectrocket package."""
import types
def register_extension_class(ext, base, *args, **kwargs):
"""Instantiate the given extension class and register as a public attribute of the given base.
README: The expected protocol here is to instantiate the given extension and pass the base
object as the first positional argument, then unpack args and kwargs as additional arguments to
the extension's constructor.
"""
ext_instance = ext.plugin(base, *args, **kwargs)
setattr(base, ext.name.lstrip('_'), ext_instance)
def register_extension_method(ext, base, *args, **kwargs):
"""Register the given extension method as a public attribute of the given base.
README: The expected protocol here is that the given extension method is an unbound function.
It will be bound to the specified base as a method, and then set as a public attribute of that
base.
"""
bound_method = types.MethodType(ext.plugin, base, base.__class__)
setattr(base, ext.name.lstrip('_'), bound_method)
| Clean up docs on extension protocols. | Clean up docs on extension protocols.
| Python | mit | objectrocket/python-client,objectrocket/python-client | ---
+++
@@ -5,15 +5,20 @@
def register_extension_class(ext, base, *args, **kwargs):
"""Instantiate the given extension class and register as a public attribute of the given base.
- README: The expected protocol here to instantiate the given extension and pass the base object
- as the first positional argument, then unpack args and kwargs as additional arguments to the
- extension's constructor.
+ README: The expected protocol here is to instantiate the given extension and pass the base
+ object as the first positional argument, then unpack args and kwargs as additional arguments to
+ the extension's constructor.
"""
ext_instance = ext.plugin(base, *args, **kwargs)
setattr(base, ext.name.lstrip('_'), ext_instance)
def register_extension_method(ext, base, *args, **kwargs):
- """Register the given extension method as a public attribute of the given base."""
+ """Register the given extension method as a public attribute of the given base.
+
+ README: The expected protocol here is that the given extension method is an unbound function.
+ It will be bound to the specified base as a method, and then set as a public attribute of that
+ base.
+ """
bound_method = types.MethodType(ext.plugin, base, base.__class__)
setattr(base, ext.name.lstrip('_'), bound_method) |
1a8419c6b91276cf578f4c354e34d17551ac2403 | metakernel/__init__.py | metakernel/__init__.py | from ._metakernel import MetaKernel
from . import pexpect
from . import replwrap
from .process_metakernel import ProcessMetaKernel
from .magic import Magic, option
from .parser import Parser
__all__ = ['Magic', 'MetaKernel', 'option']
__version__ = '0.3'
del magic, _metakernel, parser, process_metakernel
| from ._metakernel import MetaKernel
from . import pexpect
from .replwrap import REPLWrapper, u
from .process_metakernel import ProcessMetaKernel
from .magic import Magic, option
from .parser import Parser
__all__ = ['Magic', 'MetaKernel', 'option']
__version__ = '0.3'
del magic, _metakernel, parser, process_metakernel
| Move REPLWrapper and u() function to pkg level | Move REPLWrapper and u() function to pkg level
| Python | bsd-3-clause | Calysto/metakernel | ---
+++
@@ -1,6 +1,6 @@
from ._metakernel import MetaKernel
from . import pexpect
-from . import replwrap
+from .replwrap import REPLWrapper, u
from .process_metakernel import ProcessMetaKernel
from .magic import Magic, option
from .parser import Parser |
e30b8b60de491721f635300840b08b481250fea6 | microbower/__init__.py | microbower/__init__.py |
from subprocess import check_call
import urllib
import json
import os
import os.path
def install():
if not (os.path.isfile('.bowerrc') and os.path.isfile('bower.json')):
return
with open('.bowerrc') as f:
bowerrc = json.load(f)
with open('bower.json') as f:
bower_json = json.load(f)
if not os.path.isdir(bowerrc['directory']):
os.makedirs(bowerrc['directory'])
registry = 'https://bower.herokuapp.com'
topdir = os.path.abspath(os.curdir)
for pkg in bower_json['dependencies'].keys():
req = urllib.urlopen('%s/packages/%s' % (registry, pkg))
info = json.load(req)
os.chdir(bowerrc['directory'])
check_call(['git', 'clone', info['url']])
os.chdir(pkg)
install()
os.chdir(topdir)
|
from subprocess import check_call
import urllib
import json
import os
import os.path
def install():
if not (os.path.isfile('.bowerrc') and os.path.isfile('bower.json')):
return
with open('.bowerrc') as f:
bowerrc = json.load(f)
with open('bower.json') as f:
bower_json = json.load(f)
if not os.path.isdir(bowerrc['directory']):
os.makedirs(bowerrc['directory'])
registry = 'https://bower.herokuapp.com'
topdir = os.path.abspath(os.curdir)
for pkg in bower_json['dependencies'].keys():
req = urllib.urlopen('%s/packages/%s' % (registry, pkg))
info = json.load(req)
os.chdir(bowerrc['directory'])
if not os.path.isdir(os.path.join(pkg, '.git')):
check_call(['git', 'clone', info['url'], pkg])
os.chdir(pkg)
install()
os.chdir(topdir)
| Check if the git repo already exists before cloning it again | Check if the git repo already exists before cloning it again
| Python | isc | zenhack/microbower | ---
+++
@@ -24,7 +24,8 @@
req = urllib.urlopen('%s/packages/%s' % (registry, pkg))
info = json.load(req)
os.chdir(bowerrc['directory'])
- check_call(['git', 'clone', info['url']])
+ if not os.path.isdir(os.path.join(pkg, '.git')):
+ check_call(['git', 'clone', info['url'], pkg])
os.chdir(pkg)
install()
os.chdir(topdir) |
4c0cc07edc566b2feebefbcc301a6e16033c613a | bin/register_jsonl.py | bin/register_jsonl.py | #!/usr/bin/env python
# create a single jsonl file from individual register entries
import sys
import os
from openregister import Item
from openregister.representations.jsonl import Writer
register = sys.argv[1] or "register"
dirname = "data/" + register + "/"
writer = Writer(sys.stdout)
for file in os.listdir(dirname):
if file.endswith(".yaml"):
item = Item()
item.yaml = open(dirname + file).read()
writer.write(item)
writer.close()
| #!/usr/bin/env python
# create a single jsonl file from individual register entries
import sys
import os
from openregister import Item
from openregister.representations.jsonl import Writer
register = sys.argv[1] or "register"
dirname = os.path.join("data", register)
writer = Writer(sys.stdout)
for file in os.listdir(dirname):
if file.endswith(".yaml"):
item = Item()
item.yaml = open(os.path.join(dirname, file)).read()
writer.write(item)
writer.close()
| Use os.path.join to build prod | Use os.path.join to build prod
| Python | mit | openregister/registry-data | ---
+++
@@ -8,14 +8,14 @@
from openregister.representations.jsonl import Writer
register = sys.argv[1] or "register"
-dirname = "data/" + register + "/"
+dirname = os.path.join("data", register)
writer = Writer(sys.stdout)
for file in os.listdir(dirname):
if file.endswith(".yaml"):
item = Item()
- item.yaml = open(dirname + file).read()
+ item.yaml = open(os.path.join(dirname, file)).read()
writer.write(item)
writer.close() |
8f698f862e1ea4e2c17e8ddd14052c83bf87ea4c | adzone/views.py | adzone/views.py | # -*- coding: utf-8 -*-
# © Copyright 2009 Andre Engelbrecht. All Rights Reserved.
# This script is licensed under the BSD Open Source Licence
# Please see the text file LICENCE for more information
# If this script is distributed, it must be accompanied by the Licence
from datetime import datetime
from django.shortcuts import get_object_or_404
from django.http import HttpResponseRedirect
from adzone.models import AdBase, AdClick
def ad_view(request, id):
""" Record the click in the database, then redirect to ad url """
ad = get_object_or_404(AdBase, id=id)
click = AdClick.objects.create(
ad=ad,
click_date=datetime.now(),
source_ip=request.META.get('REMOTE_ADDR', '')
)
click.save()
redirect_url = ad.url
if not redirect_url.startswith('http://'):
# Add http:// to the url so that the browser redirects correctly
redirect_url = 'http://' + redirect_url
return HttpResponseRedirect(redirect_url)
| # -*- coding: utf-8 -*-
# © Copyright 2009 Andre Engelbrecht. All Rights Reserved.
# This script is licensed under the BSD Open Source Licence
# Please see the text file LICENCE for more information
# If this script is distributed, it must be accompanied by the Licence
import re
from datetime import datetime
from django.shortcuts import get_object_or_404
from django.http import HttpResponseRedirect
from adzone.models import AdBase, AdClick
http_re = re.compile(r'^https?://')
def ad_view(request, id):
""" Record the click in the database, then redirect to ad url """
ad = get_object_or_404(AdBase, id=id)
click = AdClick.objects.create(
ad=ad,
click_date=datetime.now(),
source_ip=request.META.get('REMOTE_ADDR', '')
)
click.save()
redirect_url = ad.url
if not http_re.match(redirect_url):
# Add http:// to the url so that the browser redirects correctly
redirect_url = 'http://' + redirect_url
return HttpResponseRedirect(redirect_url)
| Improve http check to allow https as well | Improve http check to allow https as well
We switch from 'startswith' to a regex check which allows both as we
tested with https facebook urls and it failed to handle them properly.
| Python | bsd-3-clause | michaeljones/django-adzone,michaeljones/django-adzone | ---
+++
@@ -5,12 +5,17 @@
# Please see the text file LICENCE for more information
# If this script is distributed, it must be accompanied by the Licence
+import re
+
from datetime import datetime
from django.shortcuts import get_object_or_404
from django.http import HttpResponseRedirect
from adzone.models import AdBase, AdClick
+
+
+http_re = re.compile(r'^https?://')
def ad_view(request, id):
@@ -25,7 +30,7 @@
click.save()
redirect_url = ad.url
- if not redirect_url.startswith('http://'):
+ if not http_re.match(redirect_url):
# Add http:// to the url so that the browser redirects correctly
redirect_url = 'http://' + redirect_url
|
f29477416729df9cc198f679a2478f6a077ce365 | app/util.py | app/util.py | # Various utility functions
import os
from typing import Any, Callable
SHOULD_CACHE = os.environ.get('ENV', 'development') == 'production'
def cached_function(func: Callable[..., Any]) -> Callable[..., Any]:
data = {}
def wrapper(*args: Any) -> Any:
if not SHOULD_CACHE:
return func(*args)
cache_key = ' '.join([str(x) for x in args])
if cache_key not in data:
data[cache_key] = func(*args)
return data[cache_key]
wrapper.__qualname__ = func.__qualname__
return wrapper
| # Various utility functions
import inspect
import os
from typing import Any, Callable
SHOULD_CACHE = os.environ.get('ENV', 'development') == 'production'
def cached_function(func: Callable[..., Any]) -> Callable[..., Any]:
data = {}
def wrapper(*args: Any) -> Any:
if not SHOULD_CACHE:
return func(*args)
cache_key = ' '.join([str(x) for x in args])
if cache_key not in data:
data[cache_key] = func(*args)
return data[cache_key]
wrapper.__qualname__ = func.__qualname__
wrapper.__signature__ = inspect.signature(func) # type: ignore
return wrapper
| Make cached_function not overwrite signature of wrapped function | Make cached_function not overwrite signature of wrapped function
| Python | mit | albertyw/albertyw.com,albertyw/albertyw.com,albertyw/albertyw.com,albertyw/albertyw.com,albertyw/albertyw.com | ---
+++
@@ -1,4 +1,5 @@
# Various utility functions
+import inspect
import os
from typing import Any, Callable
@@ -18,4 +19,5 @@
return data[cache_key]
wrapper.__qualname__ = func.__qualname__
+ wrapper.__signature__ = inspect.signature(func) # type: ignore
return wrapper |
8f66d41be2ffc8dd42392a30e6eefcbb6da9b667 | sheared/web/entwine.py | sheared/web/entwine.py | import warnings
from dtml import tal, metal, tales, context
from sheared.python import io
from sheared.python import benchmark
class Entwiner:
def __init__(self):
self.builtins = context.BuiltIns({})
#self.context = context.Context()
#self.context.setDefaults(self.builtins)
def handle(self, request, reply, subpath):
self.context = {}
self.entwine(request, reply, subpath)
r = self.execute(self.page_path, throwaway=0)
reply.send(r)
def execute(self, path, throwaway=1):
r = io.readfile(path)
c = tal.compile(r, tales)
r = tal.execute(c, self.context, self.builtins, tales)
c = metal.compile(r, tales)
r = metal.execute(c, self.context, self.builtins, tales)
if throwaway and r.strip():
warnings.warn('%s: ignored non-macro content' % path)
return r
| import warnings
from dtml import tal, metal, tales, context
from sheared.python import io
class Entwiner:
def __init__(self):
self.builtins = context.BuiltIns({})
#self.context = context.Context()
#self.context.setDefaults(self.builtins)
def handle(self, request, reply, subpath):
self.context = {}
self.entwine(request, reply, subpath)
r = self.execute(self.page_path, throwaway=0)
reply.send(r)
def execute(self, path, throwaway=1):
r = io.readfile(path)
c = tal.compile(r, tales)
r = tal.execute(c, self.context, self.builtins, tales)
c = metal.compile(r, tales)
r = metal.execute(c, self.context, self.builtins, tales)
if throwaway and r.strip():
warnings.warn('%s: ignored non-macro content' % path)
return r
| Remove import of n/a benchmark module. | Remove import of n/a benchmark module.
git-svn-id: 8b0eea19d26e20ec80f5c0ea247ec202fbcc1090@75 5646265b-94b7-0310-9681-9501d24b2df7
| Python | mit | kirkeby/sheared | ---
+++
@@ -3,7 +3,6 @@
from dtml import tal, metal, tales, context
from sheared.python import io
-from sheared.python import benchmark
class Entwiner:
def __init__(self): |
27ff37e0b0f87d112ea98ff9c3674abed4a3e413 | fontcrunch/__init__.py | fontcrunch/__init__.py | from __future__ import print_function
from fontTools import ttLib
from multiprocessing import Pool
from .fontcrunch import optimize_glyph, plot_glyph
def _optimize(args):
font, name, pdf, penalty, quiet = args
if not quiet:
print('optimizing', name)
glyph = font['glyf'][name]
plot_glyph(font, name, pdf, True)
optimize_glyph(glyph, penalty)
plot_glyph(font, name, pdf, False)
if not quiet:
print('done optimizing', name)
def _get_args(names, font, pdf, penalty, quiet):
for name in names:
yield font, name, pdf, penalty, quiet
def optimize(fn, newfn, plot=None, penalty=None, quiet=False, jobs=None):
font = ttLib.TTFont(fn)
glyf = font['glyf']
pdf = None
if plot is not None:
from reportlab.pdfgen import canvas
pdf = canvas.Canvas(plot)
if jobs:
pool = Pool(jobs)
pool.map(_optimize, _get_args(glyf.keys(), font, pdf, penalty, quiet))
pool.close()
else:
map(_optimize, _get_args(glyf.keys(), font, pdf, penalty, quiet))
font.save(newfn)
if plot is not None:
pdf.save()
| from __future__ import print_function
from fontTools import ttLib
from multiprocessing import Pool
from .fontcrunch import optimize_glyph, plot_glyph
def _optimize(args):
font, name, pdf, penalty, quiet = args
if not quiet:
print('optimizing', name)
glyph = font['glyf'][name]
plot_glyph(font, name, pdf, True)
optimize_glyph(glyph, penalty)
plot_glyph(font, name, pdf, False)
if not quiet:
print('done optimizing', name)
return (name, glyph)
def _get_args(names, font, pdf, penalty, quiet):
for name in names:
yield font, name, pdf, penalty, quiet
def optimize(fn, newfn, plot=None, penalty=None, quiet=False, jobs=None):
font = ttLib.TTFont(fn)
glyf = font['glyf']
pdf = None
if plot is not None:
from reportlab.pdfgen import canvas
pdf = canvas.Canvas(plot)
glyphs = []
if jobs:
pool = Pool(jobs)
glyphs = pool.map(_optimize, _get_args(glyf.keys(), font, pdf, penalty, quiet))
pool.close()
else:
glyphs = map(_optimize, _get_args(glyf.keys(), font, pdf, penalty, quiet))
for name, glyph in glyphs:
font['glyf'][name] = glyph
font.save(newfn)
if plot is not None:
pdf.save()
| Make this code actually do something! | Make this code actually do something!
| Python | apache-2.0 | googlefonts/fontcrunch,googlefonts/quadopt,googlefonts/quadopt,googlefonts/fontcrunch | ---
+++
@@ -15,6 +15,7 @@
plot_glyph(font, name, pdf, False)
if not quiet:
print('done optimizing', name)
+ return (name, glyph)
def _get_args(names, font, pdf, penalty, quiet):
for name in names:
@@ -29,12 +30,16 @@
from reportlab.pdfgen import canvas
pdf = canvas.Canvas(plot)
+ glyphs = []
if jobs:
pool = Pool(jobs)
- pool.map(_optimize, _get_args(glyf.keys(), font, pdf, penalty, quiet))
+ glyphs = pool.map(_optimize, _get_args(glyf.keys(), font, pdf, penalty, quiet))
pool.close()
else:
- map(_optimize, _get_args(glyf.keys(), font, pdf, penalty, quiet))
+ glyphs = map(_optimize, _get_args(glyf.keys(), font, pdf, penalty, quiet))
+
+ for name, glyph in glyphs:
+ font['glyf'][name] = glyph
font.save(newfn)
if plot is not None: |
fe4cca2101245621164b13e7a1c895501da4dbe6 | sketchbook/_version.py | sketchbook/_version.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2017 Kaede Hoshikawa
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = ["__version__"]
_tag_version = (0, 1, 0)
_dev = 0
_version_fragments = [str(i) for i in _tag_version[:3]]
if _dev is not None:
_version_fragments.append(f"dev{_dev}")
__version__ = ".".join(_version_fragments)
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2017 Kaede Hoshikawa
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = ["__version__"]
_tag_version = (0, 1, 1)
_dev = 0
_version_fragments = [str(i) for i in _tag_version[:3]]
if _dev is not None:
_version_fragments.append(f"dev{_dev}")
__version__ = ".".join(_version_fragments)
| Move master version to 0.1.1. | Move master version to 0.1.1.
| Python | apache-2.0 | futursolo/sketchbook,futursolo/sketchbook | ---
+++
@@ -17,7 +17,7 @@
__all__ = ["__version__"]
-_tag_version = (0, 1, 0)
+_tag_version = (0, 1, 1)
_dev = 0
|
a28fe5793c6bcfc7482f840821eb6d7b779a78dd | slackelot/slackelot.py | slackelot/slackelot.py | import time
import requests
class SlackNotificationError(Exception):
pass
def send_slack_message(message, webhook_url, pretext=None, title=None):
""" Send slack message using webhooks
Args:
message (string)
webhook_url (string), 'https://hooks.slack.com/services/{team id}/{bot or channel id}/{auth token}'
pretext (string)
title (string)
"""
payload = {
'attachments': [
{
'pretext': pretext,
'title': title,
'text': message,
'mrkdwn_in': ['text', 'pretext']
}
],
'link_names': '1',
'as_user': True
}
for i in range(10):
response = requests.post(webhook_url, json=payload)
if response.status_code == 200:
return True
else:
time.sleep(10)
# If the notification doesn't go through after 10 attempts, raise an error.
raise SlackNotificationError('Slack notification failed after 10 attempts.')
| import time
import requests
class SlackNotificationError(Exception):
pass
def send_slack_message(message, webhook_url, pretext=None, title=None):
""" Send slack message using webhooks
Args:
message (string)
webhook_url (string), 'https://hooks.slack.com/services/{team id}/{bot or channel id}/{auth token}'
pretext (string)
title (string)
"""
if 'https://hooks.slack.com/services/' not in webhook_url:
raise SlackNotificationError(
'webhook_url is not in the correct format. It should look like this:\n\
https://hooks.slack.com/services/{team id}/{bot or channel id}/{auth token}')
payload = {
'attachments': [
{
'pretext': pretext,
'title': title,
'text': message,
'mrkdwn_in': ['text', 'pretext']
}
],
'link_names': '1',
'as_user': True
}
for i in range(10):
response = requests.post(webhook_url, json=payload)
if response.status_code == 200:
return True
else:
time.sleep(3)
# If the notification doesn't go through after 10 attempts, raise an error.
raise SlackNotificationError('Slack notification failed after 10 attempts.')
| Add initial check for arg webhook_url | Add initial check for arg webhook_url
| Python | mit | Chris-Graffagnino/slackelot | ---
+++
@@ -15,6 +15,10 @@
pretext (string)
title (string)
"""
+ if 'https://hooks.slack.com/services/' not in webhook_url:
+ raise SlackNotificationError(
+ 'webhook_url is not in the correct format. It should look like this:\n\
+ https://hooks.slack.com/services/{team id}/{bot or channel id}/{auth token}')
payload = {
'attachments': [
@@ -34,6 +38,6 @@
if response.status_code == 200:
return True
else:
- time.sleep(10)
+ time.sleep(3)
# If the notification doesn't go through after 10 attempts, raise an error.
raise SlackNotificationError('Slack notification failed after 10 attempts.') |
9b3203bae4d72dab90e15dc1b60fb0518a4a0f40 | run_patch.py | run_patch.py | from patch_headers import patch_headers, add_object_info
import sys
for currentDir in sys.argv[1:]:
print "working on directory: %s" % currentDir
patch_headers(currentDir, new_file_ext='', overwrite=True)
add_object_info(currentDir, new_file_ext='', overwrite=True)
#add_overscan(currentDir, new_file_ext='', overwrite=True)
| """
SYNOPSIS
python run_patch.py dir1 [dir2 dir3 ...]
DESCRIPTION
For each directory dir1, dir2, ... provided on the command line the
headers all of the FITS files in that directory are modified
to add information like LST, apparent object position, and more.
See the full documentation for a list of the specific keywords
that are modified.
This is basically a wrapper around the function `patch_headers` with
the options set so that:
+ "Bad" keywords written by MaxImDL 5 are purged.
+ Additional useful times like LST, JD are added to the header.
+ Apparent position (Alt/Az, hour angle) are added to the header.
+ Information about overscan is added to the header.
+ Files are overwritten.
For more control over what is patched and where the patched files are saved
see the documentation for ``patch_headers`` at
:func:`patch_headers.patch_headers`.
.. Note::
This script is **NOT RECURSIVE**; it will not process files in
subdirectories of the the directories supplied on the command line.
.. WARNING::
This script OVERWRITES the image files in the directories
specified on the command line.
EXAMPLES
Invoking this script from the command line::
python run_patch.py /my/folder/of/images
To work on the same folder from within python, do this::
from run_patch import patch_directories
patch_directories('/my/folder/of/images')
"""
from patch_headers import patch_headers, add_object_info
def patch_directories(directories):
for currentDir in directories:
print "working on directory: %s" % currentDir
patch_headers(currentDir, new_file_ext='', overwrite=True)
add_object_info(currentDir, new_file_ext='', overwrite=True)
if __name__ == "__main__":
import argparse
raw_help_format = argparse.RawDescriptionHelpFormatter
parser = argparse.ArgumentParser(epilog=__doc__,
formatter_class=raw_help_format)
parser.add_argument("directories", metavar='dir', nargs='+')
parser.parse_args()
args = parser.parse_args()
patch_directories(args.directories)
| Change to script and add significant documentation | Change to script and add significant documentation
| Python | bsd-3-clause | mwcraig/msumastro | ---
+++
@@ -1,8 +1,65 @@
+"""
+SYNOPSIS
+
+ python run_patch.py dir1 [dir2 dir3 ...]
+
+DESCRIPTION
+
+ For each directory dir1, dir2, ... provided on the command line the
+ headers all of the FITS files in that directory are modified
+ to add information like LST, apparent object position, and more.
+ See the full documentation for a list of the specific keywords
+ that are modified.
+
+ This is basically a wrapper around the function `patch_headers` with
+ the options set so that:
+
+ + "Bad" keywords written by MaxImDL 5 are purged.
+ + Additional useful times like LST, JD are added to the header.
+ + Apparent position (Alt/Az, hour angle) are added to the header.
+ + Information about overscan is added to the header.
+ + Files are overwritten.
+
+ For more control over what is patched and where the patched files are saved
+ see the documentation for ``patch_headers`` at
+ :func:`patch_headers.patch_headers`.
+
+ .. Note::
+ This script is **NOT RECURSIVE**; it will not process files in
+ subdirectories of the the directories supplied on the command line.
+
+ .. WARNING::
+ This script OVERWRITES the image files in the directories
+ specified on the command line.
+
+EXAMPLES
+
+ Invoking this script from the command line::
+
+ python run_patch.py /my/folder/of/images
+
+ To work on the same folder from within python, do this::
+
+ from run_patch import patch_directories
+ patch_directories('/my/folder/of/images')
+"""
+
from patch_headers import patch_headers, add_object_info
-import sys
-for currentDir in sys.argv[1:]:
- print "working on directory: %s" % currentDir
- patch_headers(currentDir, new_file_ext='', overwrite=True)
- add_object_info(currentDir, new_file_ext='', overwrite=True)
- #add_overscan(currentDir, new_file_ext='', overwrite=True)
+
+def patch_directories(directories):
+ for currentDir in directories:
+ print "working on directory: %s" % currentDir
+ patch_headers(currentDir, new_file_ext='', overwrite=True)
+ add_object_info(currentDir, new_file_ext='', overwrite=True)
+
+if __name__ == "__main__":
+ import argparse
+ raw_help_format = argparse.RawDescriptionHelpFormatter
+ parser = argparse.ArgumentParser(epilog=__doc__,
+ formatter_class=raw_help_format)
+ parser.add_argument("directories", metavar='dir', nargs='+')
+ parser.parse_args()
+
+ args = parser.parse_args()
+ patch_directories(args.directories) |
f8604ddaa6e821960acb31b9aaea98a2dfd5b422 | cabot/celeryconfig.py | cabot/celeryconfig.py | import os
from datetime import timedelta
BROKER_URL = os.environ['CELERY_BROKER_URL']
CELERY_IMPORTS = ('cabot.cabotapp.tasks', )
CELERYBEAT_SCHEDULER = "djcelery.schedulers.DatabaseScheduler"
CELERY_TASK_SERIALIZER = "json"
CELERY_ACCEPT_CONTENT = ['json', 'msgpack', 'yaml']
CELERYD_TASK_SOFT_TIME_LIMIT = 120
CELERYD_TASK_TIME_LIMIT = 150
CELERYBEAT_SCHEDULE = {
'run-all-checks': {
'task': 'cabot.cabotapp.tasks.run_all_checks',
'schedule': timedelta(seconds=60),
},
'update-shifts': {
'task': 'cabot.cabotapp.tasks.update_shifts',
'schedule': timedelta(seconds=1800),
},
'clean-db': {
'task': 'app.cabotapp.tasks.clean_db',
'schedule': timedelta(seconds=60*60*24),
},
}
CELERY_TIMEZONE = 'UTC'
| import os
from datetime import timedelta
BROKER_URL = os.environ['CELERY_BROKER_URL']
CELERY_IMPORTS = ('cabot.cabotapp.tasks', )
CELERYBEAT_SCHEDULER = "djcelery.schedulers.DatabaseScheduler"
CELERY_TASK_SERIALIZER = "json"
CELERY_ACCEPT_CONTENT = ['json', 'msgpack', 'yaml']
CELERYD_TASK_SOFT_TIME_LIMIT = 120
CELERYD_TASK_TIME_LIMIT = 150
CELERYBEAT_SCHEDULE = {
'run-all-checks': {
'task': 'cabot.cabotapp.tasks.run_all_checks',
'schedule': timedelta(seconds=60),
},
'update-shifts': {
'task': 'cabot.cabotapp.tasks.update_shifts',
'schedule': timedelta(seconds=1800),
},
'clean-db': {
'task': 'cabot.cabotapp.tasks.clean_db',
'schedule': timedelta(seconds=60*60*24),
},
}
CELERY_TIMEZONE = 'UTC'
| Fix task path for cleanup | Fix task path for cleanup | Python | mit | Affirm/cabot,BillGuard/cabot,movermeyer/cabot,BillGuard/cabot,bonniejools/cabot,spladug/cabot,movermeyer/cabot,dever860/cabot,iurisilvio/cabot,spladug/cabot,BillGuard/cabot,mcansky/cabotapp,reddit/cabot,mcansky/cabotapp,xinity/cabot,dever860/cabot,Affirm/cabot,dever860/cabot,iurisilvio/cabot,arachnys/cabot,arachnys/cabot,Affirm/cabot,BillGuard/cabot,cmclaughlin/cabot,mcansky/cabotapp,lghamie/cabot,maks-us/cabot,spladug/cabot,lghamie/cabot,doctolib/cabot,bonniejools/cabot,cmclaughlin/cabot,iurisilvio/cabot,reddit/cabot,robocopio/cabot,reddit/cabot,spladug/cabot,robocopio/cabot,bonniejools/cabot,arachnys/cabot,maks-us/cabot,jdycar/cabot,jdycar/cabot,movermeyer/cabot,dever860/cabot,doctolib/cabot,robocopio/cabot,robocopio/cabot,reddit/cabot,lghamie/cabot,xinity/cabot,arachnys/cabot,maks-us/cabot,mcansky/cabotapp,doctolib/cabot,Affirm/cabot,movermeyer/cabot,iurisilvio/cabot,bonniejools/cabot,xinity/cabot,doctolib/cabot,xinity/cabot,cmclaughlin/cabot,jdycar/cabot,lghamie/cabot,jdycar/cabot,cmclaughlin/cabot,maks-us/cabot | ---
+++
@@ -19,7 +19,7 @@
'schedule': timedelta(seconds=1800),
},
'clean-db': {
- 'task': 'app.cabotapp.tasks.clean_db',
+ 'task': 'cabot.cabotapp.tasks.clean_db',
'schedule': timedelta(seconds=60*60*24),
},
} |
c1f4b466c9acbcb4569bab183f7e932125c833e9 | management/commands/startbot.py | management/commands/startbot.py | from django.core.management.base import BaseCommand
import importlib
import asyncio
class Command(BaseCommand):
def handle(self, *args, **options):
loop = asyncio.get_event_loop()
bot = None
while True:
bot_module = importlib.import_module('dwarf.bot')
bot = bot_module.main(loop=loop, bot=bot)
if not bot.base.restarting_enabled():
break
else:
bot.clear()
| from django.core.management.base import BaseCommand
from django.conf import settings
import importlib
import asyncio
class Command(BaseCommand):
def handle(self, *args, **options):
loop = asyncio.get_event_loop()
if settings.DEBUG:
loop.set_debug(True)
bot = None
while True:
bot_module = importlib.import_module('dwarf.bot')
bot = bot_module.main(loop=loop, bot=bot)
if not bot.base.restarting_enabled():
break
else:
bot.clear()
| Enable asyncio debugging if settings.DEBUG is True | Enable asyncio debugging if settings.DEBUG is True
| Python | mit | Dwarf-Community/Dwarf | ---
+++
@@ -1,4 +1,5 @@
from django.core.management.base import BaseCommand
+from django.conf import settings
import importlib
import asyncio
@@ -8,6 +9,8 @@
def handle(self, *args, **options):
loop = asyncio.get_event_loop()
+ if settings.DEBUG:
+ loop.set_debug(True)
bot = None
while True:
bot_module = importlib.import_module('dwarf.bot') |
c4009fdedc1625fe3692c689242d9f32a1c89f97 | tests/services/conftest.py | tests/services/conftest.py | import pytest
from responses import RequestsMock
from netvisor import Netvisor
@pytest.fixture
def netvisor():
kwargs = dict(
sender='Test client',
partner_id='xxx_yyy',
partner_key='E2CEBB1966C7016730C70CA92CBB93DD',
customer_id='xx_yyyy_zz',
customer_key='7767899D6F5FB333784A2520771E5871',
organization_id='1967543-8',
language='EN'
)
return Netvisor(host='http://koulutus.netvisor.fi', **kwargs)
@pytest.yield_fixture(autouse=True)
def responses():
requests_mock = RequestsMock()
requests_mock._start()
yield requests_mock
requests_mock._stop()
requests_mock.reset()
| import pytest
from responses import RequestsMock
from netvisor import Netvisor
@pytest.fixture
def netvisor():
kwargs = dict(
sender='Test client',
partner_id='xxx_yyy',
partner_key='E2CEBB1966C7016730C70CA92CBB93DD',
customer_id='xx_yyyy_zz',
customer_key='7767899D6F5FB333784A2520771E5871',
organization_id='1967543-8',
language='EN'
)
return Netvisor(host='http://koulutus.netvisor.fi', **kwargs)
@pytest.yield_fixture(autouse=True)
def responses():
r = RequestsMock()
with r:
yield r
| Fix tests to work with responses 0.3.0 | Fix tests to work with responses 0.3.0
| Python | mit | fastmonkeys/netvisor.py | ---
+++
@@ -20,8 +20,6 @@
@pytest.yield_fixture(autouse=True)
def responses():
- requests_mock = RequestsMock()
- requests_mock._start()
- yield requests_mock
- requests_mock._stop()
- requests_mock.reset()
+ r = RequestsMock()
+ with r:
+ yield r |
e946f239695f74d83fcb1b4929ed2281846add4c | avalon/fusion/pipeline.py | avalon/fusion/pipeline.py |
def imprint_container(tool,
name,
namespace,
context,
loader=None):
"""Imprint a Loader with metadata
Containerisation enables a tracking of version, author and origin
for loaded assets.
Arguments:
tool (object): The node in Fusion to imprint as container, usually a
Loader.
name (str): Name of resulting assembly
namespace (str): Namespace under which to host container
context (dict): Asset information
loader (str, optional): Name of loader used to produce this container.
Returns:
None
"""
data = [
("schema", "avalon-core:container-2.0"),
("id", "pyblish.avalon.container"),
("name", str(name)),
("namespace", str(namespace)),
("loader", str(loader)),
("representation", str(context["representation"]["_id"])),
]
for key, value in data:
tool.SetData("avalon.{}".format(key), value)
def parse_container(tool):
"""Returns imprinted container data of a tool
This reads the imprinted data from `imprint_container`.
"""
container = {}
for key in ['schema', 'id', 'name', 'namespace',
'loader', 'representation']:
value = tool.GetData('avalon.{}'.format(key))
container[key] = value
return container
|
def imprint_container(tool,
name,
namespace,
context,
loader=None):
"""Imprint a Loader with metadata
Containerisation enables a tracking of version, author and origin
for loaded assets.
Arguments:
tool (object): The node in Fusion to imprint as container, usually a
Loader.
name (str): Name of resulting assembly
namespace (str): Namespace under which to host container
context (dict): Asset information
loader (str, optional): Name of loader used to produce this container.
Returns:
None
"""
data = [
("schema", "avalon-core:container-2.0"),
("id", "pyblish.avalon.container"),
("name", str(name)),
("namespace", str(namespace)),
("loader", str(loader)),
("representation", str(context["representation"]["_id"])),
]
for key, value in data:
tool.SetData("avalon.{}".format(key), value)
def parse_container(tool):
"""Returns imprinted container data of a tool
This reads the imprinted data from `imprint_container`.
"""
container = {}
for key in ['schema', 'id', 'name', 'namespace',
'loader', 'representation']:
value = tool.GetData('avalon.{}'.format(key))
container[key] = value
# Store the tool's name
container["objectName"] = tool.Name
return container
| Store tool's name when parsing container | Store tool's name when parsing container
| Python | mit | MoonShineVFX/core,MoonShineVFX/core,getavalon/core,getavalon/core,mindbender-studio/core,mindbender-studio/core | ---
+++
@@ -38,9 +38,9 @@
def parse_container(tool):
"""Returns imprinted container data of a tool
-
+
This reads the imprinted data from `imprint_container`.
-
+
"""
container = {}
for key in ['schema', 'id', 'name', 'namespace',
@@ -48,4 +48,7 @@
value = tool.GetData('avalon.{}'.format(key))
container[key] = value
+ # Store the tool's name
+ container["objectName"] = tool.Name
+
return container |
6b3dd31b1a795a92a00c7dba636a88636018655c | tests/blueprints/admin/conftest.py | tests/blueprints/admin/conftest.py | """
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
Fixtures specific to admin blueprints
"""
import pytest
from tests.base import create_admin_app
from tests.conftest import database_recreated
@pytest.fixture(scope='session')
def admin_app_without_db(db):
app = create_admin_app()
with app.app_context():
yield app
@pytest.fixture(scope='module')
def app(admin_app_without_db, db):
app = admin_app_without_db
with database_recreated(db):
yield app
| """
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
Fixtures specific to admin blueprints
"""
import pytest
from tests.conftest import database_recreated
@pytest.fixture(scope='module')
def app(admin_app, db):
app = admin_app
with app.app_context():
with database_recreated(db):
yield app
| Use existing `admin_app` fixture for admin tests | Use existing `admin_app` fixture for admin tests
| Python | bsd-3-clause | homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps | ---
+++
@@ -7,19 +7,12 @@
import pytest
-from tests.base import create_admin_app
from tests.conftest import database_recreated
-@pytest.fixture(scope='session')
-def admin_app_without_db(db):
- app = create_admin_app()
+@pytest.fixture(scope='module')
+def app(admin_app, db):
+ app = admin_app
with app.app_context():
- yield app
-
-
-@pytest.fixture(scope='module')
-def app(admin_app_without_db, db):
- app = admin_app_without_db
- with database_recreated(db):
- yield app
+ with database_recreated(db):
+ yield app |
ed8add36f605def54e53c34627a1eedeefb145e5 | project/api/forms.py | project/api/forms.py | # Django
from django import forms
# Local
from .models import User
class UserCreationForm(forms.ModelForm):
class Meta:
model = User
fields = '__all__'
def save(self, commit=True):
user = super().save(commit=False)
user.email = self.cleaned_data['person'].email.lower()
user.set_password(None)
if commit:
user.save()
return user
class UserChangeForm(forms.ModelForm):
class Meta:
model = User
fields = '__all__'
| # Django
from django import forms
# Local
from .models import User
class UserCreationForm(forms.ModelForm):
class Meta:
model = User
fields = '__all__'
def save(self, commit=True):
user = super().save(commit=False)
user.email = self.cleaned_data['person'].email.lower()
user.name = self.cleaned_data['name'].name
user.set_password(None)
if commit:
user.save()
return user
class UserChangeForm(forms.ModelForm):
class Meta:
model = User
fields = '__all__'
| Create user name on Admin form create | Create user name on Admin form create
| Python | bsd-2-clause | dbinetti/barberscore,barberscore/barberscore-api,dbinetti/barberscore-django,dbinetti/barberscore,barberscore/barberscore-api,dbinetti/barberscore-django,barberscore/barberscore-api,barberscore/barberscore-api | ---
+++
@@ -14,6 +14,7 @@
def save(self, commit=True):
user = super().save(commit=False)
user.email = self.cleaned_data['person'].email.lower()
+ user.name = self.cleaned_data['name'].name
user.set_password(None)
if commit:
user.save() |
ed88d9b598b3bd360a6575d83ffd3d4044846a96 | traits/tests/test_array.py | traits/tests/test_array.py | #------------------------------------------------------------------------------
# Copyright (c) 2005, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in /LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
# Thanks for using Enthought open source!
#------------------------------------------------------------------------------
from __future__ import absolute_import
from traits.testing.unittest_tools import unittest
try:
import numpy
except ImportError:
numpy_available = False
else:
numpy_available = True
from ..api import Array, Bool, HasTraits
class Foo(HasTraits):
a = Array()
event_fired = Bool(False)
def _a_changed(self):
self.event_fired = True
class ArrayTestCase(unittest.TestCase):
""" Test cases for delegated traits. """
@unittest.skipUnless(numpy_available, "test requires the NumPy package")
def test_zero_to_one_element(self):
""" Test that an event fires when an Array trait changes from zero to
one element.
"""
f = Foo()
f.a = numpy.zeros((2,), float)
f.event_fired = False
# Change the array.
f.a = numpy.concatenate((f.a, numpy.array([100])))
# Confirm that the static trait handler was invoked.
self.assertEqual(f.event_fired, True)
return
#### EOF ######################################################################
| #------------------------------------------------------------------------------
# Copyright (c) 2005, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in /LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
# Thanks for using Enthought open source!
#------------------------------------------------------------------------------
from __future__ import absolute_import
from traits.testing.unittest_tools import unittest
try:
import numpy
except ImportError:
numpy_available = False
else:
numpy_available = True
from ..api import Array, Bool, HasTraits
if numpy_available:
# Use of `Array` requires NumPy to be installed.
class Foo(HasTraits):
a = Array()
event_fired = Bool(False)
def _a_changed(self):
self.event_fired = True
class ArrayTestCase(unittest.TestCase):
""" Test cases for delegated traits. """
@unittest.skipUnless(numpy_available, "numpy not available")
def test_zero_to_one_element(self):
""" Test that an event fires when an Array trait changes from zero to
one element.
"""
f = Foo()
f.a = numpy.zeros((2,), float)
f.event_fired = False
# Change the array.
f.a = numpy.concatenate((f.a, numpy.array([100])))
# Confirm that the static trait handler was invoked.
self.assertEqual(f.event_fired, True)
return
#### EOF ######################################################################
| Make definition of conditional on NumPy being installed; update skip message to match that used elsewhere | Make definition of conditional on NumPy being installed; update skip message to match that used elsewhere
| Python | bsd-3-clause | burnpanck/traits,burnpanck/traits | ---
+++
@@ -23,18 +23,21 @@
from ..api import Array, Bool, HasTraits
-class Foo(HasTraits):
- a = Array()
- event_fired = Bool(False)
+if numpy_available:
+ # Use of `Array` requires NumPy to be installed.
- def _a_changed(self):
- self.event_fired = True
+ class Foo(HasTraits):
+ a = Array()
+ event_fired = Bool(False)
+
+ def _a_changed(self):
+ self.event_fired = True
class ArrayTestCase(unittest.TestCase):
""" Test cases for delegated traits. """
- @unittest.skipUnless(numpy_available, "test requires the NumPy package")
+ @unittest.skipUnless(numpy_available, "numpy not available")
def test_zero_to_one_element(self):
""" Test that an event fires when an Array trait changes from zero to
one element. |
ce39e4a5573e7b3a882ee4a327b3c9eb088d1d07 | senlin/profiles/container/docker.py | senlin/profiles/container/docker.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from senlin.common.i18n import _
from senlin.common import schema
from senlin.profiles import base
class DockerProfile(base.Profile):
"""Profile for a docker container."""
KEYS = (
CONTEXT, IMAGE, NAME, COMMAND,
) = (
'context', 'image', 'name', 'command',
)
properties_schema = {
CONTEXT: schema.Map(
_('Customized security context for operationg containers.')
),
IMAGE: schema.String(
_('The image used to create a container')
),
NAME: schema.String(
_('The name of the container.')
),
COMMAND: schema.String(
_('The command to run when container is started.')
),
}
def __init__(self, type_name, name, **kwargs):
super(DockerProfile, self).__init__(type_name, name, **kwargs)
self._dockerclient = None
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from senlin.common.i18n import _
from senlin.common import schema
from senlin.profiles import base
class DockerProfile(base.Profile):
"""Profile for a docker container."""
KEYS = (
CONTEXT, IMAGE, NAME, COMMAND, HOST_NODE, HOST_CLUSTER
) = (
'context', 'image', 'name', 'command', 'host_node', 'host_cluster',
)
properties_schema = {
CONTEXT: schema.Map(
_('Customized security context for operationg containers.')
),
IMAGE: schema.String(
_('The image used to create a container')
),
NAME: schema.String(
_('The name of the container.')
),
COMMAND: schema.String(
_('The command to run when container is started.')
),
HOST_NODE: schema.String(
_('The node on which container will be launched.')
),
HOST_CLUSTER: schema.String(
_('The cluster on which container cluster will be launched.')
),
}
def __init__(self, type_name, name, **kwargs):
super(DockerProfile, self).__init__(type_name, name, **kwargs)
self._dockerclient = None
| Add 'host_node' and 'host_cluster' properties to container profile | Add 'host_node' and 'host_cluster' properties to container profile
Add 'host_node' and 'host_cluster' properties to container profile,
in a container profile, either 'host_node' or 'host_cluster' will
be assigned a value for a container node creation or a container
cluster creation.
blueprint container-profile-support
Change-Id: Ief464375bf651ebe1770c3fcf0488f29b25a94f4
| Python | apache-2.0 | stackforge/senlin,openstack/senlin,stackforge/senlin,openstack/senlin,openstack/senlin | ---
+++
@@ -19,9 +19,9 @@
"""Profile for a docker container."""
KEYS = (
- CONTEXT, IMAGE, NAME, COMMAND,
+ CONTEXT, IMAGE, NAME, COMMAND, HOST_NODE, HOST_CLUSTER
) = (
- 'context', 'image', 'name', 'command',
+ 'context', 'image', 'name', 'command', 'host_node', 'host_cluster',
)
properties_schema = {
@@ -37,6 +37,12 @@
COMMAND: schema.String(
_('The command to run when container is started.')
),
+ HOST_NODE: schema.String(
+ _('The node on which container will be launched.')
+ ),
+ HOST_CLUSTER: schema.String(
+ _('The cluster on which container cluster will be launched.')
+ ),
}
def __init__(self, type_name, name, **kwargs): |
d357136075bce9d8582759a525536daf7489becb | unitypack/engine/object.py | unitypack/engine/object.py | def field(f, cast=None):
def _inner(self):
ret = self._obj[f]
if cast:
ret = cast(ret)
return ret
return property(_inner)
class Object:
def __init__(self, data=None):
if data is None:
data = {}
self._obj = data
def __repr__(self):
return "<%s %s>" % (self.__class__.__name__, self.name)
def __str__(self):
return self.name
name = field("m_Name")
class GameObject(Object):
active = field("m_IsActive")
component = field("m_Component")
layer = field("m_Layer")
tag = field("m_Tag")
| def field(f, cast=None, **kwargs):
def _inner(self):
if "default" in kwargs:
ret = self._obj.get(f, kwargs["default"])
else:
ret = self._obj[f]
if cast:
ret = cast(ret)
return ret
return property(_inner)
class Object:
def __init__(self, data=None):
if data is None:
data = {}
self._obj = data
def __repr__(self):
return "<%s %s>" % (self.__class__.__name__, self.name)
def __str__(self):
return self.name
name = field("m_Name", default="")
class GameObject(Object):
active = field("m_IsActive")
component = field("m_Component")
layer = field("m_Layer")
tag = field("m_Tag")
| Allow default values in field() | Allow default values in field()
| Python | mit | andburn/python-unitypack | ---
+++
@@ -1,6 +1,9 @@
-def field(f, cast=None):
+def field(f, cast=None, **kwargs):
def _inner(self):
- ret = self._obj[f]
+ if "default" in kwargs:
+ ret = self._obj.get(f, kwargs["default"])
+ else:
+ ret = self._obj[f]
if cast:
ret = cast(ret)
return ret
@@ -19,7 +22,7 @@
def __str__(self):
return self.name
- name = field("m_Name")
+ name = field("m_Name", default="")
class GameObject(Object): |
be30b8220900eaf549fbe1bff7a1e7c3a9be8529 | settings_test.py | settings_test.py | # The test system uses this to override settings in settings.py and
# settings_local.py with settings appropriate for testing.
# Make sure Celery is EAGER.
CELERY_ALWAYS_EAGER = True
# Make sure the doctypes (the keys) match the doctypes in ES_INDEXES
# in settings.py and settings_local.py.
ES_INDEXES = {'default': 'sumo_test'}
ES_WRITE_INDEXES = ES_INDEXES
# This makes sure we only turn on ES stuff when we're testing ES
# stuff.
ES_LIVE_INDEXING = False
# Make sure we use port 6383 db 2 redis for tests. That's db 2 of the
# redis test config. That shouldn't collide with anything else.
REDIS_BACKENDS = {
'default': 'redis://localhost:6383?socket_timeout=0.5&db=2',
'karma': 'redis://localhost:6383?socket_timeout=0.5&db=2',
'helpfulvotes': 'redis://localhost:6383?socket_timeout=0.5&db=2',
}
# Use fake webtrends settings.
WEBTRENDS_PROFILE_ID = 'ABC123'
| from django.conf import settings
# The test system uses this to override settings in settings.py and
# settings_local.py with settings appropriate for testing.
# Make sure Celery is EAGER.
CELERY_ALWAYS_EAGER = True
# Make sure the doctypes (the keys) match the doctypes in ES_INDEXES
# in settings.py and settings_local.py.
ES_INDEXES = {'default': 'sumo_test' + settings.ES_INDEX_PREFIX}
ES_WRITE_INDEXES = ES_INDEXES
# This makes sure we only turn on ES stuff when we're testing ES
# stuff.
ES_LIVE_INDEXING = False
# Make sure we use port 6383 db 2 redis for tests. That's db 2 of the
# redis test config. That shouldn't collide with anything else.
REDIS_BACKENDS = {
'default': 'redis://localhost:6383?socket_timeout=0.5&db=2',
'karma': 'redis://localhost:6383?socket_timeout=0.5&db=2',
'helpfulvotes': 'redis://localhost:6383?socket_timeout=0.5&db=2',
}
# Use fake webtrends settings.
WEBTRENDS_PROFILE_ID = 'ABC123'
| Make settings test respect ES_INDEX_PREFIX. | Make settings test respect ES_INDEX_PREFIX.
| Python | bsd-3-clause | chirilo/kitsune,H1ghT0p/kitsune,feer56/Kitsune1,NewPresident1/kitsune,safwanrahman/kitsune,asdofindia/kitsune,brittanystoroz/kitsune,MikkCZ/kitsune,H1ghT0p/kitsune,philipp-sumo/kitsune,mozilla/kitsune,safwanrahman/kitsune,Osmose/kitsune,YOTOV-LIMITED/kitsune,philipp-sumo/kitsune,feer56/Kitsune1,dbbhattacharya/kitsune,anushbmx/kitsune,silentbob73/kitsune,turtleloveshoes/kitsune,asdofindia/kitsune,mozilla/kitsune,feer56/Kitsune1,feer56/Kitsune2,YOTOV-LIMITED/kitsune,mozilla/kitsune,YOTOV-LIMITED/kitsune,mythmon/kitsune,orvi2014/kitsune,MikkCZ/kitsune,YOTOV-LIMITED/kitsune,rlr/kitsune,dbbhattacharya/kitsune,MziRintu/kitsune,NewPresident1/kitsune,asdofindia/kitsune,anushbmx/kitsune,H1ghT0p/kitsune,feer56/Kitsune2,rlr/kitsune,brittanystoroz/kitsune,iDTLabssl/kitsune,Osmose/kitsune,brittanystoroz/kitsune,rlr/kitsune,safwanrahman/kitsune,asdofindia/kitsune,mythmon/kitsune,safwanrahman/kitsune,philipp-sumo/kitsune,orvi2014/kitsune,silentbob73/kitsune,Osmose/kitsune,dbbhattacharya/kitsune,orvi2014/kitsune,mozilla/kitsune,chirilo/kitsune,dbbhattacharya/kitsune,orvi2014/kitsune,iDTLabssl/kitsune,anushbmx/kitsune,silentbob73/kitsune,H1ghT0p/kitsune,turtleloveshoes/kitsune,iDTLabssl/kitsune,safwanrahman/linuxdesh,mythmon/kitsune,turtleloveshoes/kitsune,mythmon/kitsune,NewPresident1/kitsune,chirilo/kitsune,safwanrahman/linuxdesh,NewPresident1/kitsune,anushbmx/kitsune,MikkCZ/kitsune,feer56/Kitsune2,safwanrahman/linuxdesh,iDTLabssl/kitsune,MziRintu/kitsune,Osmose/kitsune,MikkCZ/kitsune,rlr/kitsune,brittanystoroz/kitsune,chirilo/kitsune,MziRintu/kitsune,feer56/Kitsune2,MziRintu/kitsune,turtleloveshoes/kitsune,silentbob73/kitsune | ---
+++
@@ -1,3 +1,4 @@
+from django.conf import settings
# The test system uses this to override settings in settings.py and
# settings_local.py with settings appropriate for testing.
@@ -6,7 +7,7 @@
# Make sure the doctypes (the keys) match the doctypes in ES_INDEXES
# in settings.py and settings_local.py.
-ES_INDEXES = {'default': 'sumo_test'}
+ES_INDEXES = {'default': 'sumo_test' + settings.ES_INDEX_PREFIX}
ES_WRITE_INDEXES = ES_INDEXES
# This makes sure we only turn on ES stuff when we're testing ES |
99774f3a525e59c65414606bca6a2f309f5ce140 | stageflip.py | stageflip.py | """
Helper functions for pushing content to a staging directory, moving the old directory aside, and moving the staging directory into place.
"""
from fabric.api import env
from fabric.operations import sudo
from . import debug
from os import path
import time
import re
def make_staging_directory(basename = "project", parent = "/opt"):
dir_tmp = path.join(parent,basename) + time.strftime("_%Y%m%d_%H%M%S") + ".deploying"
sudo('mkdir -p %s' % dir_tmp)
sudo("chown %s:%s %s" % (env.user, env.group, dir_tmp))
return dir_tmp
def flip(staging_dir):
active_dir = re.sub(r'_[0-9]{8}_[0-9]{6}.deploying','',staging_dir)
retired_dir = active_dir + time.strftime("_%Y%m%d_%H%M%S") + ".retired"
debug("Flipping directory name.")
sudo("mv %s %s.retired" % (active_dir,retired_dir), quiet=True)
sudo("mv %s %s" % (staging_dir,active_dir))
| """
Helper functions for pushing content to a staging directory, moving the old directory aside, and moving the staging directory into place.
"""
from fabric.api import env
from fabric.operations import sudo
from . import debug
from os import path
import time
import re
def make_staging_directory(basename = "project", parent = "/opt"):
dir_tmp = path.join(parent,basename) + time.strftime("_%Y%m%d_%H%M%S") + ".deploying"
sudo('mkdir -p %s' % dir_tmp)
sudo("chown %s:%s %s" % (env.user, env.group, dir_tmp))
return dir_tmp
def flip(staging_dir):
active_dir = re.sub(r'_[0-9]{8}_[0-9]{6}.deploying','',staging_dir)
retired_dir = active_dir + time.strftime("_%Y%m%d_%H%M%S") + ".retired"
debug("Flipping directory name.")
sudo("mv %s %s.retired" % (active_dir,retired_dir), quiet=True)
sudo("mv %s %s" % (staging_dir,active_dir))
return active_dir
| Return active dir for use by clients. | Return active dir for use by clients.
| Python | bsd-2-clause | Multifarious/fabulous | ---
+++
@@ -20,3 +20,4 @@
debug("Flipping directory name.")
sudo("mv %s %s.retired" % (active_dir,retired_dir), quiet=True)
sudo("mv %s %s" % (staging_dir,active_dir))
+ return active_dir |
9af78701228df0decee22854eae1fbb306d90068 | cactusbot/handlers/spam.py | cactusbot/handlers/spam.py | """Handle incoming spam messages."""
from ..handler import Handler
import logging
import json
class SpamHandler(Handler):
"""Spam handler."""
MAX_SCORE = 16
MAX_EMOTES = 6
ALLOW_LINKS = False
def __init__(self):
self.logger = logging.getLogger(__name__)
def on_message(self, packet):
"""Handle message events."""
packet = json.loads(packet)
# exceeds_caps = self.check_caps(''.join(chunk for chunk in packet if chunk["type"] == "text"))
contains_emotes = self.check_emotes(packet)
has_links = self.check_links(packet)
if contains_emotes or has_links:
return True
else:
return False
def check_links(self, packet):
return not self.ALLOW_LINKS and any(chunk["type"] == "link" for chunk in packet)
def check_emotes(self, packet):
return sum(chunk["type"] == "emote" for chunk in packet) > self.MAX_EMOTES
def check_caps(self, message):
return sum(char.isupper() - char.islower() for char in message["text"]) > self.MAX_SCORE
| """Handle incoming spam messages."""
from ..handler import Handler
import logging
class SpamHandler(Handler):
"""Spam handler."""
MAX_SCORE = 16
MAX_EMOTES = 6
ALLOW_LINKS = False
def __init__(self):
self.logger = logging.getLogger(__name__)
def on_message(self, packet):
"""Handle message events."""
built_message = ""
for chunk in packet:
if chunk["type"] == "text":
built_message += chunk["text"]
exceeds_caps = self.check_caps(built_message)
contains_emotes = self.check_emotes(packet)
has_links = self.check_links(packet)
if exceeds_caps or contains_emotes or has_links:
return True
else:
return False
def check_links(self, packet):
return not self.ALLOW_LINKS and any(chunk["type"] == "link" for chunk in packet)
def check_emotes(self, packet):
return sum(chunk["type"] == "emote" for chunk in packet) > self.MAX_EMOTES
def check_caps(self, message):
return sum(char.isupper() - char.islower() for char in message) > self.MAX_SCORE
| Fix json loading, fix capital checker | Fix json loading, fix capital checker
| Python | mit | CactusDev/CactusBot | ---
+++
@@ -3,7 +3,6 @@
from ..handler import Handler
import logging
-import json
class SpamHandler(Handler):
"""Spam handler."""
@@ -17,12 +16,15 @@
def on_message(self, packet):
"""Handle message events."""
- packet = json.loads(packet)
- # exceeds_caps = self.check_caps(''.join(chunk for chunk in packet if chunk["type"] == "text"))
+ built_message = ""
+ for chunk in packet:
+ if chunk["type"] == "text":
+ built_message += chunk["text"]
+ exceeds_caps = self.check_caps(built_message)
contains_emotes = self.check_emotes(packet)
has_links = self.check_links(packet)
- if contains_emotes or has_links:
+ if exceeds_caps or contains_emotes or has_links:
return True
else:
return False
@@ -34,4 +36,4 @@
return sum(chunk["type"] == "emote" for chunk in packet) > self.MAX_EMOTES
def check_caps(self, message):
- return sum(char.isupper() - char.islower() for char in message["text"]) > self.MAX_SCORE
+ return sum(char.isupper() - char.islower() for char in message) > self.MAX_SCORE |
9127e56a26e836c7e2a66359a9f9b67e6c7f8474 | ovp_users/tests/test_filters.py | ovp_users/tests/test_filters.py | from django.test import TestCase
from ovp_users.recover_password import RecoveryTokenFilter
from ovp_users.recover_password import RecoverPasswordFilter
def test_filter(c):
obj = c()
obj.filter_queryset('a', 'b', 'c')
obj.get_fields('a')
def TestPasswordRecoveryFilters(TestCase):
def test_filters():
"""Assert filters do not throw error when instantiated"""
# Nothing to assert here, we just instantiate them and
# make sure it throws no error
test_filter(RecoveryTokenFilter)
test_filter(RecoverPasswordFilter)
| from django.test import TestCase
from ovp_users.recover_password import RecoveryTokenFilter
from ovp_users.recover_password import RecoverPasswordFilter
def test_filter(c):
obj = c()
obj.filter_queryset('a', 'b', 'c')
obj.get_fields('a')
def PasswordRecoveryFiltersTestCase(TestCase):
def test_filters():
"""Assert filters do not throw error when instantiated"""
# Nothing to assert here, we just instantiate them and
# make sure it throws no error
test_filter(RecoveryTokenFilter)
test_filter(RecoverPasswordFilter)
| Fix PasswordRecovery test case name | Fix PasswordRecovery test case name
| Python | agpl-3.0 | OpenVolunteeringPlatform/django-ovp-users,OpenVolunteeringPlatform/django-ovp-users | ---
+++
@@ -8,7 +8,7 @@
obj.filter_queryset('a', 'b', 'c')
obj.get_fields('a')
-def TestPasswordRecoveryFilters(TestCase):
+def PasswordRecoveryFiltersTestCase(TestCase):
def test_filters():
"""Assert filters do not throw error when instantiated"""
# Nothing to assert here, we just instantiate them and |
1b502cdf399b5b9cd4593aea82750b77114fe858 | examples/flask_hello.py | examples/flask_hello.py | from pyinstrument import Profiler
try:
from flask import Flask, g, make_response, request
except ImportError:
print('This example requires Flask.')
print('Install using `pip install flask`.')
exit(1)
app = Flask(__name__)
@app.before_request
def before_request():
if "profile" in request.args:
g.profiler = Profiler()
g.profiler.start()
@app.after_request
def after_request(response):
if not hasattr(g, "profiler"):
return response
g.profiler.stop()
output_html = g.profiler.output_html()
return make_response(output_html)
@app.route('/')
def hello_world():
return 'Hello, World!'
| import time
from pyinstrument import Profiler
try:
from flask import Flask, g, make_response, request
except ImportError:
print('This example requires Flask.')
print('Install using `pip install flask`.')
exit(1)
app = Flask(__name__)
@app.before_request
def before_request():
if "profile" in request.args:
g.profiler = Profiler()
g.profiler.start()
@app.after_request
def after_request(response):
if not hasattr(g, "profiler"):
return response
g.profiler.stop()
output_html = g.profiler.output_html()
return make_response(output_html)
@app.route('/')
def hello_world():
return 'Hello, World!'
@app.route('/sleep')
def sleep():
time.sleep(0.1)
return 'Good morning!'
@app.route('/dosomething')
def do_something():
import requests
requests.get('http://google.com')
return 'Google says hello!'
| Add some more endpoints to the flask example | Add some more endpoints to the flask example
| Python | bsd-3-clause | joerick/pyinstrument,joerick/pyinstrument,joerick/pyinstrument,joerick/pyinstrument,joerick/pyinstrument,joerick/pyinstrument | ---
+++
@@ -1,3 +1,4 @@
+import time
from pyinstrument import Profiler
try:
@@ -27,3 +28,16 @@
@app.route('/')
def hello_world():
return 'Hello, World!'
+
+
+@app.route('/sleep')
+def sleep():
+ time.sleep(0.1)
+ return 'Good morning!'
+
+
+@app.route('/dosomething')
+def do_something():
+ import requests
+ requests.get('http://google.com')
+ return 'Google says hello!' |
c8c186e46990797e1faa07c71fb57920a89a2dcc | webquills/core/commands.py | webquills/core/commands.py | """
Business logic in this app is implemented using a CQRS style. Commands should
be implemented as functions here. Queries should be implemented as methods on
Django model managers. Commands can then be called from a management command
(i.e. the CLI), a view, a signal, etc.
"""
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
def initialize_site():
"""
For development environments, set up the Site and home page objects.
"""
try:
site = Site.objects.get(id=settings.SITE_ID)
# If the default site was already created, just update its properties
site.domain = "webquills.com"
site.name = "WebQuills"
except Site.DoesNotExist:
site = Site.objects.create(
id=settings.SITE_ID,
domain="webquills.com",
name="WebQuills",
)
site.save()
| """
Business logic in this app is implemented using a CQRS style. Commands should
be implemented as functions here. Queries should be implemented as methods on
Django model managers. Commands can then be called from a management command
(i.e. the CLI), a view, a signal, etc.
"""
from django.conf import settings
from django.contrib.sites.models import Site
from webquills.core.models import SiteMeta
def initialize_site():
"""
For development environments, set up the Site and home page objects.
"""
try:
site = Site.objects.get(id=settings.SITE_ID)
# If the default site was already created, just update its properties
site.domain = "webquills.com"
site.name = "WebQuills"
except Site.DoesNotExist:
site = Site.objects.create(
id=settings.SITE_ID,
domain="webquills.com",
name="WebQuills",
)
if not hasattr(site, "meta"):
SiteMeta.objects.create(site=site)
site.save()
| Create SiteMeta for default site | Create SiteMeta for default site
| Python | apache-2.0 | veselosky/webquills,veselosky/webquills,veselosky/webquills | ---
+++
@@ -5,8 +5,9 @@
(i.e. the CLI), a view, a signal, etc.
"""
from django.conf import settings
-from django.contrib.contenttypes.models import ContentType
from django.contrib.sites.models import Site
+
+from webquills.core.models import SiteMeta
def initialize_site():
@@ -24,5 +25,6 @@
domain="webquills.com",
name="WebQuills",
)
-
+ if not hasattr(site, "meta"):
+ SiteMeta.objects.create(site=site)
site.save() |
e24ee559a607172f0072ac9f28c90f09765ddc62 | examples/print_gcode.py | examples/print_gcode.py | import os
import sys
lib_path = os.path.abspath('../')
sys.path.append(lib_path)
import s3g
import serial
import optparse
parser = optparse.OptionParser()
parser.add_option("-p", "--serialport", dest="serialportname",
help="serial port (ex: /dev/ttyUSB0)", default="/dev/ttyACM0")
parser.add_option("-b", "--baud", dest="serialbaud",
help="serial port baud rate", default="115200")
parser.add_option("-f", "--filename", dest="filename",
help="gcode file to print", default=False)
(options, args) = parser.parse_args()
file = serial.Serial(options.serialportname, options.serialbaud, timeout=0)
r = s3g.s3g()
r.writer = s3g.StreamWriter(file)
parser = s3g.GcodeParser()
parser.s3g = r
with open(options.filename) as f:
for line in f:
print line
parser.ExecuteLine(line)
| import os
import sys
lib_path = os.path.abspath('../')
sys.path.append(lib_path)
import s3g
import serial
import optparse
parser = optparse.OptionParser()
parser.add_option("-p", "--serialport", dest="serialportname",
help="serial port (ex: /dev/ttyUSB0)", default="/dev/ttyACM0")
parser.add_option("-b", "--baud", dest="serialbaud",
help="serial port baud rate", default="115200")
parser.add_option("-f", "--filename", dest="filename",
help="gcode file to print", default=False)
(options, args) = parser.parse_args()
file = serial.Serial(options.serialportname, options.serialbaud, timeout=0)
r = s3g.s3g()
r.writer = s3g.Writer.StreamWriter(file)
parser = s3g.Gcode.GcodeParser()
parser.state.values["build_name"] = 'test'
parser.s3g = r
with open(options.filename) as f:
for line in f:
print line,
parser.ExecuteLine(line)
| Update print_gcide example to work with latest api changes. | Update print_gcide example to work with latest api changes.
| Python | agpl-3.0 | Jnesselr/s3g,makerbot/s3g,makerbot/s3g,Jnesselr/s3g,makerbot/s3g,makerbot/s3g | ---
+++
@@ -19,12 +19,13 @@
file = serial.Serial(options.serialportname, options.serialbaud, timeout=0)
r = s3g.s3g()
-r.writer = s3g.StreamWriter(file)
+r.writer = s3g.Writer.StreamWriter(file)
-parser = s3g.GcodeParser()
+parser = s3g.Gcode.GcodeParser()
+parser.state.values["build_name"] = 'test'
parser.s3g = r
with open(options.filename) as f:
for line in f:
- print line
+ print line,
parser.ExecuteLine(line) |
b22a9abc8b560ea3014b68b998830e8a5f5073f5 | tomviz/python/setup.py | tomviz/python/setup.py | from setuptools import setup, find_packages
setup(
name='tomviz-pipeline',
version='0.0.1',
description='Tomviz python external pipeline execution infrastructure.',
author='Kitware, Inc.',
author_email='kitware@kitware.com',
url='https://www.tomviz.org/',
license='BSD 3-Clause',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: BSD 3-Clause',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5'
],
packages=find_packages(),
install_requires=['tqdm', 'h5py', 'numpy==1.16.4', 'click', 'scipy',
'itk', 'pyfftw'],
extras_require={
'interactive': ['jsonpatch', 'marshmallow']
},
entry_points={
'console_scripts': [
'tomviz-pipeline = tomviz.cli:main'
]
}
)
| from setuptools import setup, find_packages
setup(
name='tomviz-pipeline',
version='0.0.1',
description='Tomviz python external pipeline execution infrastructure.',
author='Kitware, Inc.',
author_email='kitware@kitware.com',
url='https://www.tomviz.org/',
license='BSD 3-Clause',
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: BSD 3-Clause',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5'
],
packages=find_packages(),
install_requires=['tqdm', 'h5py', 'numpy==1.16.4', 'click', 'scipy'],
extras_require={
'interactive': ['jsonpatch', 'marshmallow'],
'itk': ['itk'],
'pyfftw': ['pyfftw']
},
entry_points={
'console_scripts': [
'tomviz-pipeline = tomviz.cli:main'
]
}
)
| Make itk and pyfftw extra requirements | Make itk and pyfftw extra requirements
Neither are needed by the core pipeline, some operators use them. Fixes
issue #2040.
Signed-off-by: Marcus D. Hanwell <cf7042e2e8eee958b5bcde1ae2cbefef82efc184@kitware.com>
| Python | bsd-3-clause | OpenChemistry/tomviz,OpenChemistry/tomviz,OpenChemistry/tomviz,OpenChemistry/tomviz | ---
+++
@@ -19,10 +19,11 @@
'Programming Language :: Python :: 3.5'
],
packages=find_packages(),
- install_requires=['tqdm', 'h5py', 'numpy==1.16.4', 'click', 'scipy',
- 'itk', 'pyfftw'],
+ install_requires=['tqdm', 'h5py', 'numpy==1.16.4', 'click', 'scipy'],
extras_require={
- 'interactive': ['jsonpatch', 'marshmallow']
+ 'interactive': ['jsonpatch', 'marshmallow'],
+ 'itk': ['itk'],
+ 'pyfftw': ['pyfftw']
},
entry_points={
'console_scripts': [ |
560ff8d533a2247e7b194755fb13941ffbc1f544 | IPython/nbconvert/exporters/python.py | IPython/nbconvert/exporters/python.py | """Python script Exporter class"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from IPython.utils.traitlets import Unicode
from .templateexporter import TemplateExporter
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class PythonExporter(TemplateExporter):
"""
Exports a Python code file.
"""
file_extension = Unicode(
'py', config=True,
help="Extension of the file that should be written to disk")
output_mimetype = 'text/x-python'
| """Python script Exporter class"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from IPython.utils.traitlets import Unicode
from .templateexporter import TemplateExporter
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class PythonExporter(TemplateExporter):
"""
Exports a Python code file.
"""
def _file_extension_default(self):
return 'py'
def _template_file_default(self):
return 'python'
output_mimetype = 'text/x-python'
| Remove magic for loading templates from module names | Remove magic for loading templates from module names
| Python | bsd-3-clause | SylvainCorlay/ipywidgets,jupyter-widgets/ipywidgets,jupyter-widgets/ipywidgets,ipython/ipywidgets,ipython/ipywidgets,ipython/ipywidgets,jupyter-widgets/ipywidgets,SylvainCorlay/ipywidgets,jupyter-widgets/ipywidgets,ipython/ipywidgets,cornhundred/ipywidgets,cornhundred/ipywidgets,ipython/ipywidgets,SylvainCorlay/ipywidgets,cornhundred/ipywidgets,cornhundred/ipywidgets,SylvainCorlay/ipywidgets,cornhundred/ipywidgets | ---
+++
@@ -24,9 +24,10 @@
"""
Exports a Python code file.
"""
-
- file_extension = Unicode(
- 'py', config=True,
- help="Extension of the file that should be written to disk")
+ def _file_extension_default(self):
+ return 'py'
+
+ def _template_file_default(self):
+ return 'python'
output_mimetype = 'text/x-python' |
b6bb2185caa9f6242acc463fbaae1c7f841390da | tt_hospitals/models.py | tt_hospitals/models.py | from django.contrib.gis.db import models
class Hospital(models.Model):
file_nbr = models.CharField(max_length=20)
license_number = models.CharField(max_length=20)
name = models.CharField(max_length=250)
address = models.CharField(max_length=250)
zipcode = models.PositiveIntegerField()
latitude = models.PointField()
longitude = models.PointField()
phone = models.CharField(max_length=250)
status = models.CharField(max_length=20) # TODO: Investigate, boolean?
original_date = models.DateField()
# TODO: denormalize these fields
city = models.CharField(max_length=250)
state = models.CharField(max_length=2, default='TX')
county = models.CharField(max_length=250)
def __unicode__(self):
return "%s (%s)" % (self.name, self.city)
| from django.contrib.gis.db import models
class Hospital(models.Model):
file_nbr = models.CharField(max_length=20)
license_number = models.CharField(max_length=20)
name = models.CharField(max_length=250)
address = models.CharField(max_length=250)
zipcode = models.PositiveIntegerField()
coordinates = models.PointField()
phone = models.CharField(max_length=250)
status = models.CharField(max_length=20) # TODO: Investigate, boolean?
original_date = models.DateField()
# TODO: denormalize these fields
city = models.CharField(max_length=250)
state = models.CharField(max_length=2, default='TX')
county = models.CharField(max_length=250)
objects = models.GeoManager()
def __unicode__(self):
return "%s (%s)" % (self.name, self.city)
| Update the schema a bit and add the geo manager | Update the schema a bit and add the geo manager
| Python | apache-2.0 | texastribune/tt_hospitals,texastribune/tt_hospitals | ---
+++
@@ -7,8 +7,7 @@
name = models.CharField(max_length=250)
address = models.CharField(max_length=250)
zipcode = models.PositiveIntegerField()
- latitude = models.PointField()
- longitude = models.PointField()
+ coordinates = models.PointField()
phone = models.CharField(max_length=250)
status = models.CharField(max_length=20) # TODO: Investigate, boolean?
original_date = models.DateField()
@@ -18,5 +17,7 @@
state = models.CharField(max_length=2, default='TX')
county = models.CharField(max_length=250)
+ objects = models.GeoManager()
+
def __unicode__(self):
return "%s (%s)" % (self.name, self.city) |
188e4e6d3419793ae8811eb66d94e31849af3461 | conf_site/core/forms.py | conf_site/core/forms.py | from django import forms
class CsvUploadForm(forms.Form):
"""Form for uploading a CSV file."""
csv_file = forms.FileField(label="Please upload a CSV file.")
| from django import forms
class CsvUploadForm(forms.Form):
"""Form for uploading a CSV file."""
csv_file = forms.FileField(label="Please upload a CSV file.")
def _is_csv_file(self, file_data):
"""
Test whether an uploaded file is a CSV file.
Returns a list of a boolean of the results and the uploaded content
type.
"""
uploaded_content_type = getattr(file_data, "content_type", "text/csv")
return [uploaded_content_type == "text/csv", uploaded_content_type]
def clean_csv_file(self, *args, **kwargs):
data = super().clean(*args, **kwargs)
results = self._is_csv_file(data["csv_file"])
if not results[0]:
raise forms.ValidationError(
"Only CSV files ('text/csv') can be uploaded with this form. "
"You uploaded a '{}' file.".format(results[1])
)
return data
| Test whether uploaded CSV has correct mime type. | Test whether uploaded CSV has correct mime type.
Add cleaning method to CsvUploadForm to ensure that uploaded file has
either the mime type for a CSV file or no mime type. Return error if
user uploads a different mime type.
| Python | mit | pydata/conf_site,pydata/conf_site,pydata/conf_site | ---
+++
@@ -5,3 +5,23 @@
"""Form for uploading a CSV file."""
csv_file = forms.FileField(label="Please upload a CSV file.")
+
+ def _is_csv_file(self, file_data):
+ """
+ Test whether an uploaded file is a CSV file.
+
+ Returns a list of a boolean of the results and the uploaded content
+ type.
+ """
+ uploaded_content_type = getattr(file_data, "content_type", "text/csv")
+ return [uploaded_content_type == "text/csv", uploaded_content_type]
+
+ def clean_csv_file(self, *args, **kwargs):
+ data = super().clean(*args, **kwargs)
+ results = self._is_csv_file(data["csv_file"])
+ if not results[0]:
+ raise forms.ValidationError(
+ "Only CSV files ('text/csv') can be uploaded with this form. "
+ "You uploaded a '{}' file.".format(results[1])
+ )
+ return data |
72e1a3552a24ac9b08e5181fb1ad15f78ca6a592 | examples/uploadr/app.py | examples/uploadr/app.py | from flask import Flask, render_template
from flask_wtf import Form
from flask_wtf.file import FileField, FieldList
class FileUploadForm(Form):
uploads = FieldList(FileField())
DEBUG = True
SECRET_KEY = 'secret'
app = Flask(__name__)
app.config.from_object(__name__)
@app.route("/", methods=("GET", "POST",))
def index():
form = FileUploadForm()
for i in xrange(5):
form.uploads.append_entry()
filedata = []
if form.validate_on_submit():
for upload in form.uploads.entries:
filedata.append(upload)
return render_template("index.html",
form=form,
filedata=filedata)
if __name__ == "__main__":
app.run()
| from flask import Flask, render_template
from flask_wtf import Form
from flask_wtf.file import FileField
from wtforms import FieldList
class FileUploadForm(Form):
uploads = FieldList(FileField())
DEBUG = True
SECRET_KEY = 'secret'
app = Flask(__name__)
app.config.from_object(__name__)
@app.route("/", methods=("GET", "POST",))
def index():
form = FileUploadForm()
for i in xrange(5):
form.uploads.append_entry()
filedata = []
if form.validate_on_submit():
for upload in form.uploads.entries:
filedata.append(upload)
return render_template("index.html",
form=form,
filedata=filedata)
if __name__ == "__main__":
app.run()
| Fix example uploadr - wrong FieldList import | Fix example uploadr - wrong FieldList import
| Python | bsd-3-clause | Maxence1/flask-wtf,Maxence1/flask-wtf | ---
+++
@@ -1,7 +1,7 @@
from flask import Flask, render_template
from flask_wtf import Form
-from flask_wtf.file import FileField, FieldList
-
+from flask_wtf.file import FileField
+from wtforms import FieldList
class FileUploadForm(Form):
uploads = FieldList(FileField()) |
6995c7ca63f26a6c350fbdad5e5d194c4c1a60b0 | irco/graphs/country.py | irco/graphs/country.py | import sys
import itertools
import collections
import networkx as nx
from irco import logging
log = logging.get_logger()
def get_countries(publication):
publication_countries = set()
for affiliation in publication.affiliations:
country = affiliation.institution.country
if country is None:
print >>sys.stderr, 'Undefined country for "{}"'.format(
affiliation.institution.name)
else:
publication_countries.add(country)
return publication_countries
def create(session, publications):
g = nx.Graph()
papers_count = collections.Counter()
collaborations_count = collections.Counter()
for publication in publications:
publication_countries = get_countries(publication)
g.add_nodes_from(publication_countries)
papers_count.update(publication_countries)
collaborations = list(itertools.combinations(publication_countries, 2))
collaborations_count.update(collaborations)
g.add_edges_from(collaborations)
# Set papers count
for country, count in papers_count.iteritems():
g.node[country]['papers'] = count
# Set edge weight
for (c1, c2), count in collaborations_count.iteritems():
g[c1][c2]['weight'] = count
return g
| import sys
import itertools
import collections
import networkx as nx
from irco import logging
log = logging.get_logger()
def get_countries(publication):
publication_countries = set()
for affiliation in publication.affiliations:
country = affiliation.institution.country
if country is None:
print >>sys.stderr, 'Undefined country for "{}"'.format(
affiliation.institution.name)
else:
publication_countries.add(country)
return publication_countries
def create(session, publications):
g = nx.Graph()
papers_count = collections.Counter()
collaborations_count = collections.Counter()
for publication in publications:
publication_countries = get_countries(publication)
g.add_nodes_from(publication_countries)
papers_count.update(publication_countries)
collaborations = list(itertools.combinations(
sorted(publication_countries), 2))
collaborations_count.update(collaborations)
g.add_edges_from(collaborations)
# Set papers count
for country, count in papers_count.iteritems():
g.node[country]['papers'] = count
# Set edge weight
for (c1, c2), count in collaborations_count.iteritems():
g[c1][c2]['weight'] = count
return g
| Sort countries before generating combinations. | Sort countries before generating combinations.
| Python | mit | GaretJax/irco,GaretJax/irco,GaretJax/irco,GaretJax/irco | ---
+++
@@ -35,7 +35,8 @@
g.add_nodes_from(publication_countries)
papers_count.update(publication_countries)
- collaborations = list(itertools.combinations(publication_countries, 2))
+ collaborations = list(itertools.combinations(
+ sorted(publication_countries), 2))
collaborations_count.update(collaborations)
g.add_edges_from(collaborations)
|
1c1b8214a00adf7edc62a67575ae960fedf1d67c | irctest/irc_utils/ambiguities.py | irctest/irc_utils/ambiguities.py | """
Handles ambiguities of RFCs.
"""
def normalize_namreply_params(params):
# So… RFC 2812 says:
# "( "=" / "*" / "@" ) <channel>
# :[ "@" / "+" ] <nick> *( " " [ "@" / "+" ] <nick> )
# but spaces seem to be missing (eg. before the colon), so we
# don't know if there should be one before the <channel> and its
# prefix.
# So let's normalize this to “with space”, and strip spaces at the
# end of the nick list.
if len(params) == 3:
assert params[1][0] in "=*@", params
params.insert(1), params[1][0]
params[2] = params[2][1:]
params[3] = params[3].rstrip()
return params
| """
Handles ambiguities of RFCs.
"""
def normalize_namreply_params(params):
# So… RFC 2812 says:
# "( "=" / "*" / "@" ) <channel>
# :[ "@" / "+" ] <nick> *( " " [ "@" / "+" ] <nick> )
# but spaces seem to be missing (eg. before the colon), so we
# don't know if there should be one before the <channel> and its
# prefix.
# So let's normalize this to “with space”, and strip spaces at the
# end of the nick list.
if len(params) == 3:
assert params[1][0] in "=*@", params
params.insert(1, params[1][0])
params[2] = params[2][1:]
params[3] = params[3].rstrip()
return params
| Fix typo in function call | normalize_namreply_params: Fix typo in function call
| Python | mit | ProgVal/irctest | ---
+++
@@ -14,7 +14,7 @@
# end of the nick list.
if len(params) == 3:
assert params[1][0] in "=*@", params
- params.insert(1), params[1][0]
+ params.insert(1, params[1][0])
params[2] = params[2][1:]
params[3] = params[3].rstrip()
return params |
58c7726572ab6b6b762bbbfc3b3397729a9332cd | isaactest/tests/welcome_email.py | isaactest/tests/welcome_email.py | import time
from ..utils.log import log, INFO, ERROR, PASS
from ..utils.i_selenium import assert_tab, image_div
from ..tests import TestWithDependency
__all__ = ["welcome_email"]
#####
# Test : Welcome Email Recieved
#####
@TestWithDependency("WELCOME_EMAIL", ["SIGNUP"])
def welcome_email(driver, inbox, GUERRILLAMAIL, WAIT_DUR, **kwargs):
"""Test if the registration confirmation/welcome email is recieved.
- 'driver' should be a Selenium WebDriver.
- 'inbox' should be a GuerrillaInbox object.
- 'GUERRILLAMAIL' is the string URL of GuerrillaMail.
"""
assert_tab(driver, GUERRILLAMAIL)
inbox.wait_for_email(WAIT_DUR)
log(INFO, "GuerrillaMail: Access welcome email in inbox.")
try:
welcome_emails = inbox.get_by_subject("Welcome to Isaac Physics!")
assert len(welcome_emails) == 1, "Expected to recieve a welcome email, recieved %s emails!" % len(welcome_emails)
welcome_email = welcome_emails[0]
log(INFO, "Got welcome email as expected.")
welcome_email.image()
welcome_email.save_html_body()
log(PASS, "Welcome email recieved!")
return True
except AssertionError, e:
image_div(driver, "ERROR_not_isaac_email")
log(ERROR, e.message + " See 'ERROR_not_isaac_email.png'!")
return False
| import time
from ..utils.log import log, INFO, ERROR, PASS
from ..utils.i_selenium import assert_tab, image_div
from ..tests import TestWithDependency
__all__ = ["welcome_email"]
#####
# Test : Welcome Email Recieved
#####
@TestWithDependency("WELCOME_EMAIL", ["SIGNUP"])
def welcome_email(driver, inbox, GUERRILLAMAIL, WAIT_DUR, **kwargs):
"""Test if the registration confirmation/welcome email is recieved.
- 'driver' should be a Selenium WebDriver.
- 'inbox' should be a GuerrillaInbox object.
- 'GUERRILLAMAIL' is the string URL of GuerrillaMail.
"""
assert_tab(driver, GUERRILLAMAIL)
inbox.wait_for_email(WAIT_DUR)
log(INFO, "GuerrillaMail: Access welcome email in inbox.")
try:
welcome_emails = inbox.get_by_subject("Welcome to Isaac!")
assert len(welcome_emails) == 1, "Expected to recieve a welcome email, recieved %s emails!" % len(welcome_emails)
welcome_email = welcome_emails[0]
log(INFO, "Got welcome email as expected.")
welcome_email.image()
welcome_email.save_html_body()
log(PASS, "Welcome email recieved!")
return True
except AssertionError, e:
image_div(driver, "ERROR_not_isaac_email")
log(ERROR, e.message + " See 'ERROR_not_isaac_email.png'!")
return False
| Fix test to reflect changes to welcome email | Fix test to reflect changes to welcome email
| Python | mit | jsharkey13/isaac-selenium-testing,jsharkey13/isaac-selenium-testing | ---
+++
@@ -22,7 +22,7 @@
log(INFO, "GuerrillaMail: Access welcome email in inbox.")
try:
- welcome_emails = inbox.get_by_subject("Welcome to Isaac Physics!")
+ welcome_emails = inbox.get_by_subject("Welcome to Isaac!")
assert len(welcome_emails) == 1, "Expected to recieve a welcome email, recieved %s emails!" % len(welcome_emails)
welcome_email = welcome_emails[0]
log(INFO, "Got welcome email as expected.") |
c0d8b7f13a74fd4da7b36d30a61224b76367acbe | scraper.py | scraper.py | import urllib, datetime, os
def fetch():
url = 'http://loadmeter.egyptera.org/ClockToolTip.aspx'
output = datetime.datetime.now().strftime('egyptera.%Y-%m-%d-%H-%M-%S.html')
output = os.path.join(os.path.dirname(__file__), output)
content = urllib.urlretrieve(url, output)
if __name__ == '__main__':
fetch() | import urllib, datetime, os
def fetch():
# Instead of doing all the parsing later, I get the status from Mosab's site & store it
url = 'http://power-grid-status.mos3abof.com/status'
output = datetime.datetime.now().strftime('egyptera.%Y-%m-%d-%H-%M-%S.json')
output = os.path.join(os.path.dirname(__file__), output)
content = urllib.urlretrieve(url, output)
if __name__ == '__main__':
fetch() | Use Mosab's site to get the status | Use Mosab's site to get the status
| Python | apache-2.0 | mtayseer/power-grid-scraper | ---
+++
@@ -1,8 +1,9 @@
import urllib, datetime, os
def fetch():
- url = 'http://loadmeter.egyptera.org/ClockToolTip.aspx'
- output = datetime.datetime.now().strftime('egyptera.%Y-%m-%d-%H-%M-%S.html')
+ # Instead of doing all the parsing later, I get the status from Mosab's site & store it
+ url = 'http://power-grid-status.mos3abof.com/status'
+ output = datetime.datetime.now().strftime('egyptera.%Y-%m-%d-%H-%M-%S.json')
output = os.path.join(os.path.dirname(__file__), output)
content = urllib.urlretrieve(url, output)
|
4a7b0fb482011400da0b3e760cde2d6f294d168f | sysrev/models.py | sysrev/models.py | from django.db import models
from django.contrib.auth.models import User
class Review(models.Model):
user = models.ForeignKey(User, default=None)
title = models.CharField(max_length=128)
description = models.TextField()
date_created = models.DateTimeField(auto_now_add=True)
last_modified = models.DateTimeField(auto_now=True)
query = models.TextField()
abstract_pool_size = models.IntegerField()
document_pool_size = models.IntegerField()
final_pool_size = models.IntegerField()
class Paper(models.Model):
review = models.ForeignKey(Review)
title = models.CharField(max_length=128)
authors = models.CharField(max_length=128)
abstract = models.TextField()
publish_date = models.DateField()
url = models.URLField()
notes = models.TextField()
ABSTRACT_POOL = 'A'
DOCUMENT_POOL = 'D'
FINAL_POOL = 'F'
REJECTED = 'R'
POOLS = ((ABSTRACT_POOL, 'Abstract pool'),
(DOCUMENT_POOL, 'Document pool'),
(FINAL_POOL, 'Final pool'),
(REJECTED, 'Rejected'))
pool = models.CharField(max_length=1, choices=POOLS, default=ABSTRACT_POOL)
| from django.db import models
from django.contrib.auth.models import User
class Review(models.Model):
user = models.ForeignKey(User, default=None)
title = models.CharField(max_length=128)
description = models.TextField()
date_created = models.DateTimeField(auto_now_add=True)
last_modified = models.DateTimeField(auto_now=True)
completed = models.BooleanField(default=False)
query = models.TextField()
abstract_pool_size = models.IntegerField()
document_pool_size = models.IntegerField()
final_pool_size = models.IntegerField()
class Paper(models.Model):
review = models.ForeignKey(Review)
title = models.CharField(max_length=128)
authors = models.CharField(max_length=128)
abstract = models.TextField()
publish_date = models.DateField()
url = models.URLField()
notes = models.TextField()
ABSTRACT_POOL = 'A'
DOCUMENT_POOL = 'D'
FINAL_POOL = 'F'
REJECTED = 'R'
POOLS = ((ABSTRACT_POOL, 'Abstract pool'),
(DOCUMENT_POOL, 'Document pool'),
(FINAL_POOL, 'Final pool'),
(REJECTED, 'Rejected'))
pool = models.CharField(max_length=1, choices=POOLS, default=ABSTRACT_POOL)
| Add completed field to review | Add completed field to review
| Python | mit | iliawnek/SystematicReview,iliawnek/SystematicReview,iliawnek/SystematicReview,iliawnek/SystematicReview | ---
+++
@@ -8,6 +8,7 @@
description = models.TextField()
date_created = models.DateTimeField(auto_now_add=True)
last_modified = models.DateTimeField(auto_now=True)
+ completed = models.BooleanField(default=False)
query = models.TextField()
abstract_pool_size = models.IntegerField() |
85f3d1396b5c3f615d83d355a1b836c29d23d295 | tests/__init__.py | tests/__init__.py | import sys
if sys.version_info < (2, 7):
from tests import TestCase # NOQA
else:
from unittest import TestCase # NOQA
| import sys
if sys.version_info < (2, 7):
from unittest2 import TestCase # NOQA
else:
from unittest import TestCase # NOQA
| Use right test case for py 2.6 | Use right test case for py 2.6
| Python | mit | gterzian/exam,gterzian/exam,Fluxx/exam,Fluxx/exam | ---
+++
@@ -2,6 +2,6 @@
if sys.version_info < (2, 7):
- from tests import TestCase # NOQA
+ from unittest2 import TestCase # NOQA
else:
from unittest import TestCase # NOQA |
0337f32a493690f5a0be3893c43b7af3644f964d | tests/runtests.py | tests/runtests.py | #!/usr/bin/env python
from __future__ import print_function, unicode_literals
import sys
import pytest
if __name__ == '__main__':
sys.exit(pytest.main(sys.argv[1:]))
| #!/usr/bin/env python
from __future__ import print_function, unicode_literals
import sys
import pytest
if __name__ == '__main__':
if len(sys.argv) >= 2 and sys.argv[1] != '--':
args = ['--db', sys.argv[1]] + sys.argv[2:]
else:
args = sys.argv[1:]
sys.exit(pytest.main(args))
| Fix running unit tests through the old test runner. | Fix running unit tests through the old test runner.
The old test runner (currently used in CI) had a regression when moving
to pytest. It failed to convert the first argument (the database name to
run everything against) to the new `pytest --db=` argument.
This change adds this back to the test runner. If an argument is passed,
and isn't `--`, it's expected to be the database name as before. All
other arguments are then passed as normal.
Testing Done:
Ran the old test runner with/without a database name, with/without `--`,
and with/without specific tests to run.
Reviewed at https://reviews.reviewboard.org/r/12129/
| Python | bsd-3-clause | beanbaginc/django-evolution | ---
+++
@@ -7,4 +7,9 @@
if __name__ == '__main__':
- sys.exit(pytest.main(sys.argv[1:]))
+ if len(sys.argv) >= 2 and sys.argv[1] != '--':
+ args = ['--db', sys.argv[1]] + sys.argv[2:]
+ else:
+ args = sys.argv[1:]
+
+ sys.exit(pytest.main(args)) |
9d40acb468a5b31d104cc1837b4778bc993326e7 | tests/testwith.py | tests/testwith.py | import sys
if sys.version_info[:2] > (2, 5):
from tests._testwith import *
else:
from tests.support import unittest2
class TestWith(unittest2.TestCase):
@unittest2.skip('tests using with statement skipped on Python 2.4')
def testWith(self):
pass
if __name__ == '__main__':
unittest2.main() | import sys
if sys.version_info[:2] >= (2, 5):
from tests._testwith import *
else:
from tests.support import unittest2
class TestWith(unittest2.TestCase):
@unittest2.skip('tests using with statement skipped on Python 2.4')
def testWith(self):
pass
if __name__ == '__main__':
unittest2.main() | Enable with statement tests for Python 2.5 | Enable with statement tests for Python 2.5
| Python | bsd-2-clause | 5monkeys/mock,Vanuan/mock | ---
+++
@@ -1,6 +1,6 @@
import sys
-if sys.version_info[:2] > (2, 5):
+if sys.version_info[:2] >= (2, 5):
from tests._testwith import *
else:
from tests.support import unittest2 |
9c1907d1b431281632da187617e857c4911c1ee1 | fmsgame_project/urls.py | fmsgame_project/urls.py | from django.conf.urls.defaults import patterns, url, include
from django.views.generic.simple import direct_to_template
import django.views.static
import settings
import views
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
# Example:
# (r'^fmsgame/', include('fmsgame.foo.urls')),
# Uncomment the admin/doc line below and add 'django.contrib.admindocs'
# to INSTALLED_APPS to enable admin documentation:
# (r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# (r'^admin/', include(admin.site.urls)),
(r'^$', direct_to_template, { 'template': 'login.html', } ),
(r'^geolocate$', direct_to_template, { 'template': 'geolocate.html', } ),
( r'^find_issues', views.find_issues, ),
( r'^issue/(?P<issue_id>[\d]+)/$', views.issue ),
# openid login/registration
(r'^openid/', include( 'django_openid_auth.urls' )),
)
if settings.SERVE_STATIC_FILES:
urlpatterns += patterns('',
(r'^static/(?P<path>.*)$',
django.views.static.serve,
{'document_root':settings.MEDIA_ROOT}),
)
| from django.conf.urls.defaults import patterns, url, include
from django.views.generic.simple import direct_to_template
import django.views.static
import settings
import views
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
# Example:
# (r'^fmsgame/', include('fmsgame.foo.urls')),
# Uncomment the admin/doc line below and add 'django.contrib.admindocs'
# to INSTALLED_APPS to enable admin documentation:
# (r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# (r'^admin/', include(admin.site.urls)),
(r'^$', direct_to_template, { 'template': 'login.html', } ),
url(r'^geolocate$', direct_to_template, { 'template': 'geolocate.html', } name='geolocate'),
( r'^find_issues', views.find_issues, ),
( r'^issue/(?P<issue_id>[\d]+)/$', views.issue ),
# openid login/registration
(r'^openid/', include( 'django_openid_auth.urls' )),
)
if settings.SERVE_STATIC_FILES:
urlpatterns += patterns('',
(r'^static/(?P<path>.*)$',
django.views.static.serve,
{'document_root':settings.MEDIA_ROOT}),
)
| Add back in geolocate url name. | Add back in geolocate url name.
| Python | agpl-3.0 | mysociety/fmsgame,mysociety/fmsgame,mysociety/fmsgame | ---
+++
@@ -21,7 +21,7 @@
(r'^$', direct_to_template, { 'template': 'login.html', } ),
- (r'^geolocate$', direct_to_template, { 'template': 'geolocate.html', } ),
+ url(r'^geolocate$', direct_to_template, { 'template': 'geolocate.html', } name='geolocate'),
( r'^find_issues', views.find_issues, ),
( r'^issue/(?P<issue_id>[\d]+)/$', views.issue ), |
8dd3457b20b5ce96cf7e0f5029e3541d57ca116d | wqflask/wqflask/decorators.py | wqflask/wqflask/decorators.py | """This module contains gn2 decorators"""
from flask import g
from typing import Dict
from functools import wraps
from utility.hmac import hmac_creation
from utility.tools import GN_PROXY_URL
import json
import requests
def edit_access_required(f):
"""Use this for endpoints where admins are required"""
@wraps(f)
def wrap(*args, **kwargs):
resource_id: str = ""
if kwargs.get("inbredset_id"): # data type: dataset-publish
resource_id = hmac_creation("dataset-publish:"
f"{kwargs.get('inbredset_id')}:"
f"{kwargs.get('name')}")
if kwargs.get("dataset_name"): # data type: dataset-probe
resource_id = hmac_creation("dataset-probeset:"
f"{kwargs.get('dataset_name')}")
response: Dict = {}
try:
_user_id = g.user_session.record.get(b"user_id",
"").decode("utf-8")
response = json.loads(
requests.get(GN_PROXY_URL + "available?resource="
f"{resource_id}&user={_user_id}").content)
except:
response = {}
if "edit" not in response.get("data", []):
return "You need to be admin", 401
return f(*args, **kwargs)
return wrap
| """This module contains gn2 decorators"""
import hashlib
import hmac
from flask import current_app, g
from typing import Dict
from functools import wraps
import json
import requests
def create_hmac(data: str, secret: str) -> str:
return hmac.new(bytearray(secret, "latin-1"),
bytearray(data, "utf-8"),
hashlib.sha1).hexdigest[:20]
def edit_access_required(f):
"""Use this for endpoints where admins are required"""
@wraps(f)
def wrap(*args, **kwargs):
resource_id: str = ""
if kwargs.get("inbredset_id"): # data type: dataset-publish
resource_id = create_hmac(
data=("dataset-publish:"
f"{kwargs.get('inbredset_id')}:"
f"{kwargs.get('name')}"),
secret=current_app.config.get("SECRET_HMAC_CODE"))
if kwargs.get("dataset_name"): # data type: dataset-probe
resource_id = create_hmac(
data=("dataset-probeset:"
f"{kwargs.get('dataset_name')}"),
secret=current_app.config.get("SECRET_HMAC_CODE"))
response: Dict = {}
try:
_user_id = g.user_session.record.get(b"user_id",
"").decode("utf-8")
response = json.loads(
requests.get(GN_PROXY_URL + "available?resource="
f"{resource_id}&user={_user_id}").content)
except:
response = {}
if "edit" not in response.get("data", []):
return "You need to be admin", 401
return f(*args, **kwargs)
return wrap
| Remove "utility.hmac.hmac_creation" which causes circular imports | Remove "utility.hmac.hmac_creation" which causes circular imports
Hacky but re-implement `hmac_creation` as `create_hmac`
| Python | agpl-3.0 | pjotrp/genenetwork2,pjotrp/genenetwork2,genenetwork/genenetwork2,pjotrp/genenetwork2,pjotrp/genenetwork2,genenetwork/genenetwork2,genenetwork/genenetwork2,pjotrp/genenetwork2,genenetwork/genenetwork2 | ---
+++
@@ -1,26 +1,34 @@
"""This module contains gn2 decorators"""
-from flask import g
+import hashlib
+import hmac
+from flask import current_app, g
from typing import Dict
from functools import wraps
-from utility.hmac import hmac_creation
-from utility.tools import GN_PROXY_URL
import json
import requests
+def create_hmac(data: str, secret: str) -> str:
+ return hmac.new(bytearray(secret, "latin-1"),
+ bytearray(data, "utf-8"),
+ hashlib.sha1).hexdigest[:20]
def edit_access_required(f):
"""Use this for endpoints where admins are required"""
@wraps(f)
def wrap(*args, **kwargs):
resource_id: str = ""
if kwargs.get("inbredset_id"): # data type: dataset-publish
- resource_id = hmac_creation("dataset-publish:"
- f"{kwargs.get('inbredset_id')}:"
- f"{kwargs.get('name')}")
+ resource_id = create_hmac(
+ data=("dataset-publish:"
+ f"{kwargs.get('inbredset_id')}:"
+ f"{kwargs.get('name')}"),
+ secret=current_app.config.get("SECRET_HMAC_CODE"))
if kwargs.get("dataset_name"): # data type: dataset-probe
- resource_id = hmac_creation("dataset-probeset:"
- f"{kwargs.get('dataset_name')}")
+ resource_id = create_hmac(
+ data=("dataset-probeset:"
+ f"{kwargs.get('dataset_name')}"),
+ secret=current_app.config.get("SECRET_HMAC_CODE"))
response: Dict = {}
try:
_user_id = g.user_session.record.get(b"user_id", |
fb1cfd15c646cc076a152acbd823b736f2a46724 | studygroups/migrations/0028_auto_20150806_0039.py | studygroups/migrations/0028_auto_20150806_0039.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('studygroups', '0027_auto_20150513_2005'),
]
operations = [
migrations.CreateModel(
name='Location',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=256)),
('address', models.CharField(max_length=256)),
('contact_name', models.CharField(max_length=256)),
('contact', models.CharField(max_length=256)),
('link', models.URLField()),
],
),
migrations.RemoveField(
model_name='studygroup',
name='location_link',
),
migrations.AddField(
model_name='studygroup',
name='location_details',
field=models.CharField(default='none', max_length=128),
preserve_default=False,
),
migrations.AlterField(
model_name='studygroup',
name='location',
field=models.ForeignKey(to='studygroups.Location'),
),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('studygroups', '0027_auto_20150513_2005'),
]
operations = [
migrations.CreateModel(
name='Location',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=256)),
('address', models.CharField(max_length=256)),
('contact_name', models.CharField(max_length=256)),
('contact', models.CharField(max_length=256)),
('link', models.URLField()),
],
),
migrations.RemoveField(
model_name='studygroup',
name='location_link',
),
migrations.AddField(
model_name='studygroup',
name='location_details',
field=models.CharField(default='none', max_length=128),
preserve_default=False,
),
migrations.RemoveField(
model_name='studygroup',
name='location',
),
migrations.AddField(
model_name='studygroup',
name='location',
field=models.ForeignKey(to='studygroups.Location'),
),
]
| Fix migration that fails on postgres | Fix migration that fails on postgres
| Python | mit | p2pu/learning-circles,p2pu/learning-circles,p2pu/learning-circles,p2pu/learning-circles | ---
+++
@@ -32,7 +32,11 @@
field=models.CharField(default='none', max_length=128),
preserve_default=False,
),
- migrations.AlterField(
+ migrations.RemoveField(
+ model_name='studygroup',
+ name='location',
+ ),
+ migrations.AddField(
model_name='studygroup',
name='location',
field=models.ForeignKey(to='studygroups.Location'), |
e78b8704a68b49593b43ac5b9690be36fca2e274 | default_config.py | default_config.py | class DefaultConfig():
secret_key = "LONG_AND_RANDOM"
tmdb_api_key = "THEMOVIEDB.ORG_API_KEY"
| class DefaultConfig():
secret_key = None # Django secret key used for sessions. Make this a long random string and keep it a secret
tmdb_api_key = None # API key for The Movie Database (themoviedb.org).
| Set default config values to none so user is forced to set them. | Set default config values to none so user is forced to set them.
Also document fields in the default config
| Python | mit | simon-andrews/movieman2,simon-andrews/movieman2 | ---
+++
@@ -1,3 +1,3 @@
class DefaultConfig():
- secret_key = "LONG_AND_RANDOM"
- tmdb_api_key = "THEMOVIEDB.ORG_API_KEY"
+ secret_key = None # Django secret key used for sessions. Make this a long random string and keep it a secret
+ tmdb_api_key = None # API key for The Movie Database (themoviedb.org). |
745148817bacacc26ff06a8470cf52c815f6565a | neutronclient/neutron/v2_0/availability_zone.py | neutronclient/neutron/v2_0/availability_zone.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutronclient.i18n import _
from neutronclient.neutron import v2_0 as neutronv20
def add_az_hint_argument(parser, resource):
parser.add_argument(
'--availability-zone-hint', metavar='AVAILABILITY_ZONE',
action='append', dest='availability_zone_hints',
help=_('Availability Zone for the %s '
'(requires availability zone extension, '
'this option can be repeated).') % resource)
def args2body_az_hint(parsed_args, resource):
if parsed_args.availability_zone_hints:
resource['availability_zone_hints'] = (
parsed_args.availability_zone_hints)
class ListAvailabilityZone(neutronv20.ListCommand):
"""List availability zones."""
resource = 'availability_zone'
list_columns = ['name', 'resource', 'state']
pagination_support = True
sorting_support = True
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutronclient._i18n import _
from neutronclient.neutron import v2_0 as neutronv20
def add_az_hint_argument(parser, resource):
parser.add_argument(
'--availability-zone-hint', metavar='AVAILABILITY_ZONE',
action='append', dest='availability_zone_hints',
help=_('Availability Zone for the %s '
'(requires availability zone extension, '
'this option can be repeated).') % resource)
def args2body_az_hint(parsed_args, resource):
if parsed_args.availability_zone_hints:
resource['availability_zone_hints'] = (
parsed_args.availability_zone_hints)
class ListAvailabilityZone(neutronv20.ListCommand):
"""List availability zones."""
resource = 'availability_zone'
list_columns = ['name', 'resource', 'state']
pagination_support = True
sorting_support = True
| Convert remaining use of neutronclient.i18n to _i18n | Convert remaining use of neutronclient.i18n to _i18n
Change-Id: I77f168af92ae51ce16bed4988bbcaf7c18557727
Related-Bug: 1519493
| Python | apache-2.0 | huntxu/python-neutronclient,eayunstack/python-neutronclient,rackerlabs/rackspace-python-neutronclient,eayunstack/python-neutronclient,Juniper/python-neutronclient,noironetworks/python-neutronclient,Juniper/python-neutronclient,noironetworks/python-neutronclient,huntxu/python-neutronclient,openstack/python-neutronclient,rackerlabs/rackspace-python-neutronclient,openstack/python-neutronclient | ---
+++
@@ -10,7 +10,7 @@
# License for the specific language governing permissions and limitations
# under the License.
-from neutronclient.i18n import _
+from neutronclient._i18n import _
from neutronclient.neutron import v2_0 as neutronv20
|
bc5abf988956235b48aeb1234d9944fe70be619a | pytest_hidecaptured.py | pytest_hidecaptured.py | # -*- coding: utf-8 -*-
def pytest_runtest_logreport(report):
"""Overwrite report by removing any captured stderr."""
# print("PLUGIN SAYS -> report -> {0}".format(report))
# print("PLUGIN SAYS -> report.sections -> {0}".format(report.sections))
# print("PLUGIN SAYS -> dir(report) -> {0}".format(dir(report)))
# print("PLUGIN SAYS -> type(report) -> {0}".format(type(report)))
sections = [item for item in report.sections if item[0] not in ("Captured stdout call", "Captured stderr call", "Captured stdout setup", "Captured stderr setup", "Captured stdout teardown", "Captured stderr teardown")]
# print("PLUGIN SAYS -> sections -> {0}".format(sections))
report.sections = sections
| # -*- coding: utf-8 -*-
import pytest
@pytest.mark.tryfirst
def pytest_runtest_logreport(report):
"""Overwrite report by removing any captured stderr."""
# print("PLUGIN SAYS -> report -> {0}".format(report))
# print("PLUGIN SAYS -> report.sections -> {0}".format(report.sections))
# print("PLUGIN SAYS -> dir(report) -> {0}".format(dir(report)))
# print("PLUGIN SAYS -> type(report) -> {0}".format(type(report)))
sections = [item for item in report.sections if item[0] not in ("Captured stdout call", "Captured stderr call", "Captured stdout setup", "Captured stderr setup", "Captured stdout teardown", "Captured stderr teardown")]
# print("PLUGIN SAYS -> sections -> {0}".format(sections))
report.sections = sections
| Fix interop issues with pytest-instafail | Fix interop issues with pytest-instafail
| Python | mit | hamzasheikh/pytest-hidecaptured | ---
+++
@@ -1,4 +1,7 @@
# -*- coding: utf-8 -*-
+import pytest
+
+@pytest.mark.tryfirst
def pytest_runtest_logreport(report):
"""Overwrite report by removing any captured stderr."""
# print("PLUGIN SAYS -> report -> {0}".format(report)) |
74db127246b7111a35c64079eec91d46f88ebd55 | src/test/stresstest.py | src/test/stresstest.py | #!/usr/bin/env python
# Copyright 2007 Albert Strasheim <fullung@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
if __name__ == '__main__':
testLoader = unittest.defaultTestLoader
module = __import__('__main__')
test = testLoader.loadTestsFromModule(module)
testRunner = unittest.TextTestRunner(verbosity=2)
for i in xrange(100):
result = testRunner.run(test)
| #!/usr/bin/env python
# Copyright 2007 Albert Strasheim <fullung@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from test_openwire_async import *
from test_openwire_sync import *
from test_stomp_async import *
from test_stomp_sync import *
from test_types import *
if __name__ == '__main__':
testLoader = unittest.defaultTestLoader
module = __import__('__main__')
test = testLoader.loadTestsFromModule(module)
testRunner = unittest.TextTestRunner(verbosity=2)
for i in xrange(100):
result = testRunner.run(test)
| Test everything with stress test. | Test everything with stress test.
| Python | apache-2.0 | tabish121/pyActiveMQ,tabish121/pyActiveMQ,tabish121/pyActiveMQ | ---
+++
@@ -16,6 +16,12 @@
import unittest
+from test_openwire_async import *
+from test_openwire_sync import *
+from test_stomp_async import *
+from test_stomp_sync import *
+from test_types import *
+
if __name__ == '__main__':
testLoader = unittest.defaultTestLoader
module = __import__('__main__') |
e422f77898853fc759d3828c4053b799cd2b1fa3 | plumeria/plugins/bot_control.py | plumeria/plugins/bot_control.py | from plumeria.command import commands, CommandError
from plumeria.message.lists import build_list
from plumeria.perms import owners_only
from plumeria.transport import transports
@commands.register('accept invite', category='Discord')
@owners_only
async def accept_invite(message):
"""
Accept an invite to join a server.
Example::
/accept invite https://discord.gg/00000
"""
url = message.content.strip()
results = []
if not len(url):
raise CommandError("Supply an invite URL.")
for transport in transports.transports.values():
if hasattr(transport, 'accept_invite'):
try:
await transport.accept_invite(url)
results.append((transport.id, 'Success \N{WHITE HEAVY CHECK MARK}'))
except Exception as e:
results.append((transport.id, '\N{WARNING SIGN} {}'.format(str(e))))
else:
results.append((transport.id, "\N{WARNING SIGN} No support for invite links"))
if len(results):
return build_list(["**{}:** {}".format(e[0], e[1]) for e in results])
else:
raise CommandError("No transports available.")
| from plumeria.command import commands, CommandError
from plumeria.message.lists import build_list
from plumeria.perms import owners_only
from plumeria.transport import transports
@commands.register('join', category='Discord')
@owners_only
async def join(message):
"""
Accept an invite to join a server.
Example::
/join https://discord.gg/00000
"""
url = message.content.strip()
results = []
if not len(url):
raise CommandError("Supply an invite URL.")
for transport in transports.transports.values():
if hasattr(transport, 'accept_invite'):
try:
await transport.accept_invite(url)
results.append((transport.id, 'Success \N{WHITE HEAVY CHECK MARK}'))
except Exception as e:
results.append((transport.id, '\N{WARNING SIGN} {}'.format(str(e))))
else:
results.append((transport.id, "\N{WARNING SIGN} No support for invite links"))
if len(results):
return build_list(["**{}:** {}".format(e[0], e[1]) for e in results])
else:
raise CommandError("No transports available.")
| Use /join instead of /accept invite. | Use /join instead of /accept invite.
| Python | mit | sk89q/Plumeria,sk89q/Plumeria,sk89q/Plumeria | ---
+++
@@ -4,15 +4,15 @@
from plumeria.transport import transports
-@commands.register('accept invite', category='Discord')
+@commands.register('join', category='Discord')
@owners_only
-async def accept_invite(message):
+async def join(message):
"""
Accept an invite to join a server.
Example::
- /accept invite https://discord.gg/00000
+ /join https://discord.gg/00000
"""
url = message.content.strip()
results = [] |
d8da4526375946551fd3963b42f2bb13035abb2d | hijack_admin/tests/test_hijack_admin.py | hijack_admin/tests/test_hijack_admin.py | # -*- coding: utf-8 -*-
from hijack.tests.test_hijack import BaseHijackTests
from hijack.tests.utils import SettingsOverride
from hijack_admin import settings as hijack_admin_settings
from hijack_admin.tests.test_app.models import RelatedModel
class HijackAdminTests(BaseHijackTests):
def setUp(self):
super(HijackAdminTests, self).setUp()
def tearDown(self):
super(HijackAdminTests, self).tearDown()
def test_hijack_button(self):
response = self.client.get('/admin/auth/user/')
self.assertTrue('<a href="/hijack/{}/" class="button">'.format(self.user.id) in str(response.content))
def test_hijack_button_related(self):
RelatedModel.objects.create(user=self.user)
response = self.client.get('/admin/test_app/relatedmodel/')
self.assertTrue('<a href="/hijack/{}/" class="button">'.format(self.user.id) in str(response.content))
def test_settings(self):
self.assertTrue(hasattr(hijack_admin_settings, 'HIJACK_BUTTON_TEMPLATE'))
self.assertEqual(hijack_admin_settings.HIJACK_BUTTON_TEMPLATE, 'hijack_admin/admin_button.html')
self.assertTrue(hasattr(hijack_admin_settings, 'HIJACK_REGISTER_ADMIN'))
self.assertEqual(hijack_admin_settings.HIJACK_REGISTER_ADMIN, True)
| # -*- coding: utf-8 -*-
from hijack.tests.test_hijack import BaseHijackTests
from hijack_admin import settings as hijack_admin_settings
from hijack_admin.tests.test_app.models import RelatedModel
class HijackAdminTests(BaseHijackTests):
def setUp(self):
super(HijackAdminTests, self).setUp()
def tearDown(self):
super(HijackAdminTests, self).tearDown()
def test_hijack_button(self):
response = self.client.get('/admin/auth/user/')
self.assertTrue('<a href="/hijack/{}/" class="button">'.format(self.user.id) in str(response.content))
def test_hijack_button_related(self):
RelatedModel.objects.create(user=self.user)
response = self.client.get('/admin/test_app/relatedmodel/')
self.assertTrue('<a href="/hijack/{}/" class="button">'.format(self.user.id) in str(response.content))
def test_settings(self):
self.assertTrue(hasattr(hijack_admin_settings, 'HIJACK_BUTTON_TEMPLATE'))
self.assertEqual(hijack_admin_settings.HIJACK_BUTTON_TEMPLATE, 'hijack_admin/admin_button.html')
self.assertTrue(hasattr(hijack_admin_settings, 'HIJACK_REGISTER_ADMIN'))
self.assertEqual(hijack_admin_settings.HIJACK_REGISTER_ADMIN, True)
| Remove unused import in tests | Remove unused import in tests
| Python | mit | arteria/django-hijack-admin,arteria/django-hijack-admin,arteria/django-hijack-admin | ---
+++
@@ -1,7 +1,5 @@
# -*- coding: utf-8 -*-
-
from hijack.tests.test_hijack import BaseHijackTests
-from hijack.tests.utils import SettingsOverride
from hijack_admin import settings as hijack_admin_settings
from hijack_admin.tests.test_app.models import RelatedModel |
28385e10d00be3b68d312ed6ea19695c21c0beeb | version.py | version.py | major = 0
minor=0
patch=19
branch="master"
timestamp=1376526199.16 | major = 0
minor=0
patch=20
branch="master"
timestamp=1376526219.94 | Tag commit for v0.0.20-master generated by gitmake.py | Tag commit for v0.0.20-master generated by gitmake.py
| Python | mit | ryansturmer/gitmake | ---
+++
@@ -1,5 +1,5 @@
major = 0
minor=0
-patch=19
+patch=20
branch="master"
-timestamp=1376526199.16
+timestamp=1376526219.94 |
d69b41307b94db7e8658fd209fdae0e0240bc62a | Monstr/Core/Config.py | Monstr/Core/Config.py | import ConfigParser
Config = ConfigParser.ConfigParser()
import os
print os.getcwd()
try:
Config.read('/opt/monstr/current.cfg')
except Exception as e:
print 'WARNING! Configuration is missing. Using test_conf.cfg'
Config.read('test.cfg')
def get_section(section):
result = {}
if section in Config.sections():
options = Config.options(section)
for option in options:
result[option] = Config.get(section, option)
return result
else:
raise 'Requested section is absent in configuration'
| import ConfigParser
Config = ConfigParser.ConfigParser()
import os
print os.getcwd()
try:
Config.read('/opt/monstr/current.cfg')
except Exception as e:
print 'WARNING! Configuration is missing. Using test_conf.cfg'
Config.read('test.cfg')
def get_section(section):
result = {}
if section in Config.sections():
options = Config.options(section)
for option in options:
result[option] = Config.get(section, option)
return result
else:
raise Exception('Requested section is absent in configuration')
| Raise Exception instead of plain string | FIX: Raise Exception instead of plain string
| Python | apache-2.0 | tier-one-monitoring/monstr,tier-one-monitoring/monstr | ---
+++
@@ -17,4 +17,4 @@
result[option] = Config.get(section, option)
return result
else:
- raise 'Requested section is absent in configuration'
+ raise Exception('Requested section is absent in configuration') |
4c414e753cc086e0ae52425a3eae6de453b492ca | lot/landmapper/tests/test_pdf.py | lot/landmapper/tests/test_pdf.py | from django.test import TestCase
from django.conf import settings
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
# from selenium.webdriver.firefox.webdriver import WebDriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
class PdfTests(StaticLiveServerTestCase):
"""
Open a browser
Make API call at url /report/<str:property_id>/pdf
Check PDF in page title
Check PDF images are correct size
"""
@classmethod
def setUpClass(self):
super().setUpClass()
self.selenium = webdriver.firefox.webdriver.WebDriver()
self.selenium.implicitly_wait(10)
@classmethod
def tearDownClass(self):
self.selenium.quit()
super().tearDownClass()
def test_cover_map_img(self):
property_id = 'Demo%7C583966%7C862934'
# New URL
self.selenium.get("http://localhost:8000/report/%s/%s/map_509x722" % (property_id, 'property'))
# return img
# check dimensions
# self.assert()
def test_create_pdf(self):
property_id = 'Demo%7C583966%7C862934'
self.selenium.get("http://localhost:8000/report/%s/pdf" % property_id)
self.assertIn('pdf', self.selenium.title)
| from django.test import TestCase
from django.conf import settings
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
# from selenium.webdriver.firefox.webdriver import WebDriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
class PdfTests(StaticLiveServerTestCase):
"""
Open a browser
Make API call at url /report/<str:property_id>/pdf
Check PDF in page title
Check PDF images are correct size
"""
@classmethod
def setUpClass(self):
super().setUpClass()
self.selenium = webdriver.firefox.webdriver.WebDriver()
self.selenium.implicitly_wait(10)
@classmethod
def tearDownClass(self):
self.selenium.quit()
super().tearDownClass()
def test_cover_map_img(self):
property_id = 'Demo%7C583966%7C862934'
# New URL
self.selenium.get("http://localhost:8000/report/%s/%s/map_alt" % (property_id, 'property'))
# return img
# check dimensions
# self.assert()
def test_create_pdf(self):
property_id = 'Demo%7C583966%7C862934'
self.selenium.get("http://localhost:8000/report/%s/pdf" % property_id)
self.assertIn('pdf', self.selenium.title)
| Use alt naming convention instead | Use alt naming convention instead
| Python | bsd-3-clause | Ecotrust/forestplanner,Ecotrust/forestplanner,Ecotrust/forestplanner,Ecotrust/forestplanner,Ecotrust/forestplanner,Ecotrust/forestplanner,Ecotrust/forestplanner,Ecotrust/forestplanner | ---
+++
@@ -32,7 +32,7 @@
def test_cover_map_img(self):
property_id = 'Demo%7C583966%7C862934'
# New URL
- self.selenium.get("http://localhost:8000/report/%s/%s/map_509x722" % (property_id, 'property'))
+ self.selenium.get("http://localhost:8000/report/%s/%s/map_alt" % (property_id, 'property'))
# return img
# check dimensions
|
ded0d2ced823deeabf860abd9ec5120165ed7fde | djoser/utils.py | djoser/utils.py | from django.contrib.auth import user_logged_in, user_logged_out, login, logout
from django.utils.encoding import force_bytes, force_text
from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode
from djoser.conf import settings
def encode_uid(pk):
return urlsafe_base64_encode(force_bytes(pk)).decode()
def decode_uid(pk):
return force_text(urlsafe_base64_decode(pk))
def login_user(request, user):
token, _ = settings.TOKEN_MODEL.objects.get_or_create(user=user)
if settings.CREATE_SESSION_ON_LOGIN:
login(request, user)
user_logged_in.send(sender=user.__class__, request=request, user=user)
return token
def logout_user(request):
if settings.TOKEN_MODEL:
settings.TOKEN_MODEL.objects.filter(user=request.user).delete()
user_logged_out.send(
sender=request.user.__class__, request=request, user=request.user
)
if settings.CREATE_SESSION_ON_LOGIN:
logout(request)
class ActionViewMixin(object):
def post(self, request):
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
return self._action(serializer)
| from django.contrib.auth import user_logged_in, user_logged_out, login, logout
from django.utils.encoding import force_bytes, force_text
from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode
from djoser.conf import settings
def encode_uid(pk):
return urlsafe_base64_encode(force_bytes(pk)).decode()
def decode_uid(pk):
return force_text(urlsafe_base64_decode(pk))
def login_user(request, user):
token, _ = settings.TOKEN_MODEL.objects.get_or_create(user=user)
if settings.CREATE_SESSION_ON_LOGIN:
login(request, user)
user_logged_in.send(sender=user.__class__, request=request, user=user)
return token
def logout_user(request):
if settings.TOKEN_MODEL:
settings.TOKEN_MODEL.objects.filter(user=request.user).delete()
user_logged_out.send(
sender=request.user.__class__, request=request, user=request.user
)
if settings.CREATE_SESSION_ON_LOGIN:
logout(request)
class ActionViewMixin(object):
def post(self, request, **kwargs):
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
return self._action(serializer)
| Add **kwargs to ActionViewmixin.post() handler | Add **kwargs to ActionViewmixin.post() handler
Details: #359
| Python | mit | sunscrapers/djoser,sunscrapers/djoser,sunscrapers/djoser | ---
+++
@@ -32,7 +32,7 @@
class ActionViewMixin(object):
- def post(self, request):
+ def post(self, request, **kwargs):
serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
return self._action(serializer) |
b49c40dddb5b90881f9f86872cb6a3b6044e1637 | djpg/signals.py | djpg/signals.py | import logging
logger = logging.getLogger('djpg')
from django.dispatch import Signal
from .codes import codes
notification_received = Signal(providing_args=['notification'])
transaction_received = Signal(providing_args=['transaction'])
transaction_waiting = Signal()
transaction_analysis = Signal()
transaction_paid = Signal()
transaction_available = Signal()
transaction_dispute = Signal()
transaction_returned = Signal()
transaction_canceled = Signal()
transaction_unknown = Signal()
def dispatch_transaction(sender, **kwargs):
transaction = kwargs.pop('transaction')
code = int(transaction['code'])
status = int(transaction['status'])
signals = {
codes.waiting: transaction_waiting,
codes.analysis: transaction_analysis,
codes.paid: transaction_paid,
codes.available: transaction_available,
codes.dispute: transaction_dispute,
codes.returned: transaction_returned,
codes.canceled: transaction_canceled
}
signals \
.get(status, transaction_unknown) \
.send(sender=None, transaction=transaction)
logger.info('Transaction with status "%s" and code "%s" dispatched'
% (status, code))
transaction_received.connect(dispatch_transaction)
| import logging
logger = logging.getLogger('djpg')
from django.dispatch import Signal
from .codes import codes
notification_received = Signal(providing_args=['notification'])
transaction_received = Signal(providing_args=['transaction'])
transaction_waiting = Signal()
transaction_analysis = Signal()
transaction_paid = Signal()
transaction_available = Signal()
transaction_dispute = Signal()
transaction_returned = Signal()
transaction_canceled = Signal()
transaction_unknown = Signal()
def dispatch_transaction(sender, **kwargs):
transaction = kwargs.pop('transaction')
code = transaction['code']
status = int(transaction['status'])
signals = {
codes.waiting: transaction_waiting,
codes.analysis: transaction_analysis,
codes.paid: transaction_paid,
codes.available: transaction_available,
codes.dispute: transaction_dispute,
codes.returned: transaction_returned,
codes.canceled: transaction_canceled
}
signals \
.get(status, transaction_unknown) \
.send(sender=None, transaction=transaction)
logger.info('Transaction with status "%s" and code "%s" dispatched'
% (status, code))
transaction_received.connect(dispatch_transaction)
| Fix bug when trying to convert transaction code to int | Fix bug when trying to convert transaction code to int
| Python | mit | mstrcnvs/djpg | ---
+++
@@ -19,7 +19,7 @@
def dispatch_transaction(sender, **kwargs):
transaction = kwargs.pop('transaction')
- code = int(transaction['code'])
+ code = transaction['code']
status = int(transaction['status'])
signals = { |
9349fc0f176a40de58fba551b205c771af3ea7b7 | djproxy/util.py | djproxy/util.py | # import_string was appropriated from django and then rewritten for broader
# python support. The version of this method can't be imported from Django
# directly because it didn't exist until 1.7.
def import_string(dotted_path):
"""
Import a dotted module path.
Returns the attribute/class designated by the last name in the path.
Raises ImportError if the import fails.
"""
try:
module_path, class_name = dotted_path.rsplit('.', 1)
except ValueError:
raise ImportError('%s doesn\'t look like a valid path' % dotted_path)
module = __import__(module_path, fromlist=['class_name'])
try:
return getattr(module, class_name)
except AttributeError:
msg = 'Module "%s" does not define a "%s" attribute/class' % (
dotted_path, class_name)
raise ImportError(msg)
| # import_string was appropriated from django and then rewritten for broader
# python support. The version of this method can't be imported from Django
# directly because it didn't exist until 1.7.
def import_string(dotted_path):
    """
    Import a dotted module path.

    Returns the attribute/class designated by the last name in the path.
    Raises ImportError if the import fails.
    """
    try:
        parent, leaf = dotted_path.rsplit('.', 1)
    except ValueError:
        raise ImportError('%s doesn\'t look like a valid path' % dotted_path)

    # A non-empty fromlist makes __import__ hand back the leaf module
    # rather than the top-level package.
    module = __import__(parent, fromlist=[leaf])

    if not hasattr(module, leaf):
        raise ImportError(
            'Module "%s" does not define a "%s" attribute/class'
            % (dotted_path, leaf))
    return getattr(module, leaf)
| Use valid fromlist parameter when calling __import__ | Use valid fromlist parameter when calling __import__
This was working before, but it wasn't what I had intended to write. It doesn't really matter what is passed in here as long as it isn't an empty list.
| Python | mit | thomasw/djproxy | ---
+++
@@ -17,7 +17,7 @@
except ValueError:
raise ImportError('%s doesn\'t look like a valid path' % dotted_path)
- module = __import__(module_path, fromlist=['class_name'])
+ module = __import__(module_path, fromlist=[class_name])
try:
return getattr(module, class_name) |
6a98ec7a72d91da09c40fe189fcfce51c7736782 | app/schedule/tasks.py | app/schedule/tasks.py | from django.conf import settings
from app.schedule.celery import celery_app
from app.schedule.libs.sms import DeviceNotFoundError
@celery_app.task(bind=True)
def send_message(self, to, message):
    """Send an SMS ``message`` to ``to`` via the configured gateway.

    The messenger class and gateway credentials come from Django settings.
    If no gateway device is available, the task retries every 5 minutes,
    up to ``settings.CELERY_TASK_MAX_RETRY`` times.

    Returns the messenger's send result so the delivery status is stored
    with the task result instead of being silently discarded.
    """
    messenger = settings.APP_MESSENGER_CLASS
    user = settings.SMS_GATEWAY_USER
    password = settings.SMS_GATEWAY_PASSWORD
    try:
        messenger = messenger(user, password)
        messenger.get_best_device()
        # Propagate the send status to the caller / result backend.
        return messenger.send_message(to, message)
    except DeviceNotFoundError as e:
        self.retry(exc=e, max_retries=settings.CELERY_TASK_MAX_RETRY, countdown=60 * 5)
| from django.conf import settings
from app.schedule.celery import celery_app
from app.schedule.libs.sms import DeviceNotFoundError
@celery_app.task(bind=True)
def send_message(self, to, message):
    """Deliver an SMS to ``to`` through the configured gateway and return
    the send status.

    Retries every 5 minutes (up to ``settings.CELERY_TASK_MAX_RETRY``
    times) while no gateway device is available.
    """
    messenger_cls = settings.APP_MESSENGER_CLASS
    gateway_user = settings.SMS_GATEWAY_USER
    gateway_password = settings.SMS_GATEWAY_PASSWORD
    try:
        gateway = messenger_cls(gateway_user, gateway_password)
        gateway.get_best_device()
        return gateway.send_message(to, message)
    except DeviceNotFoundError as exc:
        self.retry(
            exc=exc,
            max_retries=settings.CELERY_TASK_MAX_RETRY,
            countdown=60 * 5,
        )
| Return the message status to the message broker | fix: Return the message status to the message broker
| Python | agpl-3.0 | agendaodonto/server,agendaodonto/server | ---
+++
@@ -12,6 +12,6 @@
try:
messenger = messenger(user, password)
messenger.get_best_device()
- messenger.send_message(to, message)
+ return messenger.send_message(to, message)
except DeviceNotFoundError as e:
self.retry(exc=e, max_retries=settings.CELERY_TASK_MAX_RETRY, countdown=60 * 5) |
9fefa30f51f1a3c0e4586bc21c36324c6dfbbc87 | test/tst_filepath.py | test/tst_filepath.py | import os
import unittest
import netCDF4
class test_filepath(unittest.TestCase):
    """Check that Dataset.filepath() echoes the path used to open it."""

    def setUp(self):
        path = os.path.join(os.getcwd(), "netcdf_dummy_file.nc")
        self.netcdf_file = path
        self.nc = netCDF4.Dataset(path)

    def test_filepath(self):
        assert self.nc.filepath() == str(self.netcdf_file)
if __name__ == '__main__':
unittest.main()
| import os
import unittest
import tempfile
import netCDF4
class test_filepath(unittest.TestCase):
    """Tests for Dataset.filepath()."""

    def setUp(self):
        self.netcdf_file = os.path.join(os.getcwd(), "netcdf_dummy_file.nc")
        self.nc = netCDF4.Dataset(self.netcdf_file)

    def test_filepath(self):
        assert self.nc.filepath() == str(self.netcdf_file)

    def test_filepath_with_non_ascii_characters(self):
        # Create an nc file below a directory whose name contains
        # non-ASCII characters.
        tempdir = tempfile.mkdtemp(prefix='ÄÖÜß_')
        try:
            nc_non_ascii_file = os.path.join(tempdir, "Besançonalléestraße.nc")
            nc_non_ascii = netCDF4.Dataset(nc_non_ascii_file, 'w')
            try:
                # filepath() must not raise UnicodeDecodeError on the
                # non-ASCII path.
                assert nc_non_ascii.filepath() == str(nc_non_ascii_file)
            finally:
                nc_non_ascii.close()
        finally:
            # Clean up even when the assertion fails, so no temp files
            # leak between test runs.
            if os.path.exists(nc_non_ascii_file):
                os.remove(nc_non_ascii_file)
            os.rmdir(tempdir)
if __name__ == '__main__':
unittest.main()
| Add test for filepath with non-ascii-chars | Add test for filepath with non-ascii-chars | Python | mit | Unidata/netcdf4-python,Unidata/netcdf4-python,Unidata/netcdf4-python | ---
+++
@@ -1,5 +1,6 @@
import os
import unittest
+import tempfile
import netCDF4
class test_filepath(unittest.TestCase):
@@ -11,5 +12,20 @@
def test_filepath(self):
assert self.nc.filepath() == str(self.netcdf_file)
+ def test_filepath_with_non_ascii_characters(self):
+ # create nc-file in a filepath with Non-Ascii-Characters
+ tempdir = tempfile.mkdtemp(prefix='ÄÖÜß_')
+ nc_non_ascii_file = os.path.join(tempdir, "Besançonalléestraße.nc")
+ nc_non_ascii = netCDF4.Dataset(nc_non_ascii_file, 'w')
+
+ # test that no UnicodeDecodeError occur in the filepath() method
+ assert nc_non_ascii.filepath() == str(nc_non_ascii_file)
+
+ # cleanup
+ nc_non_ascii.close()
+ os.remove(nc_non_ascii_file)
+ os.rmdir(tempdir)
+
+
if __name__ == '__main__':
unittest.main() |
42abaca67b742ff343c4f6c5553b9eb9dad28d43 | skeleton/__init__.py | skeleton/__init__.py | """
Basic Template system for project skeleton.
skeleton is similar to the template part of PasteScript but
without any dependencies; it should also be compatible with Python 3.
However in this early phase of development, it only target python 2.5+,
and tests require Mock.
"""
from skeleton.core import Skeleton, Var
| """
Basic Template system for project skeleton.
skeleton is similar to the template part of PasteScript but
without any dependencies.
"""
from skeleton.core import Skeleton, Var
from skeleton.utils import insert_into_file
| Add insert_into_file to skeleton module | Add insert_into_file to skeleton module | Python | bsd-2-clause | dinoboff/skeleton | ---
+++
@@ -2,10 +2,8 @@
Basic Template system for project skeleton.
skeleton is similar to the template part of PasteScript but
-without any dependencies; it should also be compatible with Python 3.
-
-However in this early phase of development, it only target python 2.5+,
-and tests require Mock.
+without any dependencies.
"""
from skeleton.core import Skeleton, Var
+from skeleton.utils import insert_into_file |
15eae70c91cd08f9028944f8b6a3990d3170aa28 | snippet_parser/fr.py | snippet_parser/fr.py | #-*- encoding: utf-8 -*-
import base
def handle_date(template):
    """Render the French {{date}} template as plain text.

    Handles both {{date|d|m|y|...}} (third parameter parses as an integer
    year) and {{date|d m y|...}} (single free-form parameter).
    """
    params = template.params
    year = None
    if len(params) >= 3:
        try:
            year = int(unicode(params[2]))
        except ValueError:
            pass
    if year is None:
        # assume {{date|d m y|...}}
        return unicode(params[0])
    # assume {{date|d|m|y|...}}
    return ' '.join(unicode(p) for p in params[:3])
def handle_s(template):
    """Render the French {{s}}/{{-s}} templates as plain text.

    ``template.params`` holds parser node objects, not plain strings, so
    each parameter must be converted with ``unicode()`` before being
    concatenated — concatenating the raw objects was the bug here.
    """
    ret = unicode(template.params[0])
    if len(template.params) == 2:
        ret += unicode(template.params[1])
    if template.name.matches('-s'):
        # {{-s}} appends the French "before Christ" suffix.
        ret += ' av. J.-C'
    return ret
class SnippetParser(base.SnippetParserBase):
    """Snippet parser with French-Wikipedia template handling."""

    def strip_template(self, template, normalize, collapse):
        """Replace a template with its plain-text rendering (or '')."""
        name = template.name
        if name.matches('unité'):
            return ' '.join(unicode(p) for p in template.params[:2])
        if name.matches('date'):
            return handle_date(template)
        if name.matches('s') or name.matches('-s'):
            return handle_s(template)
        if self.is_citation_needed(template):
            pieces = [base.CITATION_NEEDED_MARKER]
            if template.params:
                pieces.insert(0, template.params[0].value.strip_code())
            return ''.join(pieces)
        return ''
| #-*- encoding: utf-8 -*-
import base
def handle_date(template):
    """Render the French {{date}} template as plain text.

    Handles both {{date|d|m|y|...}} (third parameter parses as an integer
    year) and {{date|d m y|...}} (single free-form parameter).
    """
    params = template.params
    year = None
    if len(params) >= 3:
        try:
            year = int(unicode(params[2]))
        except ValueError:
            pass
    if year is None:
        # assume {{date|d m y|...}}
        return unicode(params[0])
    # assume {{date|d|m|y|...}}
    return ' '.join(unicode(p) for p in params[:3])
def handle_s(template):
    """Render the French {{s}}/{{-s}} templates as plain text."""
    parts = [unicode(template.params[0])]
    if len(template.params) == 2:
        parts.append(unicode(template.params[1]))
    if template.name.matches('-s'):
        # {{-s}} appends the French "before Christ" suffix.
        parts.append(' av. J.-C')
    return ''.join(parts)
class SnippetParser(base.SnippetParserBase):
    """Snippet parser with French-Wikipedia template handling."""

    def strip_template(self, template, normalize, collapse):
        """Replace a template with its plain-text rendering (or '')."""
        name = template.name
        if name.matches('unité'):
            return ' '.join(unicode(p) for p in template.params[:2])
        if name.matches('date'):
            return handle_date(template)
        if name.matches('s') or name.matches('-s'):
            return handle_s(template)
        if self.is_citation_needed(template):
            pieces = [base.CITATION_NEEDED_MARKER]
            if template.params:
                pieces.insert(0, template.params[0].value.strip_code())
            return ''.join(pieces)
        return ''
| Fix params handling in {{s}}. | Fix params handling in {{s}}.
| Python | mit | Stryn/citationhunt,Stryn/citationhunt,Stryn/citationhunt,jhsoby/citationhunt,Stryn/citationhunt,jhsoby/citationhunt,jhsoby/citationhunt,jhsoby/citationhunt | ---
+++
@@ -17,9 +17,9 @@
return unicode(template.params[0])
def handle_s(template):
- ret = template.params[0]
+ ret = unicode(template.params[0])
if len(template.params) == 2:
- ret += template.params[1]
+ ret += unicode(template.params[1])
if template.name.matches('-s'):
ret += ' av. J.-C'
return ret |
665af108d44e5aa91483bc70c0c76dab9b297d41 | tests/test_api_info.py | tests/test_api_info.py | from tests.types import string
def test_get(test):
    """Exercise api.get_info() and validate the transport-dependent fields."""
    def handle_connect(handler):
        info = handler.api.get_info()
        assert isinstance(info['api_version'], string)
        assert isinstance(info['server_timestamp'], string)
        if info.get('rest_server_url'):
            # REST transport: only the REST url is populated.
            assert info['websocket_server_url'] is None
            assert isinstance(info['rest_server_url'], string)
        else:
            # Websocket transport: only the websocket url is populated.
            assert isinstance(info['websocket_server_url'], string)
            assert info['rest_server_url'] is None

    test.run(handle_connect)
| from tests.types import string
from tests.conftest import not_implemented_skip
def test_get(test):
    """Exercise api.get_info() and validate the transport-dependent fields."""
    def handle_connect(handler):
        info = handler.api.get_info()
        assert isinstance(info['api_version'], string)
        assert isinstance(info['server_timestamp'], string)
        if info.get('rest_server_url'):
            # REST transport: only the REST url is populated.
            assert info['websocket_server_url'] is None
            assert isinstance(info['rest_server_url'], string)
        else:
            # Websocket transport: only the websocket url is populated.
            assert isinstance(info['websocket_server_url'], string)
            assert info['rest_server_url'] is None

    test.run(handle_connect)
@not_implemented_skip
def test_get_cluster(test):
    # Placeholder: the cluster-info API call is not implemented yet, so this
    # test is skipped via the not_implemented_skip decorator.
    pass
 | Add not implemented get cluster
| Python | apache-2.0 | devicehive/devicehive-python | ---
+++
@@ -1,4 +1,5 @@
from tests.types import string
+from tests.conftest import not_implemented_skip
def test_get(test):
@@ -15,3 +16,8 @@
assert info['rest_server_url'] is None
test.run(handle_connect)
+
+
+@not_implemented_skip
+def test_get_cluster(test):
+ pass |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.