| commit (stringlengths 40-40) | old_file (stringlengths 4-118) | new_file (stringlengths 4-118) | old_contents (stringlengths 0-2.94k) | new_contents (stringlengths 1-4.43k) | subject (stringlengths 15-444) | message (stringlengths 16-3.45k) | lang (stringclasses, 1 value) | license (stringclasses, 13 values) | repos (stringlengths 5-43.2k) | prompt (stringlengths 17-4.58k) | response (stringlengths 1-4.43k) | prompt_tagged (stringlengths 58-4.62k) | response_tagged (stringlengths 1-4.43k) | text (stringlengths 132-7.29k) | text_tagged (stringlengths 173-7.33k) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
53d5f47c828bec78e7241cb9e3d4f614dd18e6f9
|
responder.py
|
responder.py
|
import random
import yaml
from flask import jsonify, Response, render_template
class Which(object):
    def __init__(self, mime_type, args):
        self.mime_type = mime_type
        self.args = args
    @property
    def _excuse(self):
        stream = open("excuses.yaml", 'r')
        excuses = yaml.load(stream)
        return random.choice(excuses["excuses"])
    def get_response(self):
        if self.mime_type == "application/json":
            return jsonify({
                "excuse": self._excuse
            }), "/json/"
        elif self.mime_type == "application/xml":
            return Response(
                render_template('xml.xml', excuse=self._excuse),
                mimetype='text/xml'
            ), "/xml/"
        elif self.mime_type == "application/javascript" or "jsonp" in self.args:
            return Response(
                render_template('jsonp.js', excuse=self._excuse),
                mimetype='application/javascript'
            ), "/jsonp/"
        elif self.mime_type == "text/plain":
            return Response("Hello world", mimetype='text/plain'), "/text/"
        else:
            return render_template('html.html', excuse=self._excuse), "/html/"
|
import random
import yaml
from flask import jsonify, Response, render_template
class Which(object):
    def __init__(self, mime_type, args):
        self.mime_type = mime_type
        self.args = args
    @property
    def _excuse(self):
        stream = open("excuses.yaml", 'r')
        excuses = yaml.load(stream)
        return random.choice(excuses["excuses"])
    def get_response(self):
        if self.mime_type == "application/json":
            return jsonify({
                "excuse": self._excuse
            }), "/json/"
        elif self.mime_type == "application/xml":
            return Response(
                render_template('xml.xml', excuse=self._excuse),
                mimetype='text/xml'
            ), "/xml/"
        elif self.mime_type == "application/javascript" or "jsonp" in self.args:
            return Response(
                render_template('jsonp.js', excuse=self._excuse),
                mimetype='application/javascript'
            ), "/jsonp/"
        elif self.mime_type == "text/plain":
            return Response(self._excuse, mimetype='text/plain'), "/text/"
        else:
            return render_template('html.html', excuse=self._excuse), "/html/"
|
Fix bug with text/plain response
|
Fix bug with text/plain response
|
Python
|
mit
|
aaronbassett/Bad-Tools,aaronbassett/Bad-Tools,aaronbassett/Bad-Tools,aaronbassett/Bad-Tools,aaronbassett/Bad-Tools
|
import random
import yaml
from flask import jsonify, Response, render_template
class Which(object):
def __init__(self, mime_type, args):
self.mime_type = mime_type
self.args = args
@property
def _excuse(self):
stream = open("excuses.yaml", 'r')
excuses = yaml.load(stream)
return random.choice(excuses["excuses"])
def get_response(self):
if self.mime_type == "application/json":
return jsonify({
"excuse": self._excuse
}), "/json/"
elif self.mime_type == "application/xml":
return Response(
render_template('xml.xml', excuse=self._excuse),
mimetype='text/xml'
), "/xml/"
elif self.mime_type == "application/javascript" or "jsonp" in self.args:
return Response(
render_template('jsonp.js', excuse=self._excuse),
mimetype='application/javascript'
), "/jsonp/"
elif self.mime_type == "text/plain":
return Response("Hello world", mimetype='text/plain'), "/text/"
else:
return render_template('html.html', excuse=self._excuse), "/html/"
Fix bug with text/plain response
|
import random
import yaml
from flask import jsonify, Response, render_template
class Which(object):
def __init__(self, mime_type, args):
self.mime_type = mime_type
self.args = args
@property
def _excuse(self):
stream = open("excuses.yaml", 'r')
excuses = yaml.load(stream)
return random.choice(excuses["excuses"])
def get_response(self):
if self.mime_type == "application/json":
return jsonify({
"excuse": self._excuse
}), "/json/"
elif self.mime_type == "application/xml":
return Response(
render_template('xml.xml', excuse=self._excuse),
mimetype='text/xml'
), "/xml/"
elif self.mime_type == "application/javascript" or "jsonp" in self.args:
return Response(
render_template('jsonp.js', excuse=self._excuse),
mimetype='application/javascript'
), "/jsonp/"
elif self.mime_type == "text/plain":
return Response(self._excuse, mimetype='text/plain'), "/text/"
else:
return render_template('html.html', excuse=self._excuse), "/html/"
|
<commit_before>import random
import yaml
from flask import jsonify, Response, render_template
class Which(object):
def __init__(self, mime_type, args):
self.mime_type = mime_type
self.args = args
@property
def _excuse(self):
stream = open("excuses.yaml", 'r')
excuses = yaml.load(stream)
return random.choice(excuses["excuses"])
def get_response(self):
if self.mime_type == "application/json":
return jsonify({
"excuse": self._excuse
}), "/json/"
elif self.mime_type == "application/xml":
return Response(
render_template('xml.xml', excuse=self._excuse),
mimetype='text/xml'
), "/xml/"
elif self.mime_type == "application/javascript" or "jsonp" in self.args:
return Response(
render_template('jsonp.js', excuse=self._excuse),
mimetype='application/javascript'
), "/jsonp/"
elif self.mime_type == "text/plain":
return Response("Hello world", mimetype='text/plain'), "/text/"
else:
return render_template('html.html', excuse=self._excuse), "/html/"
<commit_msg>Fix bug with text/plain response<commit_after>
|
import random
import yaml
from flask import jsonify, Response, render_template
class Which(object):
def __init__(self, mime_type, args):
self.mime_type = mime_type
self.args = args
@property
def _excuse(self):
stream = open("excuses.yaml", 'r')
excuses = yaml.load(stream)
return random.choice(excuses["excuses"])
def get_response(self):
if self.mime_type == "application/json":
return jsonify({
"excuse": self._excuse
}), "/json/"
elif self.mime_type == "application/xml":
return Response(
render_template('xml.xml', excuse=self._excuse),
mimetype='text/xml'
), "/xml/"
elif self.mime_type == "application/javascript" or "jsonp" in self.args:
return Response(
render_template('jsonp.js', excuse=self._excuse),
mimetype='application/javascript'
), "/jsonp/"
elif self.mime_type == "text/plain":
return Response(self._excuse, mimetype='text/plain'), "/text/"
else:
return render_template('html.html', excuse=self._excuse), "/html/"
|
import random
import yaml
from flask import jsonify, Response, render_template
class Which(object):
def __init__(self, mime_type, args):
self.mime_type = mime_type
self.args = args
@property
def _excuse(self):
stream = open("excuses.yaml", 'r')
excuses = yaml.load(stream)
return random.choice(excuses["excuses"])
def get_response(self):
if self.mime_type == "application/json":
return jsonify({
"excuse": self._excuse
}), "/json/"
elif self.mime_type == "application/xml":
return Response(
render_template('xml.xml', excuse=self._excuse),
mimetype='text/xml'
), "/xml/"
elif self.mime_type == "application/javascript" or "jsonp" in self.args:
return Response(
render_template('jsonp.js', excuse=self._excuse),
mimetype='application/javascript'
), "/jsonp/"
elif self.mime_type == "text/plain":
return Response("Hello world", mimetype='text/plain'), "/text/"
else:
return render_template('html.html', excuse=self._excuse), "/html/"
Fix bug with text/plain responseimport random
import yaml
from flask import jsonify, Response, render_template
class Which(object):
def __init__(self, mime_type, args):
self.mime_type = mime_type
self.args = args
@property
def _excuse(self):
stream = open("excuses.yaml", 'r')
excuses = yaml.load(stream)
return random.choice(excuses["excuses"])
def get_response(self):
if self.mime_type == "application/json":
return jsonify({
"excuse": self._excuse
}), "/json/"
elif self.mime_type == "application/xml":
return Response(
render_template('xml.xml', excuse=self._excuse),
mimetype='text/xml'
), "/xml/"
elif self.mime_type == "application/javascript" or "jsonp" in self.args:
return Response(
render_template('jsonp.js', excuse=self._excuse),
mimetype='application/javascript'
), "/jsonp/"
elif self.mime_type == "text/plain":
return Response(self._excuse, mimetype='text/plain'), "/text/"
else:
return render_template('html.html', excuse=self._excuse), "/html/"
|
<commit_before>import random
import yaml
from flask import jsonify, Response, render_template
class Which(object):
def __init__(self, mime_type, args):
self.mime_type = mime_type
self.args = args
@property
def _excuse(self):
stream = open("excuses.yaml", 'r')
excuses = yaml.load(stream)
return random.choice(excuses["excuses"])
def get_response(self):
if self.mime_type == "application/json":
return jsonify({
"excuse": self._excuse
}), "/json/"
elif self.mime_type == "application/xml":
return Response(
render_template('xml.xml', excuse=self._excuse),
mimetype='text/xml'
), "/xml/"
elif self.mime_type == "application/javascript" or "jsonp" in self.args:
return Response(
render_template('jsonp.js', excuse=self._excuse),
mimetype='application/javascript'
), "/jsonp/"
elif self.mime_type == "text/plain":
return Response("Hello world", mimetype='text/plain'), "/text/"
else:
return render_template('html.html', excuse=self._excuse), "/html/"
<commit_msg>Fix bug with text/plain response<commit_after>import random
import yaml
from flask import jsonify, Response, render_template
class Which(object):
def __init__(self, mime_type, args):
self.mime_type = mime_type
self.args = args
@property
def _excuse(self):
stream = open("excuses.yaml", 'r')
excuses = yaml.load(stream)
return random.choice(excuses["excuses"])
def get_response(self):
if self.mime_type == "application/json":
return jsonify({
"excuse": self._excuse
}), "/json/"
elif self.mime_type == "application/xml":
return Response(
render_template('xml.xml', excuse=self._excuse),
mimetype='text/xml'
), "/xml/"
elif self.mime_type == "application/javascript" or "jsonp" in self.args:
return Response(
render_template('jsonp.js', excuse=self._excuse),
mimetype='application/javascript'
), "/jsonp/"
elif self.mime_type == "text/plain":
return Response(self._excuse, mimetype='text/plain'), "/text/"
else:
return render_template('html.html', excuse=self._excuse), "/html/"
|
1ba617690bbf50648a096875f419774064d284a6
|
rstfinder/__init__.py
|
rstfinder/__init__.py
|
# Ensure there won't be logging complaints about no handlers being set
import logging
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
# Ensure there won't be logging complaints about no handlers being set
import logging
logging.getLogger(__name__).addHandler(logging.NullHandler())
try:
    import zpar
except ImportError:
    raise ImportError("The 'python-zpar' package is missing. Run 'pip install python-zpar' to install it.") from None
|
Add check for python zpar when importing rstfinder.
|
Add check for python zpar when importing rstfinder.
|
Python
|
mit
|
EducationalTestingService/discourse-parsing,EducationalTestingService/discourse-parsing
|
# Ensure there won't be logging complaints about no handlers being set
import logging
logging.getLogger(__name__).addHandler(logging.NullHandler())Add check for python zpar when importing rstfinder.
|
# Ensure there won't be logging complaints about no handlers being set
import logging
logging.getLogger(__name__).addHandler(logging.NullHandler())
try:
import zpar
except ImportError:
raise ImportError("The 'python-zpar' package is missing. Run 'pip install python-zpar' to install it.") from None
|
<commit_before># Ensure there won't be logging complaints about no handlers being set
import logging
logging.getLogger(__name__).addHandler(logging.NullHandler())<commit_msg>Add check for python zpar when importing rstfinder.<commit_after>
|
# Ensure there won't be logging complaints about no handlers being set
import logging
logging.getLogger(__name__).addHandler(logging.NullHandler())
try:
import zpar
except ImportError:
raise ImportError("The 'python-zpar' package is missing. Run 'pip install python-zpar' to install it.") from None
|
# Ensure there won't be logging complaints about no handlers being set
import logging
logging.getLogger(__name__).addHandler(logging.NullHandler())Add check for python zpar when importing rstfinder.# Ensure there won't be logging complaints about no handlers being set
import logging
logging.getLogger(__name__).addHandler(logging.NullHandler())
try:
import zpar
except ImportError:
raise ImportError("The 'python-zpar' package is missing. Run 'pip install python-zpar' to install it.") from None
|
<commit_before># Ensure there won't be logging complaints about no handlers being set
import logging
logging.getLogger(__name__).addHandler(logging.NullHandler())<commit_msg>Add check for python zpar when importing rstfinder.<commit_after># Ensure there won't be logging complaints about no handlers being set
import logging
logging.getLogger(__name__).addHandler(logging.NullHandler())
try:
import zpar
except ImportError:
raise ImportError("The 'python-zpar' package is missing. Run 'pip install python-zpar' to install it.") from None
|
b0f4158beebdb1edac9305e63a9fb77946d3a59f
|
run_tests.py
|
run_tests.py
|
import os
import sys
import contextlib
import subprocess
@contextlib.contextmanager
def binding(binding):
"""Prepare an environment for a specific binding"""
sys.stderr.write("""\
#
# Running tests with %s..
#
""" % binding)
os.environ["QT_PREFERRED_BINDING"] = binding
try:
yield
except:
pass
os.environ.pop("QT_PREFERRED_BINDING")
if __name__ == "__main__":
argv = [
"nosetests",
"--verbose",
"--with-process-isolation",
"--exe",
]
# argv.extend(sys.argv[1:])
# Running each test independently via subprocess
# enables tests to filter out from tests.py before
# being split into individual processes via the
# --with-process-isolation feature of nose.
with binding("PyQt4"):
subprocess.call(argv)
with binding("PySide"):
subprocess.call(argv)
with binding("PyQt5"):
subprocess.call(argv)
with binding("PySide2"):
subprocess.call(argv)
|
import os
import sys
import contextlib
import subprocess
@contextlib.contextmanager
def binding(binding):
"""Prepare an environment for a specific binding"""
sys.stderr.write("""\
#
# Running tests with %s..
#
""" % binding)
os.environ["QT_PREFERRED_BINDING"] = binding
try:
yield
except:
pass
os.environ.pop("QT_PREFERRED_BINDING")
if __name__ == "__main__":
argv = [
"nosetests",
"--verbose",
"--with-process-isolation",
"--exe",
]
errors = 0
# Running each test independently via subprocess
# enables tests to filter out from tests.py before
# being split into individual processes via the
# --with-process-isolation feature of nose.
with binding("PyQt4"):
errors += subprocess.call(argv)
with binding("PySide"):
errors += subprocess.call(argv)
with binding("PyQt5"):
errors += subprocess.call(argv)
with binding("PySide2"):
errors += subprocess.call(argv)
if errors:
raise Exception("%i binding(s) failed." % errors)
|
Throw exception when primary tests fail
|
Throw exception when primary tests fail
|
Python
|
mit
|
mottosso/Qt.py,fredrikaverpil/Qt.py,mottosso/Qt.py,fredrikaverpil/Qt.py
|
import os
import sys
import contextlib
import subprocess
@contextlib.contextmanager
def binding(binding):
"""Prepare an environment for a specific binding"""
sys.stderr.write("""\
#
# Running tests with %s..
#
""" % binding)
os.environ["QT_PREFERRED_BINDING"] = binding
try:
yield
except:
pass
os.environ.pop("QT_PREFERRED_BINDING")
if __name__ == "__main__":
argv = [
"nosetests",
"--verbose",
"--with-process-isolation",
"--exe",
]
# argv.extend(sys.argv[1:])
# Running each test independently via subprocess
# enables tests to filter out from tests.py before
# being split into individual processes via the
# --with-process-isolation feature of nose.
with binding("PyQt4"):
subprocess.call(argv)
with binding("PySide"):
subprocess.call(argv)
with binding("PyQt5"):
subprocess.call(argv)
with binding("PySide2"):
subprocess.call(argv)
Throw exception when primary tests fail
|
import os
import sys
import contextlib
import subprocess
@contextlib.contextmanager
def binding(binding):
"""Prepare an environment for a specific binding"""
sys.stderr.write("""\
#
# Running tests with %s..
#
""" % binding)
os.environ["QT_PREFERRED_BINDING"] = binding
try:
yield
except:
pass
os.environ.pop("QT_PREFERRED_BINDING")
if __name__ == "__main__":
argv = [
"nosetests",
"--verbose",
"--with-process-isolation",
"--exe",
]
errors = 0
# Running each test independently via subprocess
# enables tests to filter out from tests.py before
# being split into individual processes via the
# --with-process-isolation feature of nose.
with binding("PyQt4"):
errors += subprocess.call(argv)
with binding("PySide"):
errors += subprocess.call(argv)
with binding("PyQt5"):
errors += subprocess.call(argv)
with binding("PySide2"):
errors += subprocess.call(argv)
if errors:
raise Exception("%i binding(s) failed." % errors)
|
<commit_before>import os
import sys
import contextlib
import subprocess
@contextlib.contextmanager
def binding(binding):
"""Prepare an environment for a specific binding"""
sys.stderr.write("""\
#
# Running tests with %s..
#
""" % binding)
os.environ["QT_PREFERRED_BINDING"] = binding
try:
yield
except:
pass
os.environ.pop("QT_PREFERRED_BINDING")
if __name__ == "__main__":
argv = [
"nosetests",
"--verbose",
"--with-process-isolation",
"--exe",
]
# argv.extend(sys.argv[1:])
# Running each test independently via subprocess
# enables tests to filter out from tests.py before
# being split into individual processes via the
# --with-process-isolation feature of nose.
with binding("PyQt4"):
subprocess.call(argv)
with binding("PySide"):
subprocess.call(argv)
with binding("PyQt5"):
subprocess.call(argv)
with binding("PySide2"):
subprocess.call(argv)
<commit_msg>Throw exception when primary tests fail<commit_after>
|
import os
import sys
import contextlib
import subprocess
@contextlib.contextmanager
def binding(binding):
"""Prepare an environment for a specific binding"""
sys.stderr.write("""\
#
# Running tests with %s..
#
""" % binding)
os.environ["QT_PREFERRED_BINDING"] = binding
try:
yield
except:
pass
os.environ.pop("QT_PREFERRED_BINDING")
if __name__ == "__main__":
argv = [
"nosetests",
"--verbose",
"--with-process-isolation",
"--exe",
]
errors = 0
# Running each test independently via subprocess
# enables tests to filter out from tests.py before
# being split into individual processes via the
# --with-process-isolation feature of nose.
with binding("PyQt4"):
errors += subprocess.call(argv)
with binding("PySide"):
errors += subprocess.call(argv)
with binding("PyQt5"):
errors += subprocess.call(argv)
with binding("PySide2"):
errors += subprocess.call(argv)
if errors:
raise Exception("%i binding(s) failed." % errors)
|
import os
import sys
import contextlib
import subprocess
@contextlib.contextmanager
def binding(binding):
"""Prepare an environment for a specific binding"""
sys.stderr.write("""\
#
# Running tests with %s..
#
""" % binding)
os.environ["QT_PREFERRED_BINDING"] = binding
try:
yield
except:
pass
os.environ.pop("QT_PREFERRED_BINDING")
if __name__ == "__main__":
argv = [
"nosetests",
"--verbose",
"--with-process-isolation",
"--exe",
]
# argv.extend(sys.argv[1:])
# Running each test independently via subprocess
# enables tests to filter out from tests.py before
# being split into individual processes via the
# --with-process-isolation feature of nose.
with binding("PyQt4"):
subprocess.call(argv)
with binding("PySide"):
subprocess.call(argv)
with binding("PyQt5"):
subprocess.call(argv)
with binding("PySide2"):
subprocess.call(argv)
Throw exception when primary tests failimport os
import sys
import contextlib
import subprocess
@contextlib.contextmanager
def binding(binding):
"""Prepare an environment for a specific binding"""
sys.stderr.write("""\
#
# Running tests with %s..
#
""" % binding)
os.environ["QT_PREFERRED_BINDING"] = binding
try:
yield
except:
pass
os.environ.pop("QT_PREFERRED_BINDING")
if __name__ == "__main__":
argv = [
"nosetests",
"--verbose",
"--with-process-isolation",
"--exe",
]
errors = 0
# Running each test independently via subprocess
# enables tests to filter out from tests.py before
# being split into individual processes via the
# --with-process-isolation feature of nose.
with binding("PyQt4"):
errors += subprocess.call(argv)
with binding("PySide"):
errors += subprocess.call(argv)
with binding("PyQt5"):
errors += subprocess.call(argv)
with binding("PySide2"):
errors += subprocess.call(argv)
if errors:
raise Exception("%i binding(s) failed." % errors)
|
<commit_before>import os
import sys
import contextlib
import subprocess
@contextlib.contextmanager
def binding(binding):
"""Prepare an environment for a specific binding"""
sys.stderr.write("""\
#
# Running tests with %s..
#
""" % binding)
os.environ["QT_PREFERRED_BINDING"] = binding
try:
yield
except:
pass
os.environ.pop("QT_PREFERRED_BINDING")
if __name__ == "__main__":
argv = [
"nosetests",
"--verbose",
"--with-process-isolation",
"--exe",
]
# argv.extend(sys.argv[1:])
# Running each test independently via subprocess
# enables tests to filter out from tests.py before
# being split into individual processes via the
# --with-process-isolation feature of nose.
with binding("PyQt4"):
subprocess.call(argv)
with binding("PySide"):
subprocess.call(argv)
with binding("PyQt5"):
subprocess.call(argv)
with binding("PySide2"):
subprocess.call(argv)
<commit_msg>Throw exception when primary tests fail<commit_after>import os
import sys
import contextlib
import subprocess
@contextlib.contextmanager
def binding(binding):
"""Prepare an environment for a specific binding"""
sys.stderr.write("""\
#
# Running tests with %s..
#
""" % binding)
os.environ["QT_PREFERRED_BINDING"] = binding
try:
yield
except:
pass
os.environ.pop("QT_PREFERRED_BINDING")
if __name__ == "__main__":
argv = [
"nosetests",
"--verbose",
"--with-process-isolation",
"--exe",
]
errors = 0
# Running each test independently via subprocess
# enables tests to filter out from tests.py before
# being split into individual processes via the
# --with-process-isolation feature of nose.
with binding("PyQt4"):
errors += subprocess.call(argv)
with binding("PySide"):
errors += subprocess.call(argv)
with binding("PyQt5"):
errors += subprocess.call(argv)
with binding("PySide2"):
errors += subprocess.call(argv)
if errors:
raise Exception("%i binding(s) failed." % errors)
|
0a2f63367cdb8bffdf762da78fb4888bef9c7d22
|
run_tests.py
|
run_tests.py
|
#!/usr/bin/env python
import sys
sys.path.append('lib/sdks/google_appengine_1.7.1/google_appengine')
import dev_appserver
import unittest
dev_appserver.fix_sys_path()
suites = unittest.loader.TestLoader().discover("src/givabit", pattern="*_test.py")
if len(sys.argv) > 1:
    def GetTestCases(caseorsuite, acc=None):
        if acc is None:
            acc = []
        if isinstance(caseorsuite, unittest.TestCase):
            acc.append(caseorsuite)
            return acc
        for child in caseorsuite:
            GetTestCases(child, acc)
        return acc
    all_tests = GetTestCases(suites)
    tests = [test for test in all_tests if test.id().startswith(sys.argv[1])]
    suites = unittest.TestSuite(tests)
unittest.TextTestRunner(verbosity=1).run(suites)
|
#!/usr/bin/env python
import sys
sys.path.append('lib/sdks/google_appengine_1.7.1/google_appengine')
import dev_appserver
import unittest
dev_appserver.fix_sys_path()
suites = unittest.loader.TestLoader().discover("src/givabit", pattern="*_test.py")
if len(sys.argv) > 1:
    def GetTestCases(caseorsuite, acc=None):
        if acc is None:
            acc = []
        if isinstance(caseorsuite, unittest.TestCase):
            acc.append(caseorsuite)
            return acc
        for child in caseorsuite:
            GetTestCases(child, acc)
        return acc
    all_tests = GetTestCases(suites)
    tests = [test for test in all_tests if test.id().startswith(sys.argv[1]) or test.id().endswith(sys.argv[1])]
    suites = unittest.TestSuite(tests)
unittest.TextTestRunner(verbosity=1).run(suites)
|
Support running test just by test name
|
Support running test just by test name
|
Python
|
apache-2.0
|
illicitonion/givabit,illicitonion/givabit,illicitonion/givabit,illicitonion/givabit,illicitonion/givabit,illicitonion/givabit,illicitonion/givabit,illicitonion/givabit
|
#!/usr/bin/env python
import sys
sys.path.append('lib/sdks/google_appengine_1.7.1/google_appengine')
import dev_appserver
import unittest
dev_appserver.fix_sys_path()
suites = unittest.loader.TestLoader().discover("src/givabit", pattern="*_test.py")
if len(sys.argv) > 1:
def GetTestCases(caseorsuite, acc=None):
if acc is None:
acc = []
if isinstance(caseorsuite, unittest.TestCase):
acc.append(caseorsuite)
return acc
for child in caseorsuite:
GetTestCases(child, acc)
return acc
all_tests = GetTestCases(suites)
tests = [test for test in all_tests if test.id().startswith(sys.argv[1])]
suites = unittest.TestSuite(tests)
unittest.TextTestRunner(verbosity=1).run(suites)
Support running test just by test name
|
#!/usr/bin/env python
import sys
sys.path.append('lib/sdks/google_appengine_1.7.1/google_appengine')
import dev_appserver
import unittest
dev_appserver.fix_sys_path()
suites = unittest.loader.TestLoader().discover("src/givabit", pattern="*_test.py")
if len(sys.argv) > 1:
def GetTestCases(caseorsuite, acc=None):
if acc is None:
acc = []
if isinstance(caseorsuite, unittest.TestCase):
acc.append(caseorsuite)
return acc
for child in caseorsuite:
GetTestCases(child, acc)
return acc
all_tests = GetTestCases(suites)
tests = [test for test in all_tests if test.id().startswith(sys.argv[1]) or test.id().endswith(sys.argv[1])]
suites = unittest.TestSuite(tests)
unittest.TextTestRunner(verbosity=1).run(suites)
|
<commit_before>#!/usr/bin/env python
import sys
sys.path.append('lib/sdks/google_appengine_1.7.1/google_appengine')
import dev_appserver
import unittest
dev_appserver.fix_sys_path()
suites = unittest.loader.TestLoader().discover("src/givabit", pattern="*_test.py")
if len(sys.argv) > 1:
def GetTestCases(caseorsuite, acc=None):
if acc is None:
acc = []
if isinstance(caseorsuite, unittest.TestCase):
acc.append(caseorsuite)
return acc
for child in caseorsuite:
GetTestCases(child, acc)
return acc
all_tests = GetTestCases(suites)
tests = [test for test in all_tests if test.id().startswith(sys.argv[1])]
suites = unittest.TestSuite(tests)
unittest.TextTestRunner(verbosity=1).run(suites)
<commit_msg>Support running test just by test name<commit_after>
|
#!/usr/bin/env python
import sys
sys.path.append('lib/sdks/google_appengine_1.7.1/google_appengine')
import dev_appserver
import unittest
dev_appserver.fix_sys_path()
suites = unittest.loader.TestLoader().discover("src/givabit", pattern="*_test.py")
if len(sys.argv) > 1:
def GetTestCases(caseorsuite, acc=None):
if acc is None:
acc = []
if isinstance(caseorsuite, unittest.TestCase):
acc.append(caseorsuite)
return acc
for child in caseorsuite:
GetTestCases(child, acc)
return acc
all_tests = GetTestCases(suites)
tests = [test for test in all_tests if test.id().startswith(sys.argv[1]) or test.id().endswith(sys.argv[1])]
suites = unittest.TestSuite(tests)
unittest.TextTestRunner(verbosity=1).run(suites)
|
#!/usr/bin/env python
import sys
sys.path.append('lib/sdks/google_appengine_1.7.1/google_appengine')
import dev_appserver
import unittest
dev_appserver.fix_sys_path()
suites = unittest.loader.TestLoader().discover("src/givabit", pattern="*_test.py")
if len(sys.argv) > 1:
def GetTestCases(caseorsuite, acc=None):
if acc is None:
acc = []
if isinstance(caseorsuite, unittest.TestCase):
acc.append(caseorsuite)
return acc
for child in caseorsuite:
GetTestCases(child, acc)
return acc
all_tests = GetTestCases(suites)
tests = [test for test in all_tests if test.id().startswith(sys.argv[1])]
suites = unittest.TestSuite(tests)
unittest.TextTestRunner(verbosity=1).run(suites)
Support running test just by test name#!/usr/bin/env python
import sys
sys.path.append('lib/sdks/google_appengine_1.7.1/google_appengine')
import dev_appserver
import unittest
dev_appserver.fix_sys_path()
suites = unittest.loader.TestLoader().discover("src/givabit", pattern="*_test.py")
if len(sys.argv) > 1:
def GetTestCases(caseorsuite, acc=None):
if acc is None:
acc = []
if isinstance(caseorsuite, unittest.TestCase):
acc.append(caseorsuite)
return acc
for child in caseorsuite:
GetTestCases(child, acc)
return acc
all_tests = GetTestCases(suites)
tests = [test for test in all_tests if test.id().startswith(sys.argv[1]) or test.id().endswith(sys.argv[1])]
suites = unittest.TestSuite(tests)
unittest.TextTestRunner(verbosity=1).run(suites)
|
<commit_before>#!/usr/bin/env python
import sys
sys.path.append('lib/sdks/google_appengine_1.7.1/google_appengine')
import dev_appserver
import unittest
dev_appserver.fix_sys_path()
suites = unittest.loader.TestLoader().discover("src/givabit", pattern="*_test.py")
if len(sys.argv) > 1:
def GetTestCases(caseorsuite, acc=None):
if acc is None:
acc = []
if isinstance(caseorsuite, unittest.TestCase):
acc.append(caseorsuite)
return acc
for child in caseorsuite:
GetTestCases(child, acc)
return acc
all_tests = GetTestCases(suites)
tests = [test for test in all_tests if test.id().startswith(sys.argv[1])]
suites = unittest.TestSuite(tests)
unittest.TextTestRunner(verbosity=1).run(suites)
<commit_msg>Support running test just by test name<commit_after>#!/usr/bin/env python
import sys
sys.path.append('lib/sdks/google_appengine_1.7.1/google_appengine')
import dev_appserver
import unittest
dev_appserver.fix_sys_path()
suites = unittest.loader.TestLoader().discover("src/givabit", pattern="*_test.py")
if len(sys.argv) > 1:
def GetTestCases(caseorsuite, acc=None):
if acc is None:
acc = []
if isinstance(caseorsuite, unittest.TestCase):
acc.append(caseorsuite)
return acc
for child in caseorsuite:
GetTestCases(child, acc)
return acc
all_tests = GetTestCases(suites)
tests = [test for test in all_tests if test.id().startswith(sys.argv[1]) or test.id().endswith(sys.argv[1])]
suites = unittest.TestSuite(tests)
unittest.TextTestRunner(verbosity=1).run(suites)
|
1baa01d63a75272a353d21475de80ec60e562d72
|
runserver.py
|
runserver.py
|
#!/usr/bin/env python
"""
# # #### ##### # # ##### # # # #
# # # # # ## ## # # # ## # #
### #### #### # # # # # # # # #####
# # # # # # # # ## # # #
# # # ##### # # # # # # # #
Kremlin Magical Everything System
Glasnost Image Board and Boredom Inhibitor
"""
from kremlin import app
app.run(debug=True)
|
#!/usr/bin/env python
"""
# # #### ##### # # ##### # # # #
# # # # # ## ## # # # ## # #
### #### #### # # # # # # # # #####
# # # # # # # # ## # # #
# # # ##### # # # # # # # #
Kremlin Magical Everything System
Glasnost Image Board and Boredom Inhibitor
"""
from kremlin import app
def main():
    print "Launching kremlin in development mode."
    print "--------------------------------------"
    app.run(debug=True)
if __name__ == '__main__':
    main()
|
Add header to dev launch script
|
Add header to dev launch script
|
Python
|
bsd-2-clause
|
glasnost/kremlin,glasnost/kremlin,glasnost/kremlin
|
#!/usr/bin/env python
"""
# # #### ##### # # ##### # # # #
# # # # # ## ## # # # ## # #
### #### #### # # # # # # # # #####
# # # # # # # # ## # # #
# # # ##### # # # # # # # #
Kremlin Magical Everything System
Glasnost Image Board and Boredom Inhibitor
"""
from kremlin import app
app.run(debug=True)
Add header to dev launch script
|
#!/usr/bin/env python
"""
# # #### ##### # # ##### # # # #
# # # # # ## ## # # # ## # #
### #### #### # # # # # # # # #####
# # # # # # # # ## # # #
# # # ##### # # # # # # # #
Kremlin Magical Everything System
Glasnost Image Board and Boredom Inhibitor
"""
from kremlin import app
def main():
print "Launching kremlin in development mode."
print "--------------------------------------"
app.run(debug=True)
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
"""
# # #### ##### # # ##### # # # #
# # # # # ## ## # # # ## # #
### #### #### # # # # # # # # #####
# # # # # # # # ## # # #
# # # ##### # # # # # # # #
Kremlin Magical Everything System
Glasnost Image Board and Boredom Inhibitor
"""
from kremlin import app
app.run(debug=True)
<commit_msg>Add header to dev launch script<commit_after>
|
#!/usr/bin/env python
"""
# # #### ##### # # ##### # # # #
# # # # # ## ## # # # ## # #
### #### #### # # # # # # # # #####
# # # # # # # # ## # # #
# # # ##### # # # # # # # #
Kremlin Magical Everything System
Glasnost Image Board and Boredom Inhibitor
"""
from kremlin import app
def main():
print "Launching kremlin in development mode."
print "--------------------------------------"
app.run(debug=True)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
"""
# # #### ##### # # ##### # # # #
# # # # # ## ## # # # ## # #
### #### #### # # # # # # # # #####
# # # # # # # # ## # # #
# # # ##### # # # # # # # #
Kremlin Magical Everything System
Glasnost Image Board and Boredom Inhibitor
"""
from kremlin import app
app.run(debug=True)
Add header to dev launch script#!/usr/bin/env python
"""
# # #### ##### # # ##### # # # #
# # # # # ## ## # # # ## # #
### #### #### # # # # # # # # #####
# # # # # # # # ## # # #
# # # ##### # # # # # # # #
Kremlin Magical Everything System
Glasnost Image Board and Boredom Inhibitor
"""
from kremlin import app
def main():
print "Launching kremlin in development mode."
print "--------------------------------------"
app.run(debug=True)
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
"""
# # #### ##### # # ##### # # # #
# # # # # ## ## # # # ## # #
### #### #### # # # # # # # # #####
# # # # # # # # ## # # #
# # # ##### # # # # # # # #
Kremlin Magical Everything System
Glasnost Image Board and Boredom Inhibitor
"""
from kremlin import app
app.run(debug=True)
<commit_msg>Add header to dev launch script<commit_after>#!/usr/bin/env python
"""
# # #### ##### # # ##### # # # #
# # # # # ## ## # # # ## # #
### #### #### # # # # # # # # #####
# # # # # # # # ## # # #
# # # ##### # # # # # # # #
Kremlin Magical Everything System
Glasnost Image Board and Boredom Inhibitor
"""
from kremlin import app
def main():
print "Launching kremlin in development mode."
print "--------------------------------------"
app.run(debug=True)
if __name__ == '__main__':
main()
|
4136358896654b24df42c6dc963c0d071c31eec3
|
snakewatch/config.py
|
snakewatch/config.py
|
import json
import importlib
class Config(object):
    available_actions = {}
    def __init__(self, cfg, *args):
        if isinstance(cfg, str):
            fp = open(cfg, 'r')
            self.cfg = json.load(fp)
            fp.close()
        elif isinstance(cfg, list):
            self.cfg = cfg
        self.actions = []
        self.check_actions()
    def check_actions(self):
        for entry in self.cfg:
            name = entry['action']
            module = importlib.import_module('actions.%s' % name)
            if name not in Config.available_actions:
                action = '%sAction' % name
                Config.available_actions[name] = getattr(module, action)
            self.actions.append(Config.available_actions[name](entry))
    def match(self, line):
        for action in self.actions:
            if action.matches(line):
                result = action.run_on(line)
                if result is None:
                    return ''
                return result
        return line
class DefaultConfig(Config):
    def __init__(self):
        super(DefaultConfig, self).__init__([
            {
                'regex': '.*',
                'action': 'Print',
            },
        ])
|
import json
import os
import importlib
class Config(object):
    available_actions = {}
    def __init__(self, cfg, *args):
        if isinstance(cfg, str):
            fp = open(cfg, 'r')
            self.cfg = json.load(fp)
            fp.close()
        elif isinstance(cfg, list):
            self.cfg = cfg
        self.actions = []
        self.check_actions()
    def check_actions(self):
        for entry in self.cfg:
            name = entry['action']
            module = importlib.import_module('actions.%s' % name)
            if name not in Config.available_actions:
                action = '%sAction' % name
                Config.available_actions[name] = getattr(module, action)
            self.actions.append(Config.available_actions[name](entry))
    def match(self, line):
        for action in self.actions:
            if action.matches(line):
                result = action.run_on(line)
                if result is None:
                    return ''
                return result
        return line
class DefaultConfig(Config):
    def __init__(self):
        user_default = os.path.expanduser(os.path.join('~', '.snakewatch', 'default.json'))
        if os.path.exists(user_default):
            cfg = user_default
        else:
            cfg = [
                {
                    'regex': '.*',
                    'action': 'Print',
                },
            ]
        super(DefaultConfig, self).__init__(cfg)
|
Use ~/.snakewatch/default.json if exists, fallback on built-in.
|
Use ~/.snakewatch/default.json if exists, fallback on built-in.
|
Python
|
bsd-3-clause
|
asoc/snakewatch
|
import json
import importlib
class Config(object):
available_actions = {}
def __init__(self, cfg, *args):
if isinstance(cfg, str):
fp = open(cfg, 'r')
self.cfg = json.load(fp)
fp.close()
elif isinstance(cfg, list):
self.cfg = cfg
self.actions = []
self.check_actions()
def check_actions(self):
for entry in self.cfg:
name = entry['action']
module = importlib.import_module('actions.%s' % name)
if name not in Config.available_actions:
action = '%sAction' % name
Config.available_actions[name] = getattr(module, action)
self.actions.append(Config.available_actions[name](entry))
def match(self, line):
for action in self.actions:
if action.matches(line):
result = action.run_on(line)
if result is None:
return ''
return result
return line
class DefaultConfig(Config):
def __init__(self):
super(DefaultConfig, self).__init__([
{
'regex': '.*',
'action': 'Print',
},
])Use ~/.snakewatch/default.json if exists, fallback on built-in.
|
import json
import os
import importlib
class Config(object):
available_actions = {}
def __init__(self, cfg, *args):
if isinstance(cfg, str):
fp = open(cfg, 'r')
self.cfg = json.load(fp)
fp.close()
elif isinstance(cfg, list):
self.cfg = cfg
self.actions = []
self.check_actions()
def check_actions(self):
for entry in self.cfg:
name = entry['action']
module = importlib.import_module('actions.%s' % name)
if name not in Config.available_actions:
action = '%sAction' % name
Config.available_actions[name] = getattr(module, action)
self.actions.append(Config.available_actions[name](entry))
def match(self, line):
for action in self.actions:
if action.matches(line):
result = action.run_on(line)
if result is None:
return ''
return result
return line
class DefaultConfig(Config):
def __init__(self):
user_default = os.path.expanduser(os.path.join('~', '.snakewatch', 'default.json'))
if os.path.exists(user_default):
cfg = user_default
else:
cfg = [
{
'regex': '.*',
'action': 'Print',
},
]
super(DefaultConfig, self).__init__(cfg)
|
<commit_before>import json
import importlib
class Config(object):
available_actions = {}
def __init__(self, cfg, *args):
if isinstance(cfg, str):
fp = open(cfg, 'r')
self.cfg = json.load(fp)
fp.close()
elif isinstance(cfg, list):
self.cfg = cfg
self.actions = []
self.check_actions()
def check_actions(self):
for entry in self.cfg:
name = entry['action']
module = importlib.import_module('actions.%s' % name)
if name not in Config.available_actions:
action = '%sAction' % name
Config.available_actions[name] = getattr(module, action)
self.actions.append(Config.available_actions[name](entry))
def match(self, line):
for action in self.actions:
if action.matches(line):
result = action.run_on(line)
if result is None:
return ''
return result
return line
class DefaultConfig(Config):
def __init__(self):
super(DefaultConfig, self).__init__([
{
'regex': '.*',
'action': 'Print',
},
])<commit_msg>Use ~/.snakewatch/default.json if exists, fallback on built-in.<commit_after>
|
import json
import os
import importlib
class Config(object):
available_actions = {}
def __init__(self, cfg, *args):
if isinstance(cfg, str):
fp = open(cfg, 'r')
self.cfg = json.load(fp)
fp.close()
elif isinstance(cfg, list):
self.cfg = cfg
self.actions = []
self.check_actions()
def check_actions(self):
for entry in self.cfg:
name = entry['action']
module = importlib.import_module('actions.%s' % name)
if name not in Config.available_actions:
action = '%sAction' % name
Config.available_actions[name] = getattr(module, action)
self.actions.append(Config.available_actions[name](entry))
def match(self, line):
for action in self.actions:
if action.matches(line):
result = action.run_on(line)
if result is None:
return ''
return result
return line
class DefaultConfig(Config):
def __init__(self):
user_default = os.path.expanduser(os.path.join('~', '.snakewatch', 'default.json'))
if os.path.exists(user_default):
cfg = user_default
else:
cfg = [
{
'regex': '.*',
'action': 'Print',
},
]
super(DefaultConfig, self).__init__(cfg)
|
import json
import importlib
class Config(object):
available_actions = {}
def __init__(self, cfg, *args):
if isinstance(cfg, str):
fp = open(cfg, 'r')
self.cfg = json.load(fp)
fp.close()
elif isinstance(cfg, list):
self.cfg = cfg
self.actions = []
self.check_actions()
def check_actions(self):
for entry in self.cfg:
name = entry['action']
module = importlib.import_module('actions.%s' % name)
if name not in Config.available_actions:
action = '%sAction' % name
Config.available_actions[name] = getattr(module, action)
self.actions.append(Config.available_actions[name](entry))
def match(self, line):
for action in self.actions:
if action.matches(line):
result = action.run_on(line)
if result is None:
return ''
return result
return line
class DefaultConfig(Config):
def __init__(self):
super(DefaultConfig, self).__init__([
{
'regex': '.*',
'action': 'Print',
},
])Use ~/.snakewatch/default.json if exists, fallback on built-in.import json
import os
import importlib
class Config(object):
available_actions = {}
def __init__(self, cfg, *args):
if isinstance(cfg, str):
fp = open(cfg, 'r')
self.cfg = json.load(fp)
fp.close()
elif isinstance(cfg, list):
self.cfg = cfg
self.actions = []
self.check_actions()
def check_actions(self):
for entry in self.cfg:
name = entry['action']
module = importlib.import_module('actions.%s' % name)
if name not in Config.available_actions:
action = '%sAction' % name
Config.available_actions[name] = getattr(module, action)
self.actions.append(Config.available_actions[name](entry))
def match(self, line):
for action in self.actions:
if action.matches(line):
result = action.run_on(line)
if result is None:
return ''
return result
return line
class DefaultConfig(Config):
def __init__(self):
user_default = os.path.expanduser(os.path.join('~', '.snakewatch', 'default.json'))
if os.path.exists(user_default):
cfg = user_default
else:
cfg = [
{
'regex': '.*',
'action': 'Print',
},
]
super(DefaultConfig, self).__init__(cfg)
|
<commit_before>import json
import importlib
class Config(object):
available_actions = {}
def __init__(self, cfg, *args):
if isinstance(cfg, str):
fp = open(cfg, 'r')
self.cfg = json.load(fp)
fp.close()
elif isinstance(cfg, list):
self.cfg = cfg
self.actions = []
self.check_actions()
def check_actions(self):
for entry in self.cfg:
name = entry['action']
module = importlib.import_module('actions.%s' % name)
if name not in Config.available_actions:
action = '%sAction' % name
Config.available_actions[name] = getattr(module, action)
self.actions.append(Config.available_actions[name](entry))
def match(self, line):
for action in self.actions:
if action.matches(line):
result = action.run_on(line)
if result is None:
return ''
return result
return line
class DefaultConfig(Config):
def __init__(self):
super(DefaultConfig, self).__init__([
{
'regex': '.*',
'action': 'Print',
},
])<commit_msg>Use ~/.snakewatch/default.json if exists, fallback on built-in.<commit_after>import json
import os
import importlib
class Config(object):
available_actions = {}
def __init__(self, cfg, *args):
if isinstance(cfg, str):
fp = open(cfg, 'r')
self.cfg = json.load(fp)
fp.close()
elif isinstance(cfg, list):
self.cfg = cfg
self.actions = []
self.check_actions()
def check_actions(self):
for entry in self.cfg:
name = entry['action']
module = importlib.import_module('actions.%s' % name)
if name not in Config.available_actions:
action = '%sAction' % name
Config.available_actions[name] = getattr(module, action)
self.actions.append(Config.available_actions[name](entry))
def match(self, line):
for action in self.actions:
if action.matches(line):
result = action.run_on(line)
if result is None:
return ''
return result
return line
class DefaultConfig(Config):
def __init__(self):
user_default = os.path.expanduser(os.path.join('~', '.snakewatch', 'default.json'))
if os.path.exists(user_default):
cfg = user_default
else:
cfg = [
{
'regex': '.*',
'action': 'Print',
},
]
super(DefaultConfig, self).__init__(cfg)
|
5f1632cf1f307688e4884988e03a1678557bb79c
|
erpnext/hr/doctype/training_feedback/training_feedback.py
|
erpnext/hr/doctype/training_feedback/training_feedback.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe import _
class TrainingFeedback(Document):
    def validate(self):
        training_event = frappe.get_doc("Training Event", self.training_event)
        if training_event.docstatus != 1:
            frappe.throw(_('{0} must be submitted').format(_('Training Event')))
    def on_submit(self):
        training_event = frappe.get_doc("Training Event", self.training_event)
        status = None
        for e in training_event.employees:
            if e.employee == self.employee:
                status = 'Feedback Submitted'
                break
        if status:
            frappe.db.set_value("Training Event", self.training_event, "status", status)
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe import _
class TrainingFeedback(Document):
    def validate(self):
        training_event = frappe.get_doc("Training Event", self.training_event)
        if training_event.docstatus != 1:
            frappe.throw(_('{0} must be submitted').format(_('Training Event')))
    def on_submit(self):
        training_event = frappe.get_doc("Training Event", self.training_event)
        event_status = None
        for e in training_event.employees:
            if e.employee == self.employee:
                event_status = 'Feedback Submitted'
                break
        if event_status:
            frappe.db.set_value("Training Event", self.training_event, "event_status", event_status)
|
Use event_status instead of status
|
fix(hr): Use event_status instead of status
Training Feedback DocType has event_status field (not status)
This was broken since PR #10379, PR #17197 made this failure explicit.
|
Python
|
agpl-3.0
|
gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe import _
class TrainingFeedback(Document):
def validate(self):
training_event = frappe.get_doc("Training Event", self.training_event)
if training_event.docstatus != 1:
frappe.throw(_('{0} must be submitted').format(_('Training Event')))
def on_submit(self):
training_event = frappe.get_doc("Training Event", self.training_event)
status = None
for e in training_event.employees:
if e.employee == self.employee:
status = 'Feedback Submitted'
break
if status:
frappe.db.set_value("Training Event", self.training_event, "status", status)
fix(hr): Use event_status instead of status
Training Feedback DocType has event_status field (not status)
This was broken since PR #10379, PR #17197 made this failure explicit.
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe import _
class TrainingFeedback(Document):
def validate(self):
training_event = frappe.get_doc("Training Event", self.training_event)
if training_event.docstatus != 1:
frappe.throw(_('{0} must be submitted').format(_('Training Event')))
def on_submit(self):
training_event = frappe.get_doc("Training Event", self.training_event)
event_status = None
for e in training_event.employees:
if e.employee == self.employee:
event_status = 'Feedback Submitted'
break
if event_status:
frappe.db.set_value("Training Event", self.training_event, "event_status", event_status)
|
<commit_before># -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe import _
class TrainingFeedback(Document):
def validate(self):
training_event = frappe.get_doc("Training Event", self.training_event)
if training_event.docstatus != 1:
frappe.throw(_('{0} must be submitted').format(_('Training Event')))
def on_submit(self):
training_event = frappe.get_doc("Training Event", self.training_event)
status = None
for e in training_event.employees:
if e.employee == self.employee:
status = 'Feedback Submitted'
break
if status:
frappe.db.set_value("Training Event", self.training_event, "status", status)
<commit_msg>fix(hr): Use event_status instead of status
Training Feedback DocType has event_status field (not status)
This was broken since PR #10379, PR #17197 made this failure explicit.<commit_after>
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe import _
class TrainingFeedback(Document):
def validate(self):
training_event = frappe.get_doc("Training Event", self.training_event)
if training_event.docstatus != 1:
frappe.throw(_('{0} must be submitted').format(_('Training Event')))
def on_submit(self):
training_event = frappe.get_doc("Training Event", self.training_event)
event_status = None
for e in training_event.employees:
if e.employee == self.employee:
event_status = 'Feedback Submitted'
break
if event_status:
frappe.db.set_value("Training Event", self.training_event, "event_status", event_status)
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe import _
class TrainingFeedback(Document):
def validate(self):
training_event = frappe.get_doc("Training Event", self.training_event)
if training_event.docstatus != 1:
frappe.throw(_('{0} must be submitted').format(_('Training Event')))
def on_submit(self):
training_event = frappe.get_doc("Training Event", self.training_event)
status = None
for e in training_event.employees:
if e.employee == self.employee:
status = 'Feedback Submitted'
break
if status:
frappe.db.set_value("Training Event", self.training_event, "status", status)
fix(hr): Use event_status instead of status
Training Feedback DocType has event_status field (not status)
This was broken since PR #10379, PR #17197 made this failure explicit.# -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe import _
class TrainingFeedback(Document):
def validate(self):
training_event = frappe.get_doc("Training Event", self.training_event)
if training_event.docstatus != 1:
frappe.throw(_('{0} must be submitted').format(_('Training Event')))
def on_submit(self):
training_event = frappe.get_doc("Training Event", self.training_event)
event_status = None
for e in training_event.employees:
if e.employee == self.employee:
event_status = 'Feedback Submitted'
break
if event_status:
frappe.db.set_value("Training Event", self.training_event, "event_status", event_status)
|
<commit_before># -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe import _
class TrainingFeedback(Document):
def validate(self):
training_event = frappe.get_doc("Training Event", self.training_event)
if training_event.docstatus != 1:
frappe.throw(_('{0} must be submitted').format(_('Training Event')))
def on_submit(self):
training_event = frappe.get_doc("Training Event", self.training_event)
status = None
for e in training_event.employees:
if e.employee == self.employee:
status = 'Feedback Submitted'
break
if status:
frappe.db.set_value("Training Event", self.training_event, "status", status)
<commit_msg>fix(hr): Use event_status instead of status
Training Feedback DocType has event_status field (not status)
This was broken since PR #10379, PR #17197 made this failure explicit.<commit_after># -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe import _
class TrainingFeedback(Document):
def validate(self):
training_event = frappe.get_doc("Training Event", self.training_event)
if training_event.docstatus != 1:
frappe.throw(_('{0} must be submitted').format(_('Training Event')))
def on_submit(self):
training_event = frappe.get_doc("Training Event", self.training_event)
event_status = None
for e in training_event.employees:
if e.employee == self.employee:
event_status = 'Feedback Submitted'
break
if event_status:
frappe.db.set_value("Training Event", self.training_event, "event_status", event_status)
|
2e95d941d6b5a942fc88558972758d603e0afb37
|
setup.py
|
setup.py
|
"""
Flask-Script
--------------
Flask support for writing external scripts.
Links
`````
* `documentation <http://packages.python.org/Flask-Script>`_
* `development version
<http://bitbucket.org/danjac/flask-Script/get/tip.gz#egg=Flask-Script-dev>`_
"""
from setuptools import setup
setup(
    name='Flask-Script',
    version='0.3.2',
    url='http://bitbucket.org/danjac/flask-script',
    license='BSD',
    author='Dan Jacob',
    author_email='danjac354@gmail.com',
    maintainer='Ron DuPlain',
    maintainer_email='ron.duplain@gmail.com',
    description='Scripting support for Flask',
    long_description=__doc__,
    packages=['flaskext'],
    namespace_packages=['flaskext'],
    test_suite='nose.collector',
    zip_safe=False,
    platforms='any',
    install_requires=[
        'Flask',
        'argparse',
    ],
    tests_require=[
        'nose',
    ],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
        'Topic :: Software Development :: Libraries :: Python Modules'
    ]
)
|
"""
Flask-Script
--------------
Flask support for writing external scripts.
Links
`````
* `documentation <http://packages.python.org/Flask-Script>`_
"""
from setuptools import setup
setup(
    name='Flask-Script',
    version='0.3.2',
    url='http://github.com/rduplain/flask-script',
    license='BSD',
    author='Dan Jacob',
    author_email='danjac354@gmail.com',
    maintainer='Ron DuPlain',
    maintainer_email='ron.duplain@gmail.com',
    description='Scripting support for Flask',
    long_description=__doc__,
    packages=['flaskext'],
    namespace_packages=['flaskext'],
    test_suite='nose.collector',
    zip_safe=False,
    platforms='any',
    install_requires=[
        'Flask',
        'argparse',
    ],
    tests_require=[
        'nose',
    ],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
        'Topic :: Software Development :: Libraries :: Python Modules'
    ]
)
|
Update URL, need to set dev version on GitHub.
|
Update URL, need to set dev version on GitHub.
|
Python
|
bsd-3-clause
|
dext0r/flask-script,denismakogon/flask-script,z4y4ts/flask-script,xingkaixin/flask-script,blakev/flask-script,wjt/flask-script,z4y4ts/flask-script
|
"""
Flask-Script
--------------
Flask support for writing external scripts.
Links
`````
* `documentation <http://packages.python.org/Flask-Script>`_
* `development version
<http://bitbucket.org/danjac/flask-Script/get/tip.gz#egg=Flask-Script-dev>`_
"""
from setuptools import setup
setup(
name='Flask-Script',
version='0.3.2',
url='http://bitbucket.org/danjac/flask-script',
license='BSD',
author='Dan Jacob',
author_email='danjac354@gmail.com',
maintainer='Ron DuPlain',
maintainer_email='ron.duplain@gmail.com',
description='Scripting support for Flask',
long_description=__doc__,
packages=['flaskext'],
namespace_packages=['flaskext'],
test_suite='nose.collector',
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'argparse',
],
tests_require=[
'nose',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
Update URL, need to set dev version on GitHub.
|
"""
Flask-Script
--------------
Flask support for writing external scripts.
Links
`````
* `documentation <http://packages.python.org/Flask-Script>`_
"""
from setuptools import setup
setup(
name='Flask-Script',
version='0.3.2',
url='http://github.com/rduplain/flask-script',
license='BSD',
author='Dan Jacob',
author_email='danjac354@gmail.com',
maintainer='Ron DuPlain',
maintainer_email='ron.duplain@gmail.com',
description='Scripting support for Flask',
long_description=__doc__,
packages=['flaskext'],
namespace_packages=['flaskext'],
test_suite='nose.collector',
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'argparse',
],
tests_require=[
'nose',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
<commit_before>"""
Flask-Script
--------------
Flask support for writing external scripts.
Links
`````
* `documentation <http://packages.python.org/Flask-Script>`_
* `development version
<http://bitbucket.org/danjac/flask-Script/get/tip.gz#egg=Flask-Script-dev>`_
"""
from setuptools import setup
setup(
name='Flask-Script',
version='0.3.2',
url='http://bitbucket.org/danjac/flask-script',
license='BSD',
author='Dan Jacob',
author_email='danjac354@gmail.com',
maintainer='Ron DuPlain',
maintainer_email='ron.duplain@gmail.com',
description='Scripting support for Flask',
long_description=__doc__,
packages=['flaskext'],
namespace_packages=['flaskext'],
test_suite='nose.collector',
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'argparse',
],
tests_require=[
'nose',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
<commit_msg>Update URL, need to set dev version on GitHub.<commit_after>
|
"""
Flask-Script
--------------
Flask support for writing external scripts.
Links
`````
* `documentation <http://packages.python.org/Flask-Script>`_
"""
from setuptools import setup
setup(
name='Flask-Script',
version='0.3.2',
url='http://github.com/rduplain/flask-script',
license='BSD',
author='Dan Jacob',
author_email='danjac354@gmail.com',
maintainer='Ron DuPlain',
maintainer_email='ron.duplain@gmail.com',
description='Scripting support for Flask',
long_description=__doc__,
packages=['flaskext'],
namespace_packages=['flaskext'],
test_suite='nose.collector',
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'argparse',
],
tests_require=[
'nose',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
"""
Flask-Script
--------------
Flask support for writing external scripts.
Links
`````
* `documentation <http://packages.python.org/Flask-Script>`_
* `development version
<http://bitbucket.org/danjac/flask-Script/get/tip.gz#egg=Flask-Script-dev>`_
"""
from setuptools import setup
setup(
name='Flask-Script',
version='0.3.2',
url='http://bitbucket.org/danjac/flask-script',
license='BSD',
author='Dan Jacob',
author_email='danjac354@gmail.com',
maintainer='Ron DuPlain',
maintainer_email='ron.duplain@gmail.com',
description='Scripting support for Flask',
long_description=__doc__,
packages=['flaskext'],
namespace_packages=['flaskext'],
test_suite='nose.collector',
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'argparse',
],
tests_require=[
'nose',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
Update URL, need to set dev version on GitHub."""
Flask-Script
--------------
Flask support for writing external scripts.
Links
`````
* `documentation <http://packages.python.org/Flask-Script>`_
"""
from setuptools import setup
setup(
name='Flask-Script',
version='0.3.2',
url='http://github.com/rduplain/flask-script',
license='BSD',
author='Dan Jacob',
author_email='danjac354@gmail.com',
maintainer='Ron DuPlain',
maintainer_email='ron.duplain@gmail.com',
description='Scripting support for Flask',
long_description=__doc__,
packages=['flaskext'],
namespace_packages=['flaskext'],
test_suite='nose.collector',
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'argparse',
],
tests_require=[
'nose',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
<commit_before>"""
Flask-Script
--------------
Flask support for writing external scripts.
Links
`````
* `documentation <http://packages.python.org/Flask-Script>`_
* `development version
<http://bitbucket.org/danjac/flask-Script/get/tip.gz#egg=Flask-Script-dev>`_
"""
from setuptools import setup
setup(
name='Flask-Script',
version='0.3.2',
url='http://bitbucket.org/danjac/flask-script',
license='BSD',
author='Dan Jacob',
author_email='danjac354@gmail.com',
maintainer='Ron DuPlain',
maintainer_email='ron.duplain@gmail.com',
description='Scripting support for Flask',
long_description=__doc__,
packages=['flaskext'],
namespace_packages=['flaskext'],
test_suite='nose.collector',
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'argparse',
],
tests_require=[
'nose',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
<commit_msg>Update URL, need to set dev version on GitHub.<commit_after>"""
Flask-Script
--------------
Flask support for writing external scripts.
Links
`````
* `documentation <http://packages.python.org/Flask-Script>`_
"""
from setuptools import setup
setup(
name='Flask-Script',
version='0.3.2',
url='http://github.com/rduplain/flask-script',
license='BSD',
author='Dan Jacob',
author_email='danjac354@gmail.com',
maintainer='Ron DuPlain',
maintainer_email='ron.duplain@gmail.com',
description='Scripting support for Flask',
long_description=__doc__,
packages=['flaskext'],
namespace_packages=['flaskext'],
test_suite='nose.collector',
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'argparse',
],
tests_require=[
'nose',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
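Note on the record above: the commit only touches package metadata (the module docstring used as long_description and the url argument); no runtime code changes. A minimal hedged sketch of how to confirm such a metadata edit took effect, assuming setuptools is installed and the script is run from the project root containing the edited setup.py:

# The standard distutils information options --name/--version/--url print the
# metadata that setup() was called with, one value per line.
import subprocess

output = subprocess.run(
    ["python", "setup.py", "--name", "--version", "--url"],
    capture_output=True, text=True, check=True,
).stdout.splitlines()

# Expected after the commit above: Flask-Script / 0.3.2 / the github.com URL.
print(dict(zip(["name", "version", "url"], output)))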
83938c9bf7aafc1f7a2a6b9594279600012ee7ef
|
setup.py
|
setup.py
|
# coding: utf-8
"""pypel setup file.
THIS SOFTWARE IS UNDER BSD LICENSE.
Copyright (c) 2012-2015 Daniele Tricoli <eriol@mornie.org>
Read LICENSE for more informations.
"""
import distutils.core
import os.path
from pypel import __version__
def read(filename):
"""Small tool function to read file content."""
return open(os.path.join(os.path.dirname(__file__), filename)).read()
classifiers = '''
Development Status :: 3 - Alpha
Environment :: Console
Intended Audience :: End Users/Desktop
License :: OSI Approved :: BSD License
Operating System :: POSIX
Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
Programming Language :: Python :: 3.2
Programming Language :: Python :: 3.3
Topic :: Office/Business :: Financial
'''.strip().splitlines()
distutils.core.setup(
name = 'pypel',
version = __version__,
license = 'BSD',
description = 'simple tool to manage receipts',
long_description = read('README.rst'),
classifiers = classifiers,
url = 'http://mornie.org/projects/pypel/',
author = 'Daniele Tricoli',
author_email = 'eriol@mornie.org',
packages = ['pypel'],
package_dir = dict(pypel='pypel'),
scripts = ['bin/pypel']
)
|
# coding: utf-8
"""pypel setup file.
THIS SOFTWARE IS UNDER BSD LICENSE.
Copyright (c) 2012-2015 Daniele Tricoli <eriol@mornie.org>
Read LICENSE for more informations.
"""
import distutils.core
import os.path
from pypel import __version__
def read(filename):
"""Small tool function to read file content."""
return open(os.path.join(os.path.dirname(__file__), filename)).read()
classifiers = '''
Development Status :: 3 - Alpha
Environment :: Console
Intended Audience :: End Users/Desktop
License :: OSI Approved :: BSD License
Operating System :: POSIX
Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
Programming Language :: Python :: 3.4
Topic :: Office/Business :: Financial
'''.strip().splitlines()
distutils.core.setup(
name = 'pypel',
version = __version__,
license = 'BSD',
description = 'simple tool to manage receipts',
long_description = read('README.rst'),
classifiers = classifiers,
url = 'http://mornie.org/projects/pypel/',
author = 'Daniele Tricoli',
author_email = 'eriol@mornie.org',
packages = ['pypel'],
package_dir = dict(pypel='pypel'),
scripts = ['bin/pypel']
)
|
Update Python supported versions classifiers
|
Update Python supported versions classifiers
|
Python
|
bsd-3-clause
|
eriol/pypel
|
# coding: utf-8
"""pypel setup file.
THIS SOFTWARE IS UNDER BSD LICENSE.
Copyright (c) 2012-2015 Daniele Tricoli <eriol@mornie.org>
Read LICENSE for more informations.
"""
import distutils.core
import os.path
from pypel import __version__
def read(filename):
"""Small tool function to read file content."""
return open(os.path.join(os.path.dirname(__file__), filename)).read()
classifiers = '''
Development Status :: 3 - Alpha
Environment :: Console
Intended Audience :: End Users/Desktop
License :: OSI Approved :: BSD License
Operating System :: POSIX
Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
Programming Language :: Python :: 3.2
Programming Language :: Python :: 3.3
Topic :: Office/Business :: Financial
'''.strip().splitlines()
distutils.core.setup(
name = 'pypel',
version = __version__,
license = 'BSD',
description = 'simple tool to manage receipts',
long_description = read('README.rst'),
classifiers = classifiers,
url = 'http://mornie.org/projects/pypel/',
author = 'Daniele Tricoli',
author_email = 'eriol@mornie.org',
packages = ['pypel'],
package_dir = dict(pypel='pypel'),
scripts = ['bin/pypel']
)
Update Python supported versions classifiers
|
# coding: utf-8
"""pypel setup file.
THIS SOFTWARE IS UNDER BSD LICENSE.
Copyright (c) 2012-2015 Daniele Tricoli <eriol@mornie.org>
Read LICENSE for more informations.
"""
import distutils.core
import os.path
from pypel import __version__
def read(filename):
"""Small tool function to read file content."""
return open(os.path.join(os.path.dirname(__file__), filename)).read()
classifiers = '''
Development Status :: 3 - Alpha
Environment :: Console
Intended Audience :: End Users/Desktop
License :: OSI Approved :: BSD License
Operating System :: POSIX
Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
Programming Language :: Python :: 3.4
Topic :: Office/Business :: Financial
'''.strip().splitlines()
distutils.core.setup(
name = 'pypel',
version = __version__,
license = 'BSD',
description = 'simple tool to manage receipts',
long_description = read('README.rst'),
classifiers = classifiers,
url = 'http://mornie.org/projects/pypel/',
author = 'Daniele Tricoli',
author_email = 'eriol@mornie.org',
packages = ['pypel'],
package_dir = dict(pypel='pypel'),
scripts = ['bin/pypel']
)
|
<commit_before># coding: utf-8
"""pypel setup file.
THIS SOFTWARE IS UNDER BSD LICENSE.
Copyright (c) 2012-2015 Daniele Tricoli <eriol@mornie.org>
Read LICENSE for more informations.
"""
import distutils.core
import os.path
from pypel import __version__
def read(filename):
"""Small tool function to read file content."""
return open(os.path.join(os.path.dirname(__file__), filename)).read()
classifiers = '''
Development Status :: 3 - Alpha
Environment :: Console
Intended Audience :: End Users/Desktop
License :: OSI Approved :: BSD License
Operating System :: POSIX
Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
Programming Language :: Python :: 3.2
Programming Language :: Python :: 3.3
Topic :: Office/Business :: Financial
'''.strip().splitlines()
distutils.core.setup(
name = 'pypel',
version = __version__,
license = 'BSD',
description = 'simple tool to manage receipts',
long_description = read('README.rst'),
classifiers = classifiers,
url = 'http://mornie.org/projects/pypel/',
author = 'Daniele Tricoli',
author_email = 'eriol@mornie.org',
packages = ['pypel'],
package_dir = dict(pypel='pypel'),
scripts = ['bin/pypel']
)
<commit_msg>Update Python supported versions classifiers<commit_after>
|
# coding: utf-8
"""pypel setup file.
THIS SOFTWARE IS UNDER BSD LICENSE.
Copyright (c) 2012-2015 Daniele Tricoli <eriol@mornie.org>
Read LICENSE for more informations.
"""
import distutils.core
import os.path
from pypel import __version__
def read(filename):
"""Small tool function to read file content."""
return open(os.path.join(os.path.dirname(__file__), filename)).read()
classifiers = '''
Development Status :: 3 - Alpha
Environment :: Console
Intended Audience :: End Users/Desktop
License :: OSI Approved :: BSD License
Operating System :: POSIX
Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
Programming Language :: Python :: 3.4
Topic :: Office/Business :: Financial
'''.strip().splitlines()
distutils.core.setup(
name = 'pypel',
version = __version__,
license = 'BSD',
description = 'simple tool to manage receipts',
long_description = read('README.rst'),
classifiers = classifiers,
url = 'http://mornie.org/projects/pypel/',
author = 'Daniele Tricoli',
author_email = 'eriol@mornie.org',
packages = ['pypel'],
package_dir = dict(pypel='pypel'),
scripts = ['bin/pypel']
)
|
# coding: utf-8
"""pypel setup file.
THIS SOFTWARE IS UNDER BSD LICENSE.
Copyright (c) 2012-2015 Daniele Tricoli <eriol@mornie.org>
Read LICENSE for more informations.
"""
import distutils.core
import os.path
from pypel import __version__
def read(filename):
"""Small tool function to read file content."""
return open(os.path.join(os.path.dirname(__file__), filename)).read()
classifiers = '''
Development Status :: 3 - Alpha
Environment :: Console
Intended Audience :: End Users/Desktop
License :: OSI Approved :: BSD License
Operating System :: POSIX
Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
Programming Language :: Python :: 3.2
Programming Language :: Python :: 3.3
Topic :: Office/Business :: Financial
'''.strip().splitlines()
distutils.core.setup(
name = 'pypel',
version = __version__,
license = 'BSD',
description = 'simple tool to manage receipts',
long_description = read('README.rst'),
classifiers = classifiers,
url = 'http://mornie.org/projects/pypel/',
author = 'Daniele Tricoli',
author_email = 'eriol@mornie.org',
packages = ['pypel'],
package_dir = dict(pypel='pypel'),
scripts = ['bin/pypel']
)
Update Python supported versions classifiers# coding: utf-8
"""pypel setup file.
THIS SOFTWARE IS UNDER BSD LICENSE.
Copyright (c) 2012-2015 Daniele Tricoli <eriol@mornie.org>
Read LICENSE for more informations.
"""
import distutils.core
import os.path
from pypel import __version__
def read(filename):
"""Small tool function to read file content."""
return open(os.path.join(os.path.dirname(__file__), filename)).read()
classifiers = '''
Development Status :: 3 - Alpha
Environment :: Console
Intended Audience :: End Users/Desktop
License :: OSI Approved :: BSD License
Operating System :: POSIX
Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
Programming Language :: Python :: 3.4
Topic :: Office/Business :: Financial
'''.strip().splitlines()
distutils.core.setup(
name = 'pypel',
version = __version__,
license = 'BSD',
description = 'simple tool to manage receipts',
long_description = read('README.rst'),
classifiers = classifiers,
url = 'http://mornie.org/projects/pypel/',
author = 'Daniele Tricoli',
author_email = 'eriol@mornie.org',
packages = ['pypel'],
package_dir = dict(pypel='pypel'),
scripts = ['bin/pypel']
)
|
<commit_before># coding: utf-8
"""pypel setup file.
THIS SOFTWARE IS UNDER BSD LICENSE.
Copyright (c) 2012-2015 Daniele Tricoli <eriol@mornie.org>
Read LICENSE for more informations.
"""
import distutils.core
import os.path
from pypel import __version__
def read(filename):
"""Small tool function to read file content."""
return open(os.path.join(os.path.dirname(__file__), filename)).read()
classifiers = '''
Development Status :: 3 - Alpha
Environment :: Console
Intended Audience :: End Users/Desktop
License :: OSI Approved :: BSD License
Operating System :: POSIX
Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
Programming Language :: Python :: 3.2
Programming Language :: Python :: 3.3
Topic :: Office/Business :: Financial
'''.strip().splitlines()
distutils.core.setup(
name = 'pypel',
version = __version__,
license = 'BSD',
description = 'simple tool to manage receipts',
long_description = read('README.rst'),
classifiers = classifiers,
url = 'http://mornie.org/projects/pypel/',
author = 'Daniele Tricoli',
author_email = 'eriol@mornie.org',
packages = ['pypel'],
package_dir = dict(pypel='pypel'),
scripts = ['bin/pypel']
)
<commit_msg>Update Python supported versions classifiers<commit_after># coding: utf-8
"""pypel setup file.
THIS SOFTWARE IS UNDER BSD LICENSE.
Copyright (c) 2012-2015 Daniele Tricoli <eriol@mornie.org>
Read LICENSE for more informations.
"""
import distutils.core
import os.path
from pypel import __version__
def read(filename):
"""Small tool function to read file content."""
return open(os.path.join(os.path.dirname(__file__), filename)).read()
classifiers = '''
Development Status :: 3 - Alpha
Environment :: Console
Intended Audience :: End Users/Desktop
License :: OSI Approved :: BSD License
Operating System :: POSIX
Programming Language :: Python
Programming Language :: Python :: 2
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
Programming Language :: Python :: 3.4
Topic :: Office/Business :: Financial
'''.strip().splitlines()
distutils.core.setup(
name = 'pypel',
version = __version__,
license = 'BSD',
description = 'simple tool to manage receipts',
long_description = read('README.rst'),
classifiers = classifiers,
url = 'http://mornie.org/projects/pypel/',
author = 'Daniele Tricoli',
author_email = 'eriol@mornie.org',
packages = ['pypel'],
package_dir = dict(pypel='pypel'),
scripts = ['bin/pypel']
)
|
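Note on the record above: the only functional change is the classifiers block, which the setup file builds by splitting a triple-quoted string. A hedged sketch of that pattern in isolation (the values below are copied from the diff; this is not additional pypel source):

# A readable multi-line string is stripped and split into the list of trove
# classifier strings that distutils/setuptools expects.
classifiers = '''
Development Status :: 3 - Alpha
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3.4
'''.strip().splitlines()

# Each entry is a plain string; dropping "3.2"/"3.3" and adding "3.4" is a
# pure metadata edit with no effect on runtime behaviour.
assert "Programming Language :: Python :: 3.4" in classifiers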
154e1b5b7e8c5fe90197bea13cf653c976ae6a2f
|
setup.py
|
setup.py
|
from setuptools import find_packages, setup
setup(
name='satnogsclient',
version='0.2.5',
url='https://github.com/satnogs/satnogs-client/',
author='SatNOGS team',
author_email='client-dev@satnogs.org',
description='SatNOGS Client',
zip_safe=False,
install_requires=[
'APScheduler',
'SQLAlchemy',
'requests',
'validators',
'python-dateutil',
'ephem',
'pytz',
'flask',
'pyopenssl',
'pyserial',
'flask-socketio',
'redis'
],
extras_require={
'develop': 'flake8'
},
entry_points={
'console_scripts': ['satnogs-client=satnogsclient.main:main'],
},
include_package_data=True,
packages=find_packages()
)
|
from setuptools import find_packages, setup
setup(
name='satnogsclient',
version='0.2.5',
url='https://github.com/satnogs/satnogs-client/',
author='SatNOGS team',
author_email='client-dev@satnogs.org',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Telecommunications Industry',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.7',
'Topic :: Communications :: Ham Radio',
],
license='AGPLv3',
description='SatNOGS Client',
zip_safe=False,
install_requires=[
'APScheduler',
'SQLAlchemy',
'requests',
'validators',
'python-dateutil',
'ephem',
'pytz',
'flask',
'pyopenssl',
'pyserial',
'flask-socketio',
'redis',
],
extras_require={
'develop': 'flake8',
},
entry_points={
'console_scripts': ['satnogs-client=satnogsclient.main:main'],
},
include_package_data=True,
packages=find_packages(),
)
|
Add some classifiers and license metadata
|
Add some classifiers and license metadata
|
Python
|
agpl-3.0
|
adamkalis/satnogs-client,adamkalis/satnogs-client
|
from setuptools import find_packages, setup
setup(
name='satnogsclient',
version='0.2.5',
url='https://github.com/satnogs/satnogs-client/',
author='SatNOGS team',
author_email='client-dev@satnogs.org',
description='SatNOGS Client',
zip_safe=False,
install_requires=[
'APScheduler',
'SQLAlchemy',
'requests',
'validators',
'python-dateutil',
'ephem',
'pytz',
'flask',
'pyopenssl',
'pyserial',
'flask-socketio',
'redis'
],
extras_require={
'develop': 'flake8'
},
entry_points={
'console_scripts': ['satnogs-client=satnogsclient.main:main'],
},
include_package_data=True,
packages=find_packages()
)
Add some classifiers and license metadata
|
from setuptools import find_packages, setup
setup(
name='satnogsclient',
version='0.2.5',
url='https://github.com/satnogs/satnogs-client/',
author='SatNOGS team',
author_email='client-dev@satnogs.org',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Telecommunications Industry',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.7',
'Topic :: Communications :: Ham Radio',
],
license='AGPLv3',
description='SatNOGS Client',
zip_safe=False,
install_requires=[
'APScheduler',
'SQLAlchemy',
'requests',
'validators',
'python-dateutil',
'ephem',
'pytz',
'flask',
'pyopenssl',
'pyserial',
'flask-socketio',
'redis',
],
extras_require={
'develop': 'flake8',
},
entry_points={
'console_scripts': ['satnogs-client=satnogsclient.main:main'],
},
include_package_data=True,
packages=find_packages(),
)
|
<commit_before>from setuptools import find_packages, setup
setup(
name='satnogsclient',
version='0.2.5',
url='https://github.com/satnogs/satnogs-client/',
author='SatNOGS team',
author_email='client-dev@satnogs.org',
description='SatNOGS Client',
zip_safe=False,
install_requires=[
'APScheduler',
'SQLAlchemy',
'requests',
'validators',
'python-dateutil',
'ephem',
'pytz',
'flask',
'pyopenssl',
'pyserial',
'flask-socketio',
'redis'
],
extras_require={
'develop': 'flake8'
},
entry_points={
'console_scripts': ['satnogs-client=satnogsclient.main:main'],
},
include_package_data=True,
packages=find_packages()
)
<commit_msg>Add some classifiers and license metadata<commit_after>
|
from setuptools import find_packages, setup
setup(
name='satnogsclient',
version='0.2.5',
url='https://github.com/satnogs/satnogs-client/',
author='SatNOGS team',
author_email='client-dev@satnogs.org',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Telecommunications Industry',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.7',
'Topic :: Communications :: Ham Radio',
],
license='AGPLv3',
description='SatNOGS Client',
zip_safe=False,
install_requires=[
'APScheduler',
'SQLAlchemy',
'requests',
'validators',
'python-dateutil',
'ephem',
'pytz',
'flask',
'pyopenssl',
'pyserial',
'flask-socketio',
'redis',
],
extras_require={
'develop': 'flake8',
},
entry_points={
'console_scripts': ['satnogs-client=satnogsclient.main:main'],
},
include_package_data=True,
packages=find_packages(),
)
|
from setuptools import find_packages, setup
setup(
name='satnogsclient',
version='0.2.5',
url='https://github.com/satnogs/satnogs-client/',
author='SatNOGS team',
author_email='client-dev@satnogs.org',
description='SatNOGS Client',
zip_safe=False,
install_requires=[
'APScheduler',
'SQLAlchemy',
'requests',
'validators',
'python-dateutil',
'ephem',
'pytz',
'flask',
'pyopenssl',
'pyserial',
'flask-socketio',
'redis'
],
extras_require={
'develop': 'flake8'
},
entry_points={
'console_scripts': ['satnogs-client=satnogsclient.main:main'],
},
include_package_data=True,
packages=find_packages()
)
Add some classifiers and license metadatafrom setuptools import find_packages, setup
setup(
name='satnogsclient',
version='0.2.5',
url='https://github.com/satnogs/satnogs-client/',
author='SatNOGS team',
author_email='client-dev@satnogs.org',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Telecommunications Industry',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.7',
'Topic :: Communications :: Ham Radio',
],
license='AGPLv3',
description='SatNOGS Client',
zip_safe=False,
install_requires=[
'APScheduler',
'SQLAlchemy',
'requests',
'validators',
'python-dateutil',
'ephem',
'pytz',
'flask',
'pyopenssl',
'pyserial',
'flask-socketio',
'redis',
],
extras_require={
'develop': 'flake8',
},
entry_points={
'console_scripts': ['satnogs-client=satnogsclient.main:main'],
},
include_package_data=True,
packages=find_packages(),
)
|
<commit_before>from setuptools import find_packages, setup
setup(
name='satnogsclient',
version='0.2.5',
url='https://github.com/satnogs/satnogs-client/',
author='SatNOGS team',
author_email='client-dev@satnogs.org',
description='SatNOGS Client',
zip_safe=False,
install_requires=[
'APScheduler',
'SQLAlchemy',
'requests',
'validators',
'python-dateutil',
'ephem',
'pytz',
'flask',
'pyopenssl',
'pyserial',
'flask-socketio',
'redis'
],
extras_require={
'develop': 'flake8'
},
entry_points={
'console_scripts': ['satnogs-client=satnogsclient.main:main'],
},
include_package_data=True,
packages=find_packages()
)
<commit_msg>Add some classifiers and license metadata<commit_after>from setuptools import find_packages, setup
setup(
name='satnogsclient',
version='0.2.5',
url='https://github.com/satnogs/satnogs-client/',
author='SatNOGS team',
author_email='client-dev@satnogs.org',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Telecommunications Industry',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.7',
'Topic :: Communications :: Ham Radio',
],
license='AGPLv3',
description='SatNOGS Client',
zip_safe=False,
install_requires=[
'APScheduler',
'SQLAlchemy',
'requests',
'validators',
'python-dateutil',
'ephem',
'pytz',
'flask',
'pyopenssl',
'pyserial',
'flask-socketio',
'redis',
],
extras_require={
'develop': 'flake8',
},
entry_points={
'console_scripts': ['satnogs-client=satnogsclient.main:main'],
},
include_package_data=True,
packages=find_packages(),
)
|
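Note on the record above: the commit adds both a license= field and a matching trove classifier. A hedged consistency-check sketch (the values are taken from the diff; the helper function name is illustrative, not part of the satnogs-client code):

LICENSE = 'AGPLv3'
CLASSIFIERS = [
    'Development Status :: 4 - Beta',
    'License :: OSI Approved :: GNU Affero General Public License v3',
    'Programming Language :: Python :: 2.7',
]

def license_classifier_present(classifiers):
    # True if at least one classifier declares an OSI-approved license,
    # i.e. the license metadata is not only in the free-text license field.
    return any(c.startswith('License :: OSI Approved ::') for c in classifiers)

assert license_classifier_present(CLASSIFIERS), 'missing license classifier'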
98170d95eba8093ab07ec481209f4e1b9cb99403
|
setup.py
|
setup.py
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='glyphsLib',
version='1.0',
packages=['glyphsLib'],
entry_points={
"console_scripts": [
"glyphsLib = glyphsLib.__main__:main"
]
},
package_dir={'': 'Lib'}
)
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='glyphsLib',
version='1.0',
packages=['glyphsLib'],
entry_points={
"console_scripts": [
"glyphs2ufo = glyphsLib.__main__:main"
]
},
package_dir={'': 'Lib'}
)
|
Rename console script back to glyphs2ufo
|
Rename console script back to glyphs2ufo
As long as the console script is strictly a Glyphs -> UFO converter,
this is a more appropriate name.
|
Python
|
apache-2.0
|
googlei18n/glyphsLib,googlefonts/glyphsLib
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='glyphsLib',
version='1.0',
packages=['glyphsLib'],
entry_points={
"console_scripts": [
"glyphsLib = glyphsLib.__main__:main"
]
},
package_dir={'': 'Lib'}
)
Rename console script back to glyphs2ufo
As long as the console script is strictly a Glyphs -> UFO converter,
this is a more appropriate name.
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='glyphsLib',
version='1.0',
packages=['glyphsLib'],
entry_points={
"console_scripts": [
"glyphs2ufo = glyphsLib.__main__:main"
]
},
package_dir={'': 'Lib'}
)
|
<commit_before># Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='glyphsLib',
version='1.0',
packages=['glyphsLib'],
entry_points={
"console_scripts": [
"glyphsLib = glyphsLib.__main__:main"
]
},
package_dir={'': 'Lib'}
)
<commit_msg>Rename console script back to glyphs2ufo
As long as the console script is strictly a Glyphs -> UFO converter,
this is a more appropriate name.<commit_after>
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='glyphsLib',
version='1.0',
packages=['glyphsLib'],
entry_points={
"console_scripts": [
"glyphs2ufo = glyphsLib.__main__:main"
]
},
package_dir={'': 'Lib'}
)
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='glyphsLib',
version='1.0',
packages=['glyphsLib'],
entry_points={
"console_scripts": [
"glyphsLib = glyphsLib.__main__:main"
]
},
package_dir={'': 'Lib'}
)
Rename console script back to glyphs2ufo
As long as the console script is strictly a Glyphs -> UFO converter,
this is a more appropriate name.# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='glyphsLib',
version='1.0',
packages=['glyphsLib'],
entry_points={
"console_scripts": [
"glyphs2ufo = glyphsLib.__main__:main"
]
},
package_dir={'': 'Lib'}
)
|
<commit_before># Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='glyphsLib',
version='1.0',
packages=['glyphsLib'],
entry_points={
"console_scripts": [
"glyphsLib = glyphsLib.__main__:main"
]
},
package_dir={'': 'Lib'}
)
<commit_msg>Rename console script back to glyphs2ufo
As long as the console script is strictly a Glyphs -> UFO converter,
this is a more appropriate name.<commit_after># Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
setup(
name='glyphsLib',
version='1.0',
packages=['glyphsLib'],
entry_points={
"console_scripts": [
"glyphs2ufo = glyphsLib.__main__:main"
]
},
package_dir={'': 'Lib'}
)
|
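Note on the record above: the rename only changes the left-hand side of the console_scripts entry point; the target callable stays glyphsLib.__main__:main, so installing the package now generates a glyphs2ufo executable instead of one named glyphsLib. A hedged, illustrative stand-in for how such an entry-point module is wired (this is not the actual glyphsLib source):

import sys

def main(argv=None):
    # The console script generated from
    # "glyphs2ufo = glyphsLib.__main__:main" imports this module and calls
    # main() with no arguments, so argv defaults to the command line.
    argv = sys.argv[1:] if argv is None else argv
    print("would convert to UFO:", argv)  # placeholder for the real conversion
    return 0

if __name__ == "__main__":
    sys.exit(main())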
c69b45660fcad1d9966b9d2c5ea135436fdac93b
|
setup.py
|
setup.py
|
from setuptools import setup
import sys
import piexif
sys.path.append('./piexif')
sys.path.append('./tests')
with open("README.rst", "r") as f:
description = f.read()
setup(
name = "piexif",
version = piexif.VERSION,
author = "hMatoba",
author_email = "hiroaki.mtb@outlook.com",
description = "To simplify exif manipulations with python. " +
"Writing, reading, and more...",
long_description = description,
license = "MIT",
keywords = ["exif", "jpeg"],
url = "https://github.com/hMatoba/Piexif",
packages = ['piexif'],
test_suite = 's_test.suite',
classifiers = [
"Development Status :: 5 - Production/Stable",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: IronPython",
"Programming Language :: Python :: Implementation :: PyPy",
"License :: OSI Approved :: MIT License",
"Topic :: Multimedia",
"Topic :: Printing",
]
)
|
from setuptools import setup
import sys
import piexif
sys.path.append('./piexif')
sys.path.append('./tests')
with open("README.rst", "r") as f:
description = f.read()
setup(
name = "piexif",
version = piexif.VERSION,
author = "hMatoba",
author_email = "hiroaki.mtb@outlook.com",
description = "To simplify exif manipulations with python. " +
"Writing, reading, and more...",
long_description = description,
license = "MIT",
keywords = ["exif", "jpeg"],
url = "https://github.com/hMatoba/Piexif",
packages = ['piexif'],
test_suite = 's_test.suite',
python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*",
classifiers = [
"Development Status :: 5 - Production/Stable",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: IronPython",
"Programming Language :: Python :: Implementation :: PyPy",
"License :: OSI Approved :: MIT License",
"Topic :: Multimedia",
"Topic :: Printing",
]
)
|
Add python_requires to help pip
|
Add python_requires to help pip
|
Python
|
mit
|
hMatoba/Piexif
|
from setuptools import setup
import sys
import piexif
sys.path.append('./piexif')
sys.path.append('./tests')
with open("README.rst", "r") as f:
description = f.read()
setup(
name = "piexif",
version = piexif.VERSION,
author = "hMatoba",
author_email = "hiroaki.mtb@outlook.com",
description = "To simplify exif manipulations with python. " +
"Writing, reading, and more...",
long_description = description,
license = "MIT",
keywords = ["exif", "jpeg"],
url = "https://github.com/hMatoba/Piexif",
packages = ['piexif'],
test_suite = 's_test.suite',
classifiers = [
"Development Status :: 5 - Production/Stable",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: IronPython",
"Programming Language :: Python :: Implementation :: PyPy",
"License :: OSI Approved :: MIT License",
"Topic :: Multimedia",
"Topic :: Printing",
]
)
Add python_requires to help pip
|
from setuptools import setup
import sys
import piexif
sys.path.append('./piexif')
sys.path.append('./tests')
with open("README.rst", "r") as f:
description = f.read()
setup(
name = "piexif",
version = piexif.VERSION,
author = "hMatoba",
author_email = "hiroaki.mtb@outlook.com",
description = "To simplify exif manipulations with python. " +
"Writing, reading, and more...",
long_description = description,
license = "MIT",
keywords = ["exif", "jpeg"],
url = "https://github.com/hMatoba/Piexif",
packages = ['piexif'],
test_suite = 's_test.suite',
python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*",
classifiers = [
"Development Status :: 5 - Production/Stable",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: IronPython",
"Programming Language :: Python :: Implementation :: PyPy",
"License :: OSI Approved :: MIT License",
"Topic :: Multimedia",
"Topic :: Printing",
]
)
|
<commit_before>from setuptools import setup
import sys
import piexif
sys.path.append('./piexif')
sys.path.append('./tests')
with open("README.rst", "r") as f:
description = f.read()
setup(
name = "piexif",
version = piexif.VERSION,
author = "hMatoba",
author_email = "hiroaki.mtb@outlook.com",
description = "To simplify exif manipulations with python. " +
"Writing, reading, and more...",
long_description = description,
license = "MIT",
keywords = ["exif", "jpeg"],
url = "https://github.com/hMatoba/Piexif",
packages = ['piexif'],
test_suite = 's_test.suite',
classifiers = [
"Development Status :: 5 - Production/Stable",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: IronPython",
"Programming Language :: Python :: Implementation :: PyPy",
"License :: OSI Approved :: MIT License",
"Topic :: Multimedia",
"Topic :: Printing",
]
)
<commit_msg>Add python_requires to help pip<commit_after>
|
from setuptools import setup
import sys
import piexif
sys.path.append('./piexif')
sys.path.append('./tests')
with open("README.rst", "r") as f:
description = f.read()
setup(
name = "piexif",
version = piexif.VERSION,
author = "hMatoba",
author_email = "hiroaki.mtb@outlook.com",
description = "To simplify exif manipulations with python. " +
"Writing, reading, and more...",
long_description = description,
license = "MIT",
keywords = ["exif", "jpeg"],
url = "https://github.com/hMatoba/Piexif",
packages = ['piexif'],
test_suite = 's_test.suite',
python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*",
classifiers = [
"Development Status :: 5 - Production/Stable",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: IronPython",
"Programming Language :: Python :: Implementation :: PyPy",
"License :: OSI Approved :: MIT License",
"Topic :: Multimedia",
"Topic :: Printing",
]
)
|
from setuptools import setup
import sys
import piexif
sys.path.append('./piexif')
sys.path.append('./tests')
with open("README.rst", "r") as f:
description = f.read()
setup(
name = "piexif",
version = piexif.VERSION,
author = "hMatoba",
author_email = "hiroaki.mtb@outlook.com",
description = "To simplify exif manipulations with python. " +
"Writing, reading, and more...",
long_description = description,
license = "MIT",
keywords = ["exif", "jpeg"],
url = "https://github.com/hMatoba/Piexif",
packages = ['piexif'],
test_suite = 's_test.suite',
classifiers = [
"Development Status :: 5 - Production/Stable",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: IronPython",
"Programming Language :: Python :: Implementation :: PyPy",
"License :: OSI Approved :: MIT License",
"Topic :: Multimedia",
"Topic :: Printing",
]
)
Add python_requires to help pipfrom setuptools import setup
import sys
import piexif
sys.path.append('./piexif')
sys.path.append('./tests')
with open("README.rst", "r") as f:
description = f.read()
setup(
name = "piexif",
version = piexif.VERSION,
author = "hMatoba",
author_email = "hiroaki.mtb@outlook.com",
description = "To simplify exif manipulations with python. " +
"Writing, reading, and more...",
long_description = description,
license = "MIT",
keywords = ["exif", "jpeg"],
url = "https://github.com/hMatoba/Piexif",
packages = ['piexif'],
test_suite = 's_test.suite',
python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*",
classifiers = [
"Development Status :: 5 - Production/Stable",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: IronPython",
"Programming Language :: Python :: Implementation :: PyPy",
"License :: OSI Approved :: MIT License",
"Topic :: Multimedia",
"Topic :: Printing",
]
)
|
<commit_before>from setuptools import setup
import sys
import piexif
sys.path.append('./piexif')
sys.path.append('./tests')
with open("README.rst", "r") as f:
description = f.read()
setup(
name = "piexif",
version = piexif.VERSION,
author = "hMatoba",
author_email = "hiroaki.mtb@outlook.com",
description = "To simplify exif manipulations with python. " +
"Writing, reading, and more...",
long_description = description,
license = "MIT",
keywords = ["exif", "jpeg"],
url = "https://github.com/hMatoba/Piexif",
packages = ['piexif'],
test_suite = 's_test.suite',
classifiers = [
"Development Status :: 5 - Production/Stable",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: IronPython",
"Programming Language :: Python :: Implementation :: PyPy",
"License :: OSI Approved :: MIT License",
"Topic :: Multimedia",
"Topic :: Printing",
]
)
<commit_msg>Add python_requires to help pip<commit_after>from setuptools import setup
import sys
import piexif
sys.path.append('./piexif')
sys.path.append('./tests')
with open("README.rst", "r") as f:
description = f.read()
setup(
name = "piexif",
version = piexif.VERSION,
author = "hMatoba",
author_email = "hiroaki.mtb@outlook.com",
description = "To simplify exif manipulations with python. " +
"Writing, reading, and more...",
long_description = description,
license = "MIT",
keywords = ["exif", "jpeg"],
url = "https://github.com/hMatoba/Piexif",
packages = ['piexif'],
test_suite = 's_test.suite',
python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*",
classifiers = [
"Development Status :: 5 - Production/Stable",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: IronPython",
"Programming Language :: Python :: Implementation :: PyPy",
"License :: OSI Approved :: MIT License",
"Topic :: Multimedia",
"Topic :: Printing",
]
)
|
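Note on the record above: python_requires is a PEP 440 version specifier that pip evaluates against the running interpreter before installing, so older Pythons are refused the release instead of getting a broken install. A hedged sketch of how the exact specifier from the commit behaves, using the third-party packaging library (assumed to be installed; not part of piexif itself):

from packaging.specifiers import SpecifierSet

# The specifier string added in the commit above.
spec = SpecifierSet(">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*")

# 2.7 and 3.5+ are acceptable; 3.0 through 3.4 are excluded.
print("3.7" in spec)   # True
print("3.4" in spec)   # False
print("2.7" in spec)   # True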
1c30f3a7985265588acf23fb4ed3a3905c990ba4
|
setup.py
|
setup.py
|
import os
from setuptools import setup, find_packages
from wagtailmenus import __version__
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name="wagtailmenus",
version=__version__,
author="Andy Babic",
author_email="ababic@rkh.co.uk",
description=("An app to help you manage menus in your Wagtail projects "
"more consistently."),
long_description=README,
packages=find_packages(),
license="MIT",
keywords="wagtail cms model utility",
download_url="https://github.com/rkhleics/wagtailmenus/tarball/v2.0.1",
url="https://github.com/rkhleics/wagtailmenus/tree/stable/2.0.x",
include_package_data=True,
zip_safe=False,
classifiers=[
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python",
'Topic :: Internet :: WWW/HTTP',
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
install_requires=[
"wagtail>=1.5,<1.9",
],
)
|
import os
from setuptools import setup, find_packages
from wagtailmenus import __version__
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name="wagtailmenus",
version=__version__,
author="Andy Babic",
author_email="ababic@rkh.co.uk",
description=("An app to help you manage menus in your Wagtail projects "
"more consistently."),
long_description=README,
packages=find_packages(),
license="MIT",
keywords="wagtail cms model utility",
download_url="https://github.com/rkhleics/wagtailmenus/tarball/v2.0.1",
url="https://github.com/rkhleics/wagtailmenus/tree/stable/2.0.x",
include_package_data=True,
zip_safe=False,
classifiers=[
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python",
'Topic :: Internet :: WWW/HTTP',
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
install_requires=[
"wagtail>=1.5,<1.8",
],
)
|
Revert "Wagtailmenus works with wagtail 1.8 too"
|
Revert "Wagtailmenus works with wagtail 1.8 too"
This reverts commit 1e4b7eb5301c0db3e29abf539c5f4b54d11720b8.
|
Python
|
mit
|
rkhleics/wagtailmenus,ababic/wagtailmenus,ababic/wagtailmenus,ababic/wagtailmenus,rkhleics/wagtailmenus,rkhleics/wagtailmenus
|
import os
from setuptools import setup, find_packages
from wagtailmenus import __version__
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name="wagtailmenus",
version=__version__,
author="Andy Babic",
author_email="ababic@rkh.co.uk",
description=("An app to help you manage menus in your Wagtail projects "
"more consistently."),
long_description=README,
packages=find_packages(),
license="MIT",
keywords="wagtail cms model utility",
download_url="https://github.com/rkhleics/wagtailmenus/tarball/v2.0.1",
url="https://github.com/rkhleics/wagtailmenus/tree/stable/2.0.x",
include_package_data=True,
zip_safe=False,
classifiers=[
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python",
'Topic :: Internet :: WWW/HTTP',
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
install_requires=[
"wagtail>=1.5,<1.9",
],
)
Revert "Wagtailmenus works with wagtail 1.8 too"
This reverts commit 1e4b7eb5301c0db3e29abf539c5f4b54d11720b8.
|
import os
from setuptools import setup, find_packages
from wagtailmenus import __version__
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name="wagtailmenus",
version=__version__,
author="Andy Babic",
author_email="ababic@rkh.co.uk",
description=("An app to help you manage menus in your Wagtail projects "
"more consistently."),
long_description=README,
packages=find_packages(),
license="MIT",
keywords="wagtail cms model utility",
download_url="https://github.com/rkhleics/wagtailmenus/tarball/v2.0.1",
url="https://github.com/rkhleics/wagtailmenus/tree/stable/2.0.x",
include_package_data=True,
zip_safe=False,
classifiers=[
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python",
'Topic :: Internet :: WWW/HTTP',
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
install_requires=[
"wagtail>=1.5,<1.8",
],
)
|
<commit_before>import os
from setuptools import setup, find_packages
from wagtailmenus import __version__
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name="wagtailmenus",
version=__version__,
author="Andy Babic",
author_email="ababic@rkh.co.uk",
description=("An app to help you manage menus in your Wagtail projects "
"more consistently."),
long_description=README,
packages=find_packages(),
license="MIT",
keywords="wagtail cms model utility",
download_url="https://github.com/rkhleics/wagtailmenus/tarball/v2.0.1",
url="https://github.com/rkhleics/wagtailmenus/tree/stable/2.0.x",
include_package_data=True,
zip_safe=False,
classifiers=[
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python",
'Topic :: Internet :: WWW/HTTP',
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
install_requires=[
"wagtail>=1.5,<1.9",
],
)
<commit_msg>Revert "Wagtailmenus works with wagtail 1.8 too"
This reverts commit 1e4b7eb5301c0db3e29abf539c5f4b54d11720b8.<commit_after>
|
import os
from setuptools import setup, find_packages
from wagtailmenus import __version__
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name="wagtailmenus",
version=__version__,
author="Andy Babic",
author_email="ababic@rkh.co.uk",
description=("An app to help you manage menus in your Wagtail projects "
"more consistently."),
long_description=README,
packages=find_packages(),
license="MIT",
keywords="wagtail cms model utility",
download_url="https://github.com/rkhleics/wagtailmenus/tarball/v2.0.1",
url="https://github.com/rkhleics/wagtailmenus/tree/stable/2.0.x",
include_package_data=True,
zip_safe=False,
classifiers=[
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python",
'Topic :: Internet :: WWW/HTTP',
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
install_requires=[
"wagtail>=1.5,<1.8",
],
)
|
import os
from setuptools import setup, find_packages
from wagtailmenus import __version__
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name="wagtailmenus",
version=__version__,
author="Andy Babic",
author_email="ababic@rkh.co.uk",
description=("An app to help you manage menus in your Wagtail projects "
"more consistently."),
long_description=README,
packages=find_packages(),
license="MIT",
keywords="wagtail cms model utility",
download_url="https://github.com/rkhleics/wagtailmenus/tarball/v2.0.1",
url="https://github.com/rkhleics/wagtailmenus/tree/stable/2.0.x",
include_package_data=True,
zip_safe=False,
classifiers=[
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python",
'Topic :: Internet :: WWW/HTTP',
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
install_requires=[
"wagtail>=1.5,<1.9",
],
)
Revert "Wagtailmenus works with wagtail 1.8 too"
This reverts commit 1e4b7eb5301c0db3e29abf539c5f4b54d11720b8.
import os
from setuptools import setup, find_packages
from wagtailmenus import __version__
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name="wagtailmenus",
version=__version__,
author="Andy Babic",
author_email="ababic@rkh.co.uk",
description=("An app to help you manage menus in your Wagtail projects "
"more consistently."),
long_description=README,
packages=find_packages(),
license="MIT",
keywords="wagtail cms model utility",
download_url="https://github.com/rkhleics/wagtailmenus/tarball/v2.0.1",
url="https://github.com/rkhleics/wagtailmenus/tree/stable/2.0.x",
include_package_data=True,
zip_safe=False,
classifiers=[
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python",
'Topic :: Internet :: WWW/HTTP',
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
install_requires=[
"wagtail>=1.5,<1.8",
],
)
|
<commit_before>import os
from setuptools import setup, find_packages
from wagtailmenus import __version__
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name="wagtailmenus",
version=__version__,
author="Andy Babic",
author_email="ababic@rkh.co.uk",
description=("An app to help you manage menus in your Wagtail projects "
"more consistently."),
long_description=README,
packages=find_packages(),
license="MIT",
keywords="wagtail cms model utility",
download_url="https://github.com/rkhleics/wagtailmenus/tarball/v2.0.1",
url="https://github.com/rkhleics/wagtailmenus/tree/stable/2.0.x",
include_package_data=True,
zip_safe=False,
classifiers=[
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python",
'Topic :: Internet :: WWW/HTTP',
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
install_requires=[
"wagtail>=1.5,<1.9",
],
)
<commit_msg>Revert "Wagtailmenus works with wagtail 1.8 too"
This reverts commit 1e4b7eb5301c0db3e29abf539c5f4b54d11720b8.<commit_after>import os
from setuptools import setup, find_packages
from wagtailmenus import __version__
with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme:
README = readme.read()
# allow setup.py to be run from any path
os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir)))
setup(
name="wagtailmenus",
version=__version__,
author="Andy Babic",
author_email="ababic@rkh.co.uk",
description=("An app to help you manage menus in your Wagtail projects "
"more consistently."),
long_description=README,
packages=find_packages(),
license="MIT",
keywords="wagtail cms model utility",
download_url="https://github.com/rkhleics/wagtailmenus/tarball/v2.0.1",
url="https://github.com/rkhleics/wagtailmenus/tree/stable/2.0.x",
include_package_data=True,
zip_safe=False,
classifiers=[
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python",
'Topic :: Internet :: WWW/HTTP',
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
install_requires=[
"wagtail>=1.5,<1.8",
],
)
|
03e39599e4f4abac871b40563e04aa3a0a781102
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='djoser',
version='0.0.1',
packages=['djoser'],
license='MIT',
author='SUNSCRAPERS',
description='REST version of Django authentication system.',
author_email='info@sunscrapers.com',
long_description=open('README.md').read(),
install_requires=[
'Django>=1.5',
'djangorestframework>=2.4.0',
],
tests_require=[
'djet>=0.0.10'
],
include_package_data=True,
zip_safe=False,
url='https://github.com/sunscrapers/djoser',
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
]
)
|
#!/usr/bin/env python
from setuptools import setup
try:
import pypandoc
description = pypandoc.convert('README.md', 'rst')
except (IOError, ImportError):
description = open('README.md').read()
setup(
name='djoser',
version='0.0.1',
packages=['djoser'],
license='MIT',
author='SUNSCRAPERS',
description='REST version of Django authentication system.',
author_email='info@sunscrapers.com',
long_description=description,
install_requires=[
'Django>=1.5',
'djangorestframework>=2.4.0',
],
tests_require=[
'djet>=0.0.10'
],
include_package_data=True,
zip_safe=False,
url='https://github.com/sunscrapers/djoser',
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
]
)
|
Use pandoc to convert README from MD to RST.
|
Use pandoc to convert README from MD to RST.
|
Python
|
mit
|
johnwalker/djoser,fladi/djoser,PingaxAnalytics/koob_auth,avances123/djoser,unacast/djoser,dokenzy/djoser,barseghyanartur/djoser,vandoornik/djoser,akalipetis/djoser,yiyocx/djoser,akalipetis/djoser,sunscrapers/djoser,carlosfunk/djoser,liyocee/djoser,sunscrapers/djoser,apokinsocha/djoser,mjuopperi/djoser,sunscrapers/djoser
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='djoser',
version='0.0.1',
packages=['djoser'],
license='MIT',
author='SUNSCRAPERS',
description='REST version of Django authentication system.',
author_email='info@sunscrapers.com',
long_description=open('README.md').read(),
install_requires=[
'Django>=1.5',
'djangorestframework>=2.4.0',
],
tests_require=[
'djet>=0.0.10'
],
include_package_data=True,
zip_safe=False,
url='https://github.com/sunscrapers/djoser',
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
]
)
Use pandoc to convert README from MD to RST.
|
#!/usr/bin/env python
from setuptools import setup
try:
import pypandoc
description = pypandoc.convert('README.md', 'rst')
except (IOError, ImportError):
description = open('README.md').read()
setup(
name='djoser',
version='0.0.1',
packages=['djoser'],
license='MIT',
author='SUNSCRAPERS',
description='REST version of Django authentication system.',
author_email='info@sunscrapers.com',
long_description=description,
install_requires=[
'Django>=1.5',
'djangorestframework>=2.4.0',
],
tests_require=[
'djet>=0.0.10'
],
include_package_data=True,
zip_safe=False,
url='https://github.com/sunscrapers/djoser',
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
]
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
setup(
name='djoser',
version='0.0.1',
packages=['djoser'],
license='MIT',
author='SUNSCRAPERS',
description='REST version of Django authentication system.',
author_email='info@sunscrapers.com',
long_description=open('README.md').read(),
install_requires=[
'Django>=1.5',
'djangorestframework>=2.4.0',
],
tests_require=[
'djet>=0.0.10'
],
include_package_data=True,
zip_safe=False,
url='https://github.com/sunscrapers/djoser',
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
]
)
<commit_msg>Use pandoc to convert README from MD to RST.<commit_after>
|
#!/usr/bin/env python
from setuptools import setup
try:
import pypandoc
description = pypandoc.convert('README.md', 'rst')
except (IOError, ImportError):
description = open('README.md').read()
setup(
name='djoser',
version='0.0.1',
packages=['djoser'],
license='MIT',
author='SUNSCRAPERS',
description='REST version of Django authentication system.',
author_email='info@sunscrapers.com',
long_description=description,
install_requires=[
'Django>=1.5',
'djangorestframework>=2.4.0',
],
tests_require=[
'djet>=0.0.10'
],
include_package_data=True,
zip_safe=False,
url='https://github.com/sunscrapers/djoser',
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
]
)
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='djoser',
version='0.0.1',
packages=['djoser'],
license='MIT',
author='SUNSCRAPERS',
description='REST version of Django authentication system.',
author_email='info@sunscrapers.com',
long_description=open('README.md').read(),
install_requires=[
'Django>=1.5',
'djangorestframework>=2.4.0',
],
tests_require=[
'djet>=0.0.10'
],
include_package_data=True,
zip_safe=False,
url='https://github.com/sunscrapers/djoser',
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
]
)
Use pandoc to convert README from MD to RST.
#!/usr/bin/env python
from setuptools import setup
try:
import pypandoc
description = pypandoc.convert('README.md', 'rst')
except (IOError, ImportError):
description = open('README.md').read()
setup(
name='djoser',
version='0.0.1',
packages=['djoser'],
license='MIT',
author='SUNSCRAPERS',
description='REST version of Django authentication system.',
author_email='info@sunscrapers.com',
long_description=description,
install_requires=[
'Django>=1.5',
'djangorestframework>=2.4.0',
],
tests_require=[
'djet>=0.0.10'
],
include_package_data=True,
zip_safe=False,
url='https://github.com/sunscrapers/djoser',
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
]
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
setup(
name='djoser',
version='0.0.1',
packages=['djoser'],
license='MIT',
author='SUNSCRAPERS',
description='REST version of Django authentication system.',
author_email='info@sunscrapers.com',
long_description=open('README.md').read(),
install_requires=[
'Django>=1.5',
'djangorestframework>=2.4.0',
],
tests_require=[
'djet>=0.0.10'
],
include_package_data=True,
zip_safe=False,
url='https://github.com/sunscrapers/djoser',
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
]
)
<commit_msg>Use pandoc to convert README from MD to RST.<commit_after>#!/usr/bin/env python
from setuptools import setup
try:
import pypandoc
description = pypandoc.convert('README.md', 'rst')
except (IOError, ImportError):
description = open('README.md').read()
setup(
name='djoser',
version='0.0.1',
packages=['djoser'],
license='MIT',
author='SUNSCRAPERS',
description='REST version of Django authentication system.',
author_email='info@sunscrapers.com',
long_description=description,
install_requires=[
'Django>=1.5',
'djangorestframework>=2.4.0',
],
tests_require=[
'djet>=0.0.10'
],
include_package_data=True,
zip_safe=False,
url='https://github.com/sunscrapers/djoser',
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
]
)
|
3774f234f0d3eaf08bf3b6ed713a949346e69fea
|
setup.py
|
setup.py
|
#!/usr/bin/env python
'''Setuptools params'''
from setuptools import setup, find_packages
from os.path import join
scripts = [join('bin', filename) for filename in
['mn', 'mnclean']]
modname = distname = 'mininet'
setup(
name=distname,
version='0.0.0',
description='Process-based OpenFlow emulator',
author='Bob Lantz',
author_email='rlantz@cs.stanford.edu',
packages=find_packages(exclude='test'),
long_description="""\
Insert longer description here.
""",
classifiers=[
"License :: OSI Approved :: GNU General Public License (GPL)",
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Topic :: Internet",
],
keywords='networking protocol Internet OpenFlow',
license='GPL',
install_requires=[
'setuptools'
],
scripts=scripts,
)
|
#!/usr/bin/env python
'''Setuptools params'''
from setuptools import setup, find_packages
from os.path import join
scripts = [join('bin', filename) for filename in
['mn', 'mnclean']]
modname = distname = 'mininet'
setup(
name=distname,
version='0.0.0',
description='Process-based OpenFlow emulator',
author='Bob Lantz',
author_email='rlantz@cs.stanford.edu',
packages=find_packages(exclude='test'),
long_description="""\
Insert longer description here.
""",
classifiers=[
"License :: OSI Approved :: GNU General Public License (GPL)",
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Topic :: Internet",
],
keywords='networking protocol Internet OpenFlow',
license='unspecified',
install_requires=[
'setuptools',
'networkx'
],
scripts=scripts,
)
|
Update license and add networkx dependency
|
Update license and add networkx dependency
|
Python
|
bsd-3-clause
|
mininet/mininet,mininet/mininet,mininet/mininet
|
#!/usr/bin/env python
'''Setuptools params'''
from setuptools import setup, find_packages
from os.path import join
scripts = [join('bin', filename) for filename in
['mn', 'mnclean']]
modname = distname = 'mininet'
setup(
name=distname,
version='0.0.0',
description='Process-based OpenFlow emulator',
author='Bob Lantz',
author_email='rlantz@cs.stanford.edu',
packages=find_packages(exclude='test'),
long_description="""\
Insert longer description here.
""",
classifiers=[
"License :: OSI Approved :: GNU General Public License (GPL)",
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Topic :: Internet",
],
keywords='networking protocol Internet OpenFlow',
license='GPL',
install_requires=[
'setuptools'
],
scripts=scripts,
)
Update license and add networkx dependency
|
#!/usr/bin/env python
'''Setuptools params'''
from setuptools import setup, find_packages
from os.path import join
scripts = [join('bin', filename) for filename in
['mn', 'mnclean']]
modname = distname = 'mininet'
setup(
name=distname,
version='0.0.0',
description='Process-based OpenFlow emulator',
author='Bob Lantz',
author_email='rlantz@cs.stanford.edu',
packages=find_packages(exclude='test'),
long_description="""\
Insert longer description here.
""",
classifiers=[
"License :: OSI Approved :: GNU General Public License (GPL)",
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Topic :: Internet",
],
keywords='networking protocol Internet OpenFlow',
license='unspecified',
install_requires=[
'setuptools',
'networkx'
],
scripts=scripts,
)
|
<commit_before>#!/usr/bin/env python
'''Setuptools params'''
from setuptools import setup, find_packages
from os.path import join
scripts = [join('bin', filename) for filename in
['mn', 'mnclean']]
modname = distname = 'mininet'
setup(
name=distname,
version='0.0.0',
description='Process-based OpenFlow emulator',
author='Bob Lantz',
author_email='rlantz@cs.stanford.edu',
packages=find_packages(exclude='test'),
long_description="""\
Insert longer description here.
""",
classifiers=[
"License :: OSI Approved :: GNU General Public License (GPL)",
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Topic :: Internet",
],
keywords='networking protocol Internet OpenFlow',
license='GPL',
install_requires=[
'setuptools'
],
scripts=scripts,
)
<commit_msg>Update license and add networkx dependency<commit_after>
|
#!/usr/bin/env python
'''Setuptools params'''
from setuptools import setup, find_packages
from os.path import join
scripts = [join('bin', filename) for filename in
['mn', 'mnclean']]
modname = distname = 'mininet'
setup(
name=distname,
version='0.0.0',
description='Process-based OpenFlow emulator',
author='Bob Lantz',
author_email='rlantz@cs.stanford.edu',
packages=find_packages(exclude='test'),
long_description="""\
Insert longer description here.
""",
classifiers=[
"License :: OSI Approved :: GNU General Public License (GPL)",
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Topic :: Internet",
],
keywords='networking protocol Internet OpenFlow',
license='unspecified',
install_requires=[
'setuptools',
'networkx'
],
scripts=scripts,
)
|
#!/usr/bin/env python
'''Setuptools params'''
from setuptools import setup, find_packages
from os.path import join
scripts = [join('bin', filename) for filename in
['mn', 'mnclean']]
modname = distname = 'mininet'
setup(
name=distname,
version='0.0.0',
description='Process-based OpenFlow emulator',
author='Bob Lantz',
author_email='rlantz@cs.stanford.edu',
packages=find_packages(exclude='test'),
long_description="""\
Insert longer description here.
""",
classifiers=[
"License :: OSI Approved :: GNU General Public License (GPL)",
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Topic :: Internet",
],
keywords='networking protocol Internet OpenFlow',
license='GPL',
install_requires=[
'setuptools'
],
scripts=scripts,
)
Update license and add networkx dependency
#!/usr/bin/env python
'''Setuptools params'''
from setuptools import setup, find_packages
from os.path import join
scripts = [join('bin', filename) for filename in
['mn', 'mnclean']]
modname = distname = 'mininet'
setup(
name=distname,
version='0.0.0',
description='Process-based OpenFlow emulator',
author='Bob Lantz',
author_email='rlantz@cs.stanford.edu',
packages=find_packages(exclude='test'),
long_description="""\
Insert longer description here.
""",
classifiers=[
"License :: OSI Approved :: GNU General Public License (GPL)",
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Topic :: Internet",
],
keywords='networking protocol Internet OpenFlow',
license='unspecified',
install_requires=[
'setuptools',
'networkx'
],
scripts=scripts,
)
|
<commit_before>#!/usr/bin/env python
'''Setuptools params'''
from setuptools import setup, find_packages
from os.path import join
scripts = [join('bin', filename) for filename in
['mn', 'mnclean']]
modname = distname = 'mininet'
setup(
name=distname,
version='0.0.0',
description='Process-based OpenFlow emulator',
author='Bob Lantz',
author_email='rlantz@cs.stanford.edu',
packages=find_packages(exclude='test'),
long_description="""\
Insert longer description here.
""",
classifiers=[
"License :: OSI Approved :: GNU General Public License (GPL)",
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Topic :: Internet",
],
keywords='networking protocol Internet OpenFlow',
license='GPL',
install_requires=[
'setuptools'
],
scripts=scripts,
)
<commit_msg>Update license and add networkx dependency<commit_after>#!/usr/bin/env python
'''Setuptools params'''
from setuptools import setup, find_packages
from os.path import join
scripts = [join('bin', filename) for filename in
['mn', 'mnclean']]
modname = distname = 'mininet'
setup(
name=distname,
version='0.0.0',
description='Process-based OpenFlow emulator',
author='Bob Lantz',
author_email='rlantz@cs.stanford.edu',
packages=find_packages(exclude='test'),
long_description="""\
Insert longer description here.
""",
classifiers=[
"License :: OSI Approved :: GNU General Public License (GPL)",
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Topic :: Internet",
],
keywords='networking protocol Internet OpenFlow',
license='unspecified',
install_requires=[
'setuptools',
'networkx'
],
scripts=scripts,
)
|
a34116f72a92af10c93e0490137c82849860ceb9
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='django-cuser',
version=".".join(map(str, __import__("cuser").__version__)),
description='Middleware to make user information always available.',
long_description=open('README.rst').read(),
author='Alireza Savand',
author_email='alireza.savand@gmail.com',
url='https://github.com/Alir3z4/django-cuser',
packages=find_packages(exclude=["django_cuser"]),
install_requires=['Django>=1.8'],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Development Status :: 6 - Mature",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Software Development",
"Topic :: Software Development :: Libraries",
],
)
|
from setuptools import setup, find_packages
setup(
name='django-cuser',
version=".".join(map(str, __import__("cuser").__version__)),
description='Middleware to make user information always available.',
long_description=open('README.rst').read(),
author='Alireza Savand',
author_email='alireza.savand@gmail.com',
url='https://github.com/Alir3z4/django-cuser',
packages=find_packages(exclude=["django_cuser"]),
install_requires=['Django>=1.8'],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Development Status :: 6 - Mature",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Topic :: Software Development",
"Topic :: Software Development :: Libraries",
],
)
|
Add python 3.6 to the classifiers
|
Add python 3.6 to the classifiers
|
Python
|
bsd-3-clause
|
Alir3z4/django-cuser
|
from setuptools import setup, find_packages
setup(
name='django-cuser',
version=".".join(map(str, __import__("cuser").__version__)),
description='Middleware to make user information always available.',
long_description=open('README.rst').read(),
author='Alireza Savand',
author_email='alireza.savand@gmail.com',
url='https://github.com/Alir3z4/django-cuser',
packages=find_packages(exclude=["django_cuser"]),
install_requires=['Django>=1.8'],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Development Status :: 6 - Mature",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Software Development",
"Topic :: Software Development :: Libraries",
],
)
Add python 3.6 to the classifiers
|
from setuptools import setup, find_packages
setup(
name='django-cuser',
version=".".join(map(str, __import__("cuser").__version__)),
description='Middleware to make user information always available.',
long_description=open('README.rst').read(),
author='Alireza Savand',
author_email='alireza.savand@gmail.com',
url='https://github.com/Alir3z4/django-cuser',
packages=find_packages(exclude=["django_cuser"]),
install_requires=['Django>=1.8'],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Development Status :: 6 - Mature",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Topic :: Software Development",
"Topic :: Software Development :: Libraries",
],
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='django-cuser',
version=".".join(map(str, __import__("cuser").__version__)),
description='Middleware to make user information always available.',
long_description=open('README.rst').read(),
author='Alireza Savand',
author_email='alireza.savand@gmail.com',
url='https://github.com/Alir3z4/django-cuser',
packages=find_packages(exclude=["django_cuser"]),
install_requires=['Django>=1.8'],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Development Status :: 6 - Mature",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Software Development",
"Topic :: Software Development :: Libraries",
],
)
<commit_msg>Add python 3.6 to the classifiers<commit_after>
|
from setuptools import setup, find_packages
setup(
name='django-cuser',
version=".".join(map(str, __import__("cuser").__version__)),
description='Middleware to make user information always available.',
long_description=open('README.rst').read(),
author='Alireza Savand',
author_email='alireza.savand@gmail.com',
url='https://github.com/Alir3z4/django-cuser',
packages=find_packages(exclude=["django_cuser"]),
install_requires=['Django>=1.8'],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Development Status :: 6 - Mature",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Topic :: Software Development",
"Topic :: Software Development :: Libraries",
],
)
|
from setuptools import setup, find_packages
setup(
name='django-cuser',
version=".".join(map(str, __import__("cuser").__version__)),
description='Middleware to make user information always available.',
long_description=open('README.rst').read(),
author='Alireza Savand',
author_email='alireza.savand@gmail.com',
url='https://github.com/Alir3z4/django-cuser',
packages=find_packages(exclude=["django_cuser"]),
install_requires=['Django>=1.8'],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Development Status :: 6 - Mature",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Software Development",
"Topic :: Software Development :: Libraries",
],
)
Add python 3.6 to the classifiers
from setuptools import setup, find_packages
setup(
name='django-cuser',
version=".".join(map(str, __import__("cuser").__version__)),
description='Middleware to make user information always available.',
long_description=open('README.rst').read(),
author='Alireza Savand',
author_email='alireza.savand@gmail.com',
url='https://github.com/Alir3z4/django-cuser',
packages=find_packages(exclude=["django_cuser"]),
install_requires=['Django>=1.8'],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Development Status :: 6 - Mature",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Topic :: Software Development",
"Topic :: Software Development :: Libraries",
],
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='django-cuser',
version=".".join(map(str, __import__("cuser").__version__)),
description='Middleware to make user information always available.',
long_description=open('README.rst').read(),
author='Alireza Savand',
author_email='alireza.savand@gmail.com',
url='https://github.com/Alir3z4/django-cuser',
packages=find_packages(exclude=["django_cuser"]),
install_requires=['Django>=1.8'],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Development Status :: 6 - Mature",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Software Development",
"Topic :: Software Development :: Libraries",
],
)
<commit_msg>Add python 3.6 to the classifiers<commit_after>from setuptools import setup, find_packages
setup(
name='django-cuser',
version=".".join(map(str, __import__("cuser").__version__)),
description='Middleware to make user information always available.',
long_description=open('README.rst').read(),
author='Alireza Savand',
author_email='alireza.savand@gmail.com',
url='https://github.com/Alir3z4/django-cuser',
packages=find_packages(exclude=["django_cuser"]),
install_requires=['Django>=1.8'],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Development Status :: 6 - Mature",
"Environment :: Web Environment",
"Framework :: Django",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Topic :: Software Development",
"Topic :: Software Development :: Libraries",
],
)
|
f796323f484f575b3d285f74d2e34af1d701b227
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='celery-queued-once',
version='1.0',
description='Celery base task de-duplicating tasks',
author='Corey Farwell',
author_email='corey@educreations.com',
packages=['queued_once'],
install_requires=['celery', 'django'],
)
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='celery-queued-once',
version='0.1',
description='Celery base task de-duplicating tasks',
author='Corey Farwell',
author_email='corey@educreations.com',
packages=['queued_once'],
install_requires=['celery', 'django'],
)
|
Set version to 0.1 for now
|
Set version to 0.1 for now
|
Python
|
mit
|
educreations/celery-queued-once
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='celery-queued-once',
version='1.0',
description='Celery base task de-duplicating tasks',
author='Corey Farwell',
author_email='corey@educreations.com',
packages=['queued_once'],
install_requires=['celery', 'django'],
)
Set version to 0.1 for now
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='celery-queued-once',
version='0.1',
description='Celery base task de-duplicating tasks',
author='Corey Farwell',
author_email='corey@educreations.com',
packages=['queued_once'],
install_requires=['celery', 'django'],
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
setup(
name='celery-queued-once',
version='1.0',
description='Celery base task de-duplicating tasks',
author='Corey Farwell',
author_email='corey@educreations.com',
packages=['queued_once'],
install_requires=['celery', 'django'],
)
<commit_msg>Set version to 0.1 for now<commit_after>
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='celery-queued-once',
version='0.1',
description='Celery base task de-duplicating tasks',
author='Corey Farwell',
author_email='corey@educreations.com',
packages=['queued_once'],
install_requires=['celery', 'django'],
)
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='celery-queued-once',
version='1.0',
description='Celery base task de-duplicating tasks',
author='Corey Farwell',
author_email='corey@educreations.com',
packages=['queued_once'],
install_requires=['celery', 'django'],
)
Set version to 0.1 for now
#!/usr/bin/env python
from setuptools import setup
setup(
name='celery-queued-once',
version='0.1',
description='Celery base task de-duplicating tasks',
author='Corey Farwell',
author_email='corey@educreations.com',
packages=['queued_once'],
install_requires=['celery', 'django'],
)
|
<commit_before>#!/usr/bin/env python
from setuptools import setup
setup(
name='celery-queued-once',
version='1.0',
description='Celery base task de-duplicating tasks',
author='Corey Farwell',
author_email='corey@educreations.com',
packages=['queued_once'],
install_requires=['celery', 'django'],
)
<commit_msg>Set version to 0.1 for now<commit_after>#!/usr/bin/env python
from setuptools import setup
setup(
name='celery-queued-once',
version='0.1',
description='Celery base task de-duplicating tasks',
author='Corey Farwell',
author_email='corey@educreations.com',
packages=['queued_once'],
install_requires=['celery', 'django'],
)
|
f3dfad2448ff90386891f1490fc2ba3c0f5dae35
|
setup.py
|
setup.py
|
"""Setup script for pygoogling."""
from codecs import open as open_codec
from os import path
from setuptools import setup
HERE = path.abspath(path.dirname(__file__))
with open_codec(path.join(HERE, 'README.md'), encoding='utf-8') as f:
LONG_DESCRIPTION = f.read()
setup(
name='pygoogling',
version='0.0.2',
description='Python library to do google search',
long_description=LONG_DESCRIPTION,
url='https://github.com/essanpupil/pygoogling',
author='Ikhsan Noor Rosyidin',
author_email='jakethitam1985@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
keywords='google search python module',
py_modules=['pygoogling.googling'],
install_requires=['bs4', 'requests', 'html5lib'],
)
|
"""Setup script for pygoogling."""
from codecs import open as open_codec
from os import path
from setuptools import setup
HERE = path.abspath(path.dirname(__file__))
with open_codec(path.join(HERE, 'README.rst'), encoding='utf-8') as f:
LONG_DESCRIPTION = f.read()
setup(
name='pygoogling',
version='0.0.2',
description='Python library to do google search',
long_description=LONG_DESCRIPTION,
url='https://github.com/essanpupil/pygoogling',
author='Ikhsan Noor Rosyidin',
author_email='jakethitam1985@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
keywords='google search python module',
py_modules=['pygoogling.googling'],
install_requires=['bs4', 'requests', 'html5lib'],
)
|
Change README file format from .md to .rst
|
Change README file format from .md to .rst
|
Python
|
mit
|
essanpupil/pygoogling
|
"""Setup script for pygoogling."""
from codecs import open as open_codec
from os import path
from setuptools import setup
HERE = path.abspath(path.dirname(__file__))
with open_codec(path.join(HERE, 'README.md'), encoding='utf-8') as f:
LONG_DESCRIPTION = f.read()
setup(
name='pygoogling',
version='0.0.2',
description='Python library to do google search',
long_description=LONG_DESCRIPTION,
url='https://github.com/essanpupil/pygoogling',
author='Ikhsan Noor Rosyidin',
author_email='jakethitam1985@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
keywords='google search python module',
py_modules=['pygoogling.googling'],
install_requires=['bs4', 'requests', 'html5lib'],
)
Change README file format from .md to .rst
|
"""Setup script for pygoogling."""
from codecs import open as open_codec
from os import path
from setuptools import setup
HERE = path.abspath(path.dirname(__file__))
with open_codec(path.join(HERE, 'README.rst'), encoding='utf-8') as f:
LONG_DESCRIPTION = f.read()
setup(
name='pygoogling',
version='0.0.2',
description='Python library to do google search',
long_description=LONG_DESCRIPTION,
url='https://github.com/essanpupil/pygoogling',
author='Ikhsan Noor Rosyidin',
author_email='jakethitam1985@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
keywords='google search python module',
py_modules=['pygoogling.googling'],
install_requires=['bs4', 'requests', 'html5lib'],
)
|
<commit_before>"""Setup script for pygoogling."""
from codecs import open as open_codec
from os import path
from setuptools import setup
HERE = path.abspath(path.dirname(__file__))
with open_codec(path.join(HERE, 'README.md'), encoding='utf-8') as f:
LONG_DESCRIPTION = f.read()
setup(
name='pygoogling',
version='0.0.2',
description='Python library to do google search',
long_description=LONG_DESCRIPTION,
url='https://github.com/essanpupil/pygoogling',
author='Ikhsan Noor Rosyidin',
author_email='jakethitam1985@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
keywords='google search python module',
py_modules=['pygoogling.googling'],
install_requires=['bs4', 'requests', 'html5lib'],
)
<commit_msg>Change README file format from .md to .rst<commit_after>
|
"""Setup script for pygoogling."""
from codecs import open as open_codec
from os import path
from setuptools import setup
HERE = path.abspath(path.dirname(__file__))
with open_codec(path.join(HERE, 'README.rst'), encoding='utf-8') as f:
LONG_DESCRIPTION = f.read()
setup(
name='pygoogling',
version='0.0.2',
description='Python library to do google search',
long_description=LONG_DESCRIPTION,
url='https://github.com/essanpupil/pygoogling',
author='Ikhsan Noor Rosyidin',
author_email='jakethitam1985@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
keywords='google search python module',
py_modules=['pygoogling.googling'],
install_requires=['bs4', 'requests', 'html5lib'],
)
|
"""Setup script for pygoogling."""
from codecs import open as open_codec
from os import path
from setuptools import setup
HERE = path.abspath(path.dirname(__file__))
with open_codec(path.join(HERE, 'README.md'), encoding='utf-8') as f:
LONG_DESCRIPTION = f.read()
setup(
name='pygoogling',
version='0.0.2',
description='Python library to do google search',
long_description=LONG_DESCRIPTION,
url='https://github.com/essanpupil/pygoogling',
author='Ikhsan Noor Rosyidin',
author_email='jakethitam1985@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
keywords='google search python module',
py_modules=['pygoogling.googling'],
install_requires=['bs4', 'requests', 'html5lib'],
)
Change README file format from .md to .rst
"""Setup script for pygoogling."""
from codecs import open as open_codec
from os import path
from setuptools import setup
HERE = path.abspath(path.dirname(__file__))
with open_codec(path.join(HERE, 'README.rst'), encoding='utf-8') as f:
LONG_DESCRIPTION = f.read()
setup(
name='pygoogling',
version='0.0.2',
description='Python library to do google search',
long_description=LONG_DESCRIPTION,
url='https://github.com/essanpupil/pygoogling',
author='Ikhsan Noor Rosyidin',
author_email='jakethitam1985@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
keywords='google search python module',
py_modules=['pygoogling.googling'],
install_requires=['bs4', 'requests', 'html5lib'],
)
|
<commit_before>"""Setup script for pygoogling."""
from codecs import open as open_codec
from os import path
from setuptools import setup
HERE = path.abspath(path.dirname(__file__))
with open_codec(path.join(HERE, 'README.md'), encoding='utf-8') as f:
LONG_DESCRIPTION = f.read()
setup(
name='pygoogling',
version='0.0.2',
description='Python library to do google search',
long_description=LONG_DESCRIPTION,
url='https://github.com/essanpupil/pygoogling',
author='Ikhsan Noor Rosyidin',
author_email='jakethitam1985@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
keywords='google search python module',
py_modules=['pygoogling.googling'],
install_requires=['bs4', 'requests', 'html5lib'],
)
<commit_msg>Change README file format from .md to .rst<commit_after>"""Setup script for pygoogling."""
from codecs import open as open_codec
from os import path
from setuptools import setup
HERE = path.abspath(path.dirname(__file__))
with open_codec(path.join(HERE, 'README.rst'), encoding='utf-8') as f:
LONG_DESCRIPTION = f.read()
setup(
name='pygoogling',
version='0.0.2',
description='Python library to do google search',
long_description=LONG_DESCRIPTION,
url='https://github.com/essanpupil/pygoogling',
author='Ikhsan Noor Rosyidin',
author_email='jakethitam1985@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries :: Python Modules',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.4',
],
keywords='google search python module',
py_modules=['pygoogling.googling'],
install_requires=['bs4', 'requests', 'html5lib'],
)
|
198fb06b2de1b694f0350278563bf3c8f79f615d
|
setup.py
|
setup.py
|
from setuptools import find_packages, setup
from setuptools.command.test import test as TestCommand
import sys
class PyTest(TestCommand):
def finalize_options(self):
super().finalize_options()
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
sys.exit(pytest.main(self.test_args))
def read(filename):
with open(filename) as f:
return f.read()
setup(
name='Henson-Logging',
version='0.3.0',
author='Andy Dirnberger, Jon Banafato, and others',
author_email='henson@iheart.com',
url='https://henson-logging.rtfd.org',
description='A library to use structured logging with a Henson application.',
long_description=read('README.rst'),
license='Apache License, Version 2.0',
packages=find_packages(exclude=['tests']),
zip_safe=False,
install_requires=[
'Henson>=0.2.0',
'structlog',
],
tests_require=[
'pytest',
],
cmdclass={
'test': PyTest,
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3 :: Only',
'Topic :: Software Development :: Libraries :: Application Frameworks',
]
)
|
from setuptools import find_packages, setup
from setuptools.command.test import test as TestCommand
import sys
class PyTest(TestCommand):
def finalize_options(self):
super().finalize_options()
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
sys.exit(pytest.main(self.test_args))
def read(filename):
with open(filename) as f:
return f.read()
setup(
name='Henson-Logging',
version='0.3.0',
author='Andy Dirnberger, Jon Banafato, and others',
author_email='henson@iheart.com',
url='https://henson-logging.rtfd.org',
description='A library to use structured logging with a Henson application.',
long_description=read('README.rst'),
license='Apache License, Version 2.0',
packages=find_packages(exclude=['tests']),
zip_safe=False,
install_requires=[
'Henson',
'structlog',
],
tests_require=[
'pytest',
],
cmdclass={
'test': PyTest,
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3 :: Only',
'Topic :: Software Development :: Libraries :: Application Frameworks',
]
)
|
Remove the version specifier for Henson
|
Remove the version specifier for Henson
Henson 0.2 doesn't exist.
|
Python
|
apache-2.0
|
iheartradio/Henson-Logging
|
from setuptools import find_packages, setup
from setuptools.command.test import test as TestCommand
import sys
class PyTest(TestCommand):
def finalize_options(self):
super().finalize_options()
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
sys.exit(pytest.main(self.test_args))
def read(filename):
with open(filename) as f:
return f.read()
setup(
name='Henson-Logging',
version='0.3.0',
author='Andy Dirnberger, Jon Banafato, and others',
author_email='henson@iheart.com',
url='https://henson-logging.rtfd.org',
description='A library to use structured logging with a Henson application.',
long_description=read('README.rst'),
license='Apache License, Version 2.0',
packages=find_packages(exclude=['tests']),
zip_safe=False,
install_requires=[
'Henson>=0.2.0',
'structlog',
],
tests_require=[
'pytest',
],
cmdclass={
'test': PyTest,
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3 :: Only',
'Topic :: Software Development :: Libraries :: Application Frameworks',
]
)
Remove the version specifier for Henson
Henson 0.2 doesn't exist.
|
from setuptools import find_packages, setup
from setuptools.command.test import test as TestCommand
import sys
class PyTest(TestCommand):
def finalize_options(self):
super().finalize_options()
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
sys.exit(pytest.main(self.test_args))
def read(filename):
with open(filename) as f:
return f.read()
setup(
name='Henson-Logging',
version='0.3.0',
author='Andy Dirnberger, Jon Banafato, and others',
author_email='henson@iheart.com',
url='https://henson-logging.rtfd.org',
description='A library to use structured logging with a Henson application.',
long_description=read('README.rst'),
license='Apache License, Version 2.0',
packages=find_packages(exclude=['tests']),
zip_safe=False,
install_requires=[
'Henson',
'structlog',
],
tests_require=[
'pytest',
],
cmdclass={
'test': PyTest,
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3 :: Only',
'Topic :: Software Development :: Libraries :: Application Frameworks',
]
)
|
<commit_before>from setuptools import find_packages, setup
from setuptools.command.test import test as TestCommand
import sys
class PyTest(TestCommand):
def finalize_options(self):
super().finalize_options()
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
sys.exit(pytest.main(self.test_args))
def read(filename):
with open(filename) as f:
return f.read()
setup(
name='Henson-Logging',
version='0.3.0',
author='Andy Dirnberger, Jon Banafato, and others',
author_email='henson@iheart.com',
url='https://henson-logging.rtfd.org',
description='A library to use structured logging with a Henson application.',
long_description=read('README.rst'),
license='Apache License, Version 2.0',
packages=find_packages(exclude=['tests']),
zip_safe=False,
install_requires=[
'Henson>=0.2.0',
'structlog',
],
tests_require=[
'pytest',
],
cmdclass={
'test': PyTest,
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3 :: Only',
'Topic :: Software Development :: Libraries :: Application Frameworks',
]
)
<commit_msg>Remove the version specifier for Henson
Henson 0.2 doesn't exist.<commit_after>
|
from setuptools import find_packages, setup
from setuptools.command.test import test as TestCommand
import sys
class PyTest(TestCommand):
def finalize_options(self):
super().finalize_options()
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
sys.exit(pytest.main(self.test_args))
def read(filename):
with open(filename) as f:
return f.read()
setup(
name='Henson-Logging',
version='0.3.0',
author='Andy Dirnberger, Jon Banafato, and others',
author_email='henson@iheart.com',
url='https://henson-logging.rtfd.org',
description='A library to use structured logging with a Henson application.',
long_description=read('README.rst'),
license='Apache License, Version 2.0',
packages=find_packages(exclude=['tests']),
zip_safe=False,
install_requires=[
'Henson',
'structlog',
],
tests_require=[
'pytest',
],
cmdclass={
'test': PyTest,
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3 :: Only',
'Topic :: Software Development :: Libraries :: Application Frameworks',
]
)
|
from setuptools import find_packages, setup
from setuptools.command.test import test as TestCommand
import sys
class PyTest(TestCommand):
def finalize_options(self):
super().finalize_options()
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
sys.exit(pytest.main(self.test_args))
def read(filename):
with open(filename) as f:
return f.read()
setup(
name='Henson-Logging',
version='0.3.0',
author='Andy Dirnberger, Jon Banafato, and others',
author_email='henson@iheart.com',
url='https://henson-logging.rtfd.org',
description='A library to use structured logging with a Henson application.',
long_description=read('README.rst'),
license='Apache License, Version 2.0',
packages=find_packages(exclude=['tests']),
zip_safe=False,
install_requires=[
'Henson>=0.2.0',
'structlog',
],
tests_require=[
'pytest',
],
cmdclass={
'test': PyTest,
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3 :: Only',
'Topic :: Software Development :: Libraries :: Application Frameworks',
]
)
Remove the version specifier for Henson
Henson 0.2 doesn't exist.
from setuptools import find_packages, setup
from setuptools.command.test import test as TestCommand
import sys
class PyTest(TestCommand):
def finalize_options(self):
super().finalize_options()
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
sys.exit(pytest.main(self.test_args))
def read(filename):
with open(filename) as f:
return f.read()
setup(
name='Henson-Logging',
version='0.3.0',
author='Andy Dirnberger, Jon Banafato, and others',
author_email='henson@iheart.com',
url='https://henson-logging.rtfd.org',
description='A library to use structured logging with a Henson application.',
long_description=read('README.rst'),
license='Apache License, Version 2.0',
packages=find_packages(exclude=['tests']),
zip_safe=False,
install_requires=[
'Henson',
'structlog',
],
tests_require=[
'pytest',
],
cmdclass={
'test': PyTest,
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3 :: Only',
'Topic :: Software Development :: Libraries :: Application Frameworks',
]
)
|
<commit_before>from setuptools import find_packages, setup
from setuptools.command.test import test as TestCommand
import sys
class PyTest(TestCommand):
def finalize_options(self):
super().finalize_options()
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
sys.exit(pytest.main(self.test_args))
def read(filename):
with open(filename) as f:
return f.read()
setup(
name='Henson-Logging',
version='0.3.0',
author='Andy Dirnberger, Jon Banafato, and others',
author_email='henson@iheart.com',
url='https://henson-logging.rtfd.org',
description='A library to use structured logging with a Henson application.',
long_description=read('README.rst'),
license='Apache License, Version 2.0',
packages=find_packages(exclude=['tests']),
zip_safe=False,
install_requires=[
'Henson>=0.2.0',
'structlog',
],
tests_require=[
'pytest',
],
cmdclass={
'test': PyTest,
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3 :: Only',
'Topic :: Software Development :: Libraries :: Application Frameworks',
]
)
<commit_msg>Remove the version specifier for Henson
Henson 0.2 doesn't exist.<commit_after>from setuptools import find_packages, setup
from setuptools.command.test import test as TestCommand
import sys
class PyTest(TestCommand):
def finalize_options(self):
super().finalize_options()
self.test_args = []
self.test_suite = True
def run_tests(self):
import pytest
sys.exit(pytest.main(self.test_args))
def read(filename):
with open(filename) as f:
return f.read()
setup(
name='Henson-Logging',
version='0.3.0',
author='Andy Dirnberger, Jon Banafato, and others',
author_email='henson@iheart.com',
url='https://henson-logging.rtfd.org',
description='A library to use structured logging with a Henson application.',
long_description=read('README.rst'),
license='Apache License, Version 2.0',
packages=find_packages(exclude=['tests']),
zip_safe=False,
install_requires=[
'Henson',
'structlog',
],
tests_require=[
'pytest',
],
cmdclass={
'test': PyTest,
},
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3 :: Only',
'Topic :: Software Development :: Libraries :: Application Frameworks',
]
)
|
dc17543e55759267bd10ece8b572643d1231dd31
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='txkazoo',
version='0.0.4',
description='Twisted binding for Kazoo',
maintainer='Manish Tomar',
maintainer_email='manish.tomar@rackspace.com',
license='Apache 2.0',
py_modules=['txkazoo'],
install_requires=['twisted==13.2.0', 'kazoo==2.0b1']
)
|
from setuptools import find_packages, setup
setup(
name='txkazoo',
version='0.0.4',
description='Twisted binding for Kazoo',
maintainer='Manish Tomar',
maintainer_email='manish.tomar@rackspace.com',
license='Apache 2.0',
packages=find_packages(),
install_requires=['twisted==13.2.0', 'kazoo==2.0b1']
)
|
Use find_packages to find packages instead of having them somewhere manually
|
Use find_packages to find packages instead of having them somewhere
manually
|
Python
|
apache-2.0
|
rackerlabs/txkazoo
|
from setuptools import setup
setup(
name='txkazoo',
version='0.0.4',
description='Twisted binding for Kazoo',
maintainer='Manish Tomar',
maintainer_email='manish.tomar@rackspace.com',
license='Apache 2.0',
py_modules=['txkazoo'],
install_requires=['twisted==13.2.0', 'kazoo==2.0b1']
)
Use find_packages to find packages instead of having them somewhere
manually
|
from setuptools import find_packages, setup
setup(
name='txkazoo',
version='0.0.4',
description='Twisted binding for Kazoo',
maintainer='Manish Tomar',
maintainer_email='manish.tomar@rackspace.com',
license='Apache 2.0',
packages=find_packages(),
install_requires=['twisted==13.2.0', 'kazoo==2.0b1']
)
|
<commit_before>from setuptools import setup
setup(
name='txkazoo',
version='0.0.4',
description='Twisted binding for Kazoo',
maintainer='Manish Tomar',
maintainer_email='manish.tomar@rackspace.com',
license='Apache 2.0',
py_modules=['txkazoo'],
install_requires=['twisted==13.2.0', 'kazoo==2.0b1']
)
<commit_msg>Use find_packages to find packages instead of having them somewhere
manually<commit_after>
|
from setuptools import find_packages, setup
setup(
name='txkazoo',
version='0.0.4',
description='Twisted binding for Kazoo',
maintainer='Manish Tomar',
maintainer_email='manish.tomar@rackspace.com',
license='Apache 2.0',
packages=find_packages(),
install_requires=['twisted==13.2.0', 'kazoo==2.0b1']
)
|
from setuptools import setup
setup(
name='txkazoo',
version='0.0.4',
description='Twisted binding for Kazoo',
maintainer='Manish Tomar',
maintainer_email='manish.tomar@rackspace.com',
license='Apache 2.0',
py_modules=['txkazoo'],
install_requires=['twisted==13.2.0', 'kazoo==2.0b1']
)
Use find_packages to find packages instead of having them somewhere
manually
from setuptools import find_packages, setup
setup(
name='txkazoo',
version='0.0.4',
description='Twisted binding for Kazoo',
maintainer='Manish Tomar',
maintainer_email='manish.tomar@rackspace.com',
license='Apache 2.0',
packages=find_packages(),
install_requires=['twisted==13.2.0', 'kazoo==2.0b1']
)
|
<commit_before>from setuptools import setup
setup(
name='txkazoo',
version='0.0.4',
description='Twisted binding for Kazoo',
maintainer='Manish Tomar',
maintainer_email='manish.tomar@rackspace.com',
license='Apache 2.0',
py_modules=['txkazoo'],
install_requires=['twisted==13.2.0', 'kazoo==2.0b1']
)
<commit_msg>Use find_packages to find packages instead of having them somewhere
manually<commit_after>from setuptools import find_packages, setup
setup(
name='txkazoo',
version='0.0.4',
description='Twisted binding for Kazoo',
maintainer='Manish Tomar',
maintainer_email='manish.tomar@rackspace.com',
license='Apache 2.0',
packages=find_packages(),
install_requires=['twisted==13.2.0', 'kazoo==2.0b1']
)
|
f2399e49e657848a58022b63915fad7969841b62
|
setup.py
|
setup.py
|
import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='django-portfolio',
version='0.8.0',
description='Web Worker\'s Portfolio for Django.',
url='http://github.com/benspaulding/django-portfolio/',
author='Ben Spaulding',
author_email='ben@benspaulding.com',
license='BSD',
download_url='http://github.com/benspaulding/django-portfolio/tarball/v0.8.0',
long_description = read('README.rst'),
packages = ['portfolio'],
package_data = {'portfolio': ['locale/*/LC_MESSAGES/*',
'templates/portfolio/*']},
classifiers=['Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Site Management'],
)
|
import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='django-portfolio',
version='0.8.0',
description='Web Worker\'s Portfolio for Django.',
url='http://github.com/benspaulding/django-portfolio/',
author='Ben Spaulding',
author_email='ben@benspaulding.com',
license='BSD',
download_url='http://github.com/benspaulding/django-portfolio/tarball/v0.8.0',
long_description = read('README.rst'),
packages = ['portfolio', 'portfolio.tests'],
package_data = {'portfolio': ['locale/*/LC_MESSAGES/*',
'templates/portfolio/*']},
classifiers=['Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Site Management'],
)
|
Add tests to package list.
|
Add tests to package list.
Missed this earlier. Oops.
|
Python
|
bsd-3-clause
|
blturner/django-portfolio,blturner/django-portfolio,benspaulding/django-portfolio
|
import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='django-portfolio',
version='0.8.0',
description='Web Worker\'s Portfolio for Django.',
url='http://github.com/benspaulding/django-portfolio/',
author='Ben Spaulding',
author_email='ben@benspaulding.com',
license='BSD',
download_url='http://github.com/benspaulding/django-portfolio/tarball/v0.8.0',
long_description = read('README.rst'),
packages = ['portfolio'],
package_data = {'portfolio': ['locale/*/LC_MESSAGES/*',
'templates/portfolio/*']},
classifiers=['Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Site Management'],
)
Add tests to package list.
Missed this earlier. Oops.
|
import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='django-portfolio',
version='0.8.0',
description='Web Worker\'s Portfolio for Django.',
url='http://github.com/benspaulding/django-portfolio/',
author='Ben Spaulding',
author_email='ben@benspaulding.com',
license='BSD',
download_url='http://github.com/benspaulding/django-portfolio/tarball/v0.8.0',
long_description = read('README.rst'),
packages = ['portfolio', 'portfolio.tests'],
package_data = {'portfolio': ['locale/*/LC_MESSAGES/*',
'templates/portfolio/*']},
classifiers=['Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Site Management'],
)
|
<commit_before>import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='django-portfolio',
version='0.8.0',
description='Web Worker\'s Portfolio for Django.',
url='http://github.com/benspaulding/django-portfolio/',
author='Ben Spaulding',
author_email='ben@benspaulding.com',
license='BSD',
download_url='http://github.com/benspaulding/django-portfolio/tarball/v0.8.0',
long_description = read('README.rst'),
packages = ['portfolio'],
package_data = {'portfolio': ['locale/*/LC_MESSAGES/*',
'templates/portfolio/*']},
classifiers=['Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Site Management'],
)
<commit_msg>Add tests to package list.
Missed this earlier. Oops.<commit_after>
|
import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='django-portfolio',
version='0.8.0',
description='Web Worker\'s Portfolio for Django.',
url='http://github.com/benspaulding/django-portfolio/',
author='Ben Spaulding',
author_email='ben@benspaulding.com',
license='BSD',
download_url='http://github.com/benspaulding/django-portfolio/tarball/v0.8.0',
long_description = read('README.rst'),
packages = ['portfolio', 'portfolio.tests'],
package_data = {'portfolio': ['locale/*/LC_MESSAGES/*',
'templates/portfolio/*']},
classifiers=['Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Site Management'],
)
|
import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='django-portfolio',
version='0.8.0',
description='Web Worker\'s Portfolio for Django.',
url='http://github.com/benspaulding/django-portfolio/',
author='Ben Spaulding',
author_email='ben@benspaulding.com',
license='BSD',
download_url='http://github.com/benspaulding/django-portfolio/tarball/v0.8.0',
long_description = read('README.rst'),
packages = ['portfolio'],
package_data = {'portfolio': ['locale/*/LC_MESSAGES/*',
'templates/portfolio/*']},
classifiers=['Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Site Management'],
)
Add tests to package list.
Missed this earlier. Oops.
import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='django-portfolio',
version='0.8.0',
description='Web Worker\'s Portfolio for Django.',
url='http://github.com/benspaulding/django-portfolio/',
author='Ben Spaulding',
author_email='ben@benspaulding.com',
license='BSD',
download_url='http://github.com/benspaulding/django-portfolio/tarball/v0.8.0',
long_description = read('README.rst'),
packages = ['portfolio', 'portfolio.tests'],
package_data = {'portfolio': ['locale/*/LC_MESSAGES/*',
'templates/portfolio/*']},
classifiers=['Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Site Management'],
)
|
<commit_before>import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='django-portfolio',
version='0.8.0',
description='Web Worker\'s Portfolio for Django.',
url='http://github.com/benspaulding/django-portfolio/',
author='Ben Spaulding',
author_email='ben@benspaulding.com',
license='BSD',
download_url='http://github.com/benspaulding/django-portfolio/tarball/v0.8.0',
long_description = read('README.rst'),
packages = ['portfolio'],
package_data = {'portfolio': ['locale/*/LC_MESSAGES/*',
'templates/portfolio/*']},
classifiers=['Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Site Management'],
)
<commit_msg>Add tests to package list.
Missed this earlier. Oops.<commit_after>import os
from distutils.core import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='django-portfolio',
version='0.8.0',
description='Web Worker\'s Portfolio for Django.',
url='http://github.com/benspaulding/django-portfolio/',
author='Ben Spaulding',
author_email='ben@benspaulding.com',
license='BSD',
download_url='http://github.com/benspaulding/django-portfolio/tarball/v0.8.0',
long_description = read('README.rst'),
packages = ['portfolio', 'portfolio.tests'],
package_data = {'portfolio': ['locale/*/LC_MESSAGES/*',
'templates/portfolio/*']},
classifiers=['Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Site Management'],
)
|
815608b3c35c754f50d1651ea8124315524dacb0
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='jmbo-banner',
version='0.5',
description='Jmbo banner app.',
long_description=open('README.rst', 'r').read() + open('AUTHORS.rst', 'r').read() + open('CHANGELOG.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekelt.com',
license='BSD',
url='http://github.com/praekelt/jmbo-banner',
packages=find_packages(),
install_requires=[
'jmbo>=1.1.1',
'django-dfp>=0.3.3',
],
tests_require=[
'django-setuptest>=0.1.4',
],
test_suite="setuptest.setuptest.SetupTestSuite",
include_package_data=True,
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: BSD License",
"Development Status :: 4 - Beta",
"Operating System :: OS Independent",
"Framework :: Django",
"Intended Audience :: Developers",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
zip_safe=False,
)
|
from setuptools import setup, find_packages
setup(
name='jmbo-banner',
version='0.5',
description='Jmbo banner app.',
long_description=open('README.rst', 'r').read() + open('AUTHORS.rst', 'r').read() + open('CHANGELOG.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekelt.com',
license='BSD',
url='http://github.com/praekelt/jmbo-banner',
packages=find_packages(),
install_requires=[
# Pinned because django-dfp has no pin while jmbo has, but setuptools
# is not smart enough to resolve this. Also setuptools ignores the pin
# if in tests_require.
'django>=1.4,<1.7',
'jmbo>=1.1.1',
'django-dfp>=0.3.3',
],
tests_require=[
'django-setuptest>=0.1.4',
'psycopg2',
],
test_suite="setuptest.setuptest.SetupTestSuite",
include_package_data=True,
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: BSD License",
"Development Status :: 4 - Beta",
"Operating System :: OS Independent",
"Framework :: Django",
"Intended Audience :: Developers",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
zip_safe=False,
)
|
Add pins so tests pass outside of Travis as well
|
Add pins so tests pass outside of Travis as well
|
Python
|
bsd-3-clause
|
praekelt/jmbo-banner,praekelt/jmbo-banner
|
from setuptools import setup, find_packages
setup(
name='jmbo-banner',
version='0.5',
description='Jmbo banner app.',
long_description=open('README.rst', 'r').read() + open('AUTHORS.rst', 'r').read() + open('CHANGELOG.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekelt.com',
license='BSD',
url='http://github.com/praekelt/jmbo-banner',
packages=find_packages(),
install_requires=[
'jmbo>=1.1.1',
'django-dfp>=0.3.3',
],
tests_require=[
'django-setuptest>=0.1.4',
],
test_suite="setuptest.setuptest.SetupTestSuite",
include_package_data=True,
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: BSD License",
"Development Status :: 4 - Beta",
"Operating System :: OS Independent",
"Framework :: Django",
"Intended Audience :: Developers",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
zip_safe=False,
)
Add pins so tests pass outside of Travis as well
|
from setuptools import setup, find_packages
setup(
name='jmbo-banner',
version='0.5',
description='Jmbo banner app.',
long_description=open('README.rst', 'r').read() + open('AUTHORS.rst', 'r').read() + open('CHANGELOG.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekelt.com',
license='BSD',
url='http://github.com/praekelt/jmbo-banner',
packages=find_packages(),
install_requires=[
# Pinned because django-dfp has no pin while jmbo has, but setuptools
# is not smart enough to resolve this. Also setuptools ignores the pin
# if in tests_require.
'django>=1.4,<1.7',
'jmbo>=1.1.1',
'django-dfp>=0.3.3',
],
tests_require=[
'django-setuptest>=0.1.4',
'psycopg2',
],
test_suite="setuptest.setuptest.SetupTestSuite",
include_package_data=True,
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: BSD License",
"Development Status :: 4 - Beta",
"Operating System :: OS Independent",
"Framework :: Django",
"Intended Audience :: Developers",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
zip_safe=False,
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='jmbo-banner',
version='0.5',
description='Jmbo banner app.',
long_description=open('README.rst', 'r').read() + open('AUTHORS.rst', 'r').read() + open('CHANGELOG.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekelt.com',
license='BSD',
url='http://github.com/praekelt/jmbo-banner',
packages=find_packages(),
install_requires=[
'jmbo>=1.1.1',
'django-dfp>=0.3.3',
],
tests_require=[
'django-setuptest>=0.1.4',
],
test_suite="setuptest.setuptest.SetupTestSuite",
include_package_data=True,
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: BSD License",
"Development Status :: 4 - Beta",
"Operating System :: OS Independent",
"Framework :: Django",
"Intended Audience :: Developers",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
zip_safe=False,
)
<commit_msg>Add pins so tests pass outside of Travis as well<commit_after>
|
from setuptools import setup, find_packages
setup(
name='jmbo-banner',
version='0.5',
description='Jmbo banner app.',
long_description=open('README.rst', 'r').read() + open('AUTHORS.rst', 'r').read() + open('CHANGELOG.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekelt.com',
license='BSD',
url='http://github.com/praekelt/jmbo-banner',
packages=find_packages(),
install_requires=[
# Pinned because django-dfp has no pin while jmbo has, but setuptools
# is not smart enough to resolve this. Also setuptools ignores the pin
# if in tests_require.
'django>=1.4,<1.7',
'jmbo>=1.1.1',
'django-dfp>=0.3.3',
],
tests_require=[
'django-setuptest>=0.1.4',
'psycopg2',
],
test_suite="setuptest.setuptest.SetupTestSuite",
include_package_data=True,
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: BSD License",
"Development Status :: 4 - Beta",
"Operating System :: OS Independent",
"Framework :: Django",
"Intended Audience :: Developers",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
zip_safe=False,
)
|
from setuptools import setup, find_packages
setup(
name='jmbo-banner',
version='0.5',
description='Jmbo banner app.',
long_description=open('README.rst', 'r').read() + open('AUTHORS.rst', 'r').read() + open('CHANGELOG.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekelt.com',
license='BSD',
url='http://github.com/praekelt/jmbo-banner',
packages=find_packages(),
install_requires=[
'jmbo>=1.1.1',
'django-dfp>=0.3.3',
],
tests_require=[
'django-setuptest>=0.1.4',
],
test_suite="setuptest.setuptest.SetupTestSuite",
include_package_data=True,
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: BSD License",
"Development Status :: 4 - Beta",
"Operating System :: OS Independent",
"Framework :: Django",
"Intended Audience :: Developers",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
zip_safe=False,
)
Add pins so tests pass outside of Travis as well
from setuptools import setup, find_packages
setup(
name='jmbo-banner',
version='0.5',
description='Jmbo banner app.',
long_description=open('README.rst', 'r').read() + open('AUTHORS.rst', 'r').read() + open('CHANGELOG.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekelt.com',
license='BSD',
url='http://github.com/praekelt/jmbo-banner',
packages=find_packages(),
install_requires=[
# Pinned because django-dfp has no pin while jmbo has, but setuptools
# is not smart enough to resolve this. Also setuptools ignores the pin
# if in tests_require.
'django>=1.4,<1.7',
'jmbo>=1.1.1',
'django-dfp>=0.3.3',
],
tests_require=[
'django-setuptest>=0.1.4',
'psycopg2',
],
test_suite="setuptest.setuptest.SetupTestSuite",
include_package_data=True,
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: BSD License",
"Development Status :: 4 - Beta",
"Operating System :: OS Independent",
"Framework :: Django",
"Intended Audience :: Developers",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
zip_safe=False,
)
|
<commit_before>from setuptools import setup, find_packages
setup(
name='jmbo-banner',
version='0.5',
description='Jmbo banner app.',
long_description=open('README.rst', 'r').read() + open('AUTHORS.rst', 'r').read() + open('CHANGELOG.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekelt.com',
license='BSD',
url='http://github.com/praekelt/jmbo-banner',
packages=find_packages(),
install_requires=[
'jmbo>=1.1.1',
'django-dfp>=0.3.3',
],
tests_require=[
'django-setuptest>=0.1.4',
],
test_suite="setuptest.setuptest.SetupTestSuite",
include_package_data=True,
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: BSD License",
"Development Status :: 4 - Beta",
"Operating System :: OS Independent",
"Framework :: Django",
"Intended Audience :: Developers",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
zip_safe=False,
)
<commit_msg>Add pins so tests pass outside of Travis as well<commit_after>from setuptools import setup, find_packages
setup(
name='jmbo-banner',
version='0.5',
description='Jmbo banner app.',
long_description=open('README.rst', 'r').read() + open('AUTHORS.rst', 'r').read() + open('CHANGELOG.rst', 'r').read(),
author='Praekelt Foundation',
author_email='dev@praekelt.com',
license='BSD',
url='http://github.com/praekelt/jmbo-banner',
packages=find_packages(),
install_requires=[
# Pinned because django-dfp has no pin while jmbo has, but setuptools
# is not smart enough to resolve this. Also setuptools ignores the pin
# if in tests_require.
'django>=1.4,<1.7',
'jmbo>=1.1.1',
'django-dfp>=0.3.3',
],
tests_require=[
'django-setuptest>=0.1.4',
'psycopg2',
],
test_suite="setuptest.setuptest.SetupTestSuite",
include_package_data=True,
classifiers=[
"Programming Language :: Python",
"License :: OSI Approved :: BSD License",
"Development Status :: 4 - Beta",
"Operating System :: OS Independent",
"Framework :: Django",
"Intended Audience :: Developers",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
],
zip_safe=False,
)
|
d3cb042505a5a4905537ece9c85a62286c50ca4a
|
setup.py
|
setup.py
|
from distutils.core import setup
from setuptools import find_packages
setup(
name='sendwithus',
version='1.0.12',
author='sendwithus',
author_email='us@sendwithus.com',
packages=find_packages(),
scripts=[],
url='https://github.com/sendwithus/sendwithus_python',
license='LICENSE.txt',
description='Python API client for sendwithus.com',
long_description=open('README.md').read(),
test_suite="sendwithus.test",
install_requires=[
"requests >= 1.1.0"
],
classifiers=[
"Programming Language :: Python :: 2.7",
"License :: OSI Approved :: Apache Software License",
"Development Status :: 5 - Production/Stable",
"Topic :: Communications :: Email"
]
)
|
from distutils.core import setup
from setuptools import find_packages
setup(
name='sendwithus',
version='1.0.12',
author='sendwithus',
author_email='us@sendwithus.com',
packages=find_packages(),
scripts=[],
url='https://github.com/sendwithus/sendwithus_python',
license='LICENSE.txt',
description='Python API client for sendwithus.com',
long_description=open('README.md').read(),
test_suite="sendwithus.test",
install_requires=[
"requests >= 1.1.0"
],
classifiers=[
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"License :: OSI Approved :: Apache Software License",
"Development Status :: 5 - Production/Stable",
"Topic :: Communications :: Email"
]
)
|
Add generic Python 2 classifier.
|
Add generic Python 2 classifier.
|
Python
|
apache-2.0
|
sendwithus/sendwithus_python,mefyl/sendwithus_python
|
from distutils.core import setup
from setuptools import find_packages
setup(
name='sendwithus',
version='1.0.12',
author='sendwithus',
author_email='us@sendwithus.com',
packages=find_packages(),
scripts=[],
url='https://github.com/sendwithus/sendwithus_python',
license='LICENSE.txt',
description='Python API client for sendwithus.com',
long_description=open('README.md').read(),
test_suite="sendwithus.test",
install_requires=[
"requests >= 1.1.0"
],
classifiers=[
"Programming Language :: Python :: 2.7",
"License :: OSI Approved :: Apache Software License",
"Development Status :: 5 - Production/Stable",
"Topic :: Communications :: Email"
]
)
Add generic Python 2 classifier.
|
from distutils.core import setup
from setuptools import find_packages
setup(
name='sendwithus',
version='1.0.12',
author='sendwithus',
author_email='us@sendwithus.com',
packages=find_packages(),
scripts=[],
url='https://github.com/sendwithus/sendwithus_python',
license='LICENSE.txt',
description='Python API client for sendwithus.com',
long_description=open('README.md').read(),
test_suite="sendwithus.test",
install_requires=[
"requests >= 1.1.0"
],
classifiers=[
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"License :: OSI Approved :: Apache Software License",
"Development Status :: 5 - Production/Stable",
"Topic :: Communications :: Email"
]
)
|
<commit_before>from distutils.core import setup
from setuptools import find_packages
setup(
name='sendwithus',
version='1.0.12',
author='sendwithus',
author_email='us@sendwithus.com',
packages=find_packages(),
scripts=[],
url='https://github.com/sendwithus/sendwithus_python',
license='LICENSE.txt',
description='Python API client for sendwithus.com',
long_description=open('README.md').read(),
test_suite="sendwithus.test",
install_requires=[
"requests >= 1.1.0"
],
classifiers=[
"Programming Language :: Python :: 2.7",
"License :: OSI Approved :: Apache Software License",
"Development Status :: 5 - Production/Stable",
"Topic :: Communications :: Email"
]
)
<commit_msg>Add generic Python 2 classifier.<commit_after>
|
from distutils.core import setup
from setuptools import find_packages
setup(
name='sendwithus',
version='1.0.12',
author='sendwithus',
author_email='us@sendwithus.com',
packages=find_packages(),
scripts=[],
url='https://github.com/sendwithus/sendwithus_python',
license='LICENSE.txt',
description='Python API client for sendwithus.com',
long_description=open('README.md').read(),
test_suite="sendwithus.test",
install_requires=[
"requests >= 1.1.0"
],
classifiers=[
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"License :: OSI Approved :: Apache Software License",
"Development Status :: 5 - Production/Stable",
"Topic :: Communications :: Email"
]
)
|
from distutils.core import setup
from setuptools import find_packages
setup(
name='sendwithus',
version='1.0.12',
author='sendwithus',
author_email='us@sendwithus.com',
packages=find_packages(),
scripts=[],
url='https://github.com/sendwithus/sendwithus_python',
license='LICENSE.txt',
description='Python API client for sendwithus.com',
long_description=open('README.md').read(),
test_suite="sendwithus.test",
install_requires=[
"requests >= 1.1.0"
],
classifiers=[
"Programming Language :: Python :: 2.7",
"License :: OSI Approved :: Apache Software License",
"Development Status :: 5 - Production/Stable",
"Topic :: Communications :: Email"
]
)
Add generic Python 2 classifier.
from distutils.core import setup
from setuptools import find_packages
setup(
name='sendwithus',
version='1.0.12',
author='sendwithus',
author_email='us@sendwithus.com',
packages=find_packages(),
scripts=[],
url='https://github.com/sendwithus/sendwithus_python',
license='LICENSE.txt',
description='Python API client for sendwithus.com',
long_description=open('README.md').read(),
test_suite="sendwithus.test",
install_requires=[
"requests >= 1.1.0"
],
classifiers=[
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"License :: OSI Approved :: Apache Software License",
"Development Status :: 5 - Production/Stable",
"Topic :: Communications :: Email"
]
)
|
<commit_before>from distutils.core import setup
from setuptools import find_packages
setup(
name='sendwithus',
version='1.0.12',
author='sendwithus',
author_email='us@sendwithus.com',
packages=find_packages(),
scripts=[],
url='https://github.com/sendwithus/sendwithus_python',
license='LICENSE.txt',
description='Python API client for sendwithus.com',
long_description=open('README.md').read(),
test_suite="sendwithus.test",
install_requires=[
"requests >= 1.1.0"
],
classifiers=[
"Programming Language :: Python :: 2.7",
"License :: OSI Approved :: Apache Software License",
"Development Status :: 5 - Production/Stable",
"Topic :: Communications :: Email"
]
)
<commit_msg>Add generic Python 2 classifier.<commit_after>from distutils.core import setup
from setuptools import find_packages
setup(
name='sendwithus',
version='1.0.12',
author='sendwithus',
author_email='us@sendwithus.com',
packages=find_packages(),
scripts=[],
url='https://github.com/sendwithus/sendwithus_python',
license='LICENSE.txt',
description='Python API client for sendwithus.com',
long_description=open('README.md').read(),
test_suite="sendwithus.test",
install_requires=[
"requests >= 1.1.0"
],
classifiers=[
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"License :: OSI Approved :: Apache Software License",
"Development Status :: 5 - Production/Stable",
"Topic :: Communications :: Email"
]
)
|
c83291ee2931dcf7918c574efc86ead01dbc725c
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
__version__ = '0.4'
setup(
name='webstack-django-sorting',
version=__version__,
description="Easy sorting of tables with Django",
long_description=open('README.rst').read(),
author='Stéphane Raimbault',
author_email='stephane.raimbault@webstack.fr',
url='http://github.com/stephane/django-sorting/',
packages=[
'webstack_django_sorting',
'webstack_django_sorting.templatetags',
],
package_dir={'webstack_django_sorting': 'webstack_django_sorting'},
include_package_data=True,
zip_safe=False,
keywords='sorting,pagination,django',
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Natural Language :: English',
'Framework :: Django',
'Environment :: Web Environment',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
)
|
# -*- coding: utf-8 -*-
#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
__version__ = '0.4'
setup(
name='webstack-django-sorting',
version=__version__,
description="Easy sorting of tables with Django",
long_description=open('README.rst').read(),
author='Stéphane Raimbault',
author_email='stephane.raimbault@webstack.fr',
url='http://github.com/webstack/webstack-django-sorting/',
packages=[
'webstack_django_sorting',
'webstack_django_sorting.templatetags',
],
package_dir={'webstack_django_sorting': 'webstack_django_sorting'},
include_package_data=True,
zip_safe=False,
keywords='sorting,pagination,django',
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Natural Language :: English',
'Framework :: Django',
'Environment :: Web Environment',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
)
|
Update URL of new project
|
Update URL of new project
|
Python
|
bsd-3-clause
|
tino/webstack-django-sorting,artscoop/webstack-django-sorting,makinacorpus/webstack-django-sorting,artscoop/webstack-django-sorting
|
# -*- coding: utf-8 -*-
#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
__version__ = '0.4'
setup(
name='webstack-django-sorting',
version=__version__,
description="Easy sorting of tables with Django",
long_description=open('README.rst').read(),
author='Stéphane Raimbault',
author_email='stephane.raimbault@webstack.fr',
url='http://github.com/stephane/django-sorting/',
packages=[
'webstack_django_sorting',
'webstack_django_sorting.templatetags',
],
package_dir={'webstack_django_sorting': 'webstack_django_sorting'},
include_package_data=True,
zip_safe=False,
keywords='sorting,pagination,django',
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Natural Language :: English',
'Framework :: Django',
'Environment :: Web Environment',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
)
Update URL of new project
|
# -*- coding: utf-8 -*-
#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
__version__ = '0.4'
setup(
name='webstack-django-sorting',
version=__version__,
description="Easy sorting of tables with Django",
long_description=open('README.rst').read(),
author='Stéphane Raimbault',
author_email='stephane.raimbault@webstack.fr',
url='http://github.com/webstack/webstack-django-sorting/',
packages=[
'webstack_django_sorting',
'webstack_django_sorting.templatetags',
],
package_dir={'webstack_django_sorting': 'webstack_django_sorting'},
include_package_data=True,
zip_safe=False,
keywords='sorting,pagination,django',
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Natural Language :: English',
'Framework :: Django',
'Environment :: Web Environment',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
)
|
<commit_before># -*- coding: utf-8 -*-
#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
__version__ = '0.4'
setup(
name='webstack-django-sorting',
version=__version__,
description="Easy sorting of tables with Django",
long_description=open('README.rst').read(),
author='Stéphane Raimbault',
author_email='stephane.raimbault@webstack.fr',
url='http://github.com/stephane/django-sorting/',
packages=[
'webstack_django_sorting',
'webstack_django_sorting.templatetags',
],
package_dir={'webstack_django_sorting': 'webstack_django_sorting'},
include_package_data=True,
zip_safe=False,
keywords='sorting,pagination,django',
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Natural Language :: English',
'Framework :: Django',
'Environment :: Web Environment',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
)
<commit_msg>Update URL of new project<commit_after>
|
# -*- coding: utf-8 -*-
#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
__version__ = '0.4'
setup(
name='webstack-django-sorting',
version=__version__,
description="Easy sorting of tables with Django",
long_description=open('README.rst').read(),
author='Stéphane Raimbault',
author_email='stephane.raimbault@webstack.fr',
url='http://github.com/webstack/webstack-django-sorting/',
packages=[
'webstack_django_sorting',
'webstack_django_sorting.templatetags',
],
package_dir={'webstack_django_sorting': 'webstack_django_sorting'},
include_package_data=True,
zip_safe=False,
keywords='sorting,pagination,django',
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Natural Language :: English',
'Framework :: Django',
'Environment :: Web Environment',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
)
|
# -*- coding: utf-8 -*-
#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
__version__ = '0.4'
setup(
name='webstack-django-sorting',
version=__version__,
description="Easy sorting of tables with Django",
long_description=open('README.rst').read(),
author='Stéphane Raimbault',
author_email='stephane.raimbault@webstack.fr',
url='http://github.com/stephane/django-sorting/',
packages=[
'webstack_django_sorting',
'webstack_django_sorting.templatetags',
],
package_dir={'webstack_django_sorting': 'webstack_django_sorting'},
include_package_data=True,
zip_safe=False,
keywords='sorting,pagination,django',
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Natural Language :: English',
'Framework :: Django',
'Environment :: Web Environment',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
)
Update URL of new project
# -*- coding: utf-8 -*-
#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
__version__ = '0.4'
setup(
name='webstack-django-sorting',
version=__version__,
description="Easy sorting of tables with Django",
long_description=open('README.rst').read(),
author='Stéphane Raimbault',
author_email='stephane.raimbault@webstack.fr',
url='http://github.com/webstack/webstack-django-sorting/',
packages=[
'webstack_django_sorting',
'webstack_django_sorting.templatetags',
],
package_dir={'webstack_django_sorting': 'webstack_django_sorting'},
include_package_data=True,
zip_safe=False,
keywords='sorting,pagination,django',
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Natural Language :: English',
'Framework :: Django',
'Environment :: Web Environment',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
)
|
<commit_before># -*- coding: utf-8 -*-
#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
__version__ = '0.4'
setup(
name='webstack-django-sorting',
version=__version__,
description="Easy sorting of tables with Django",
long_description=open('README.rst').read(),
author='Stéphane Raimbault',
author_email='stephane.raimbault@webstack.fr',
url='http://github.com/stephane/django-sorting/',
packages=[
'webstack_django_sorting',
'webstack_django_sorting.templatetags',
],
package_dir={'webstack_django_sorting': 'webstack_django_sorting'},
include_package_data=True,
zip_safe=False,
keywords='sorting,pagination,django',
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Natural Language :: English',
'Framework :: Django',
'Environment :: Web Environment',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
)
<commit_msg>Update URL of new project<commit_after># -*- coding: utf-8 -*-
#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
__version__ = '0.4'
setup(
name='webstack-django-sorting',
version=__version__,
description="Easy sorting of tables with Django",
long_description=open('README.rst').read(),
author='Stéphane Raimbault',
author_email='stephane.raimbault@webstack.fr',
url='http://github.com/webstack/webstack-django-sorting/',
packages=[
'webstack_django_sorting',
'webstack_django_sorting.templatetags',
],
package_dir={'webstack_django_sorting': 'webstack_django_sorting'},
include_package_data=True,
zip_safe=False,
keywords='sorting,pagination,django',
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Natural Language :: English',
'Framework :: Django',
'Environment :: Web Environment',
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
)
|
934cc5692c31111dc787b31cbf369be2017ec1c3
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name="teamscale-client",
version="4.1.0",
author="Thomas Kinnen - CQSE GmbH",
author_email="kinnen@cqse.eu",
description=("A simple service client to interact with Teamscale's REST API."),
license="Apache",
keywords="rest api teamscale",
url="https://github.com/cqse/teamscale-client-python",
packages=['teamscale_client'],
long_description="A simple service client to interact with Teamscale's REST API.",
classifiers=[
"Topic :: Utilities",
],
install_requires=[
'simplejson',
'requests>=2.0',
'jsonpickle'
],
tests_require=[
'pytest',
'responses'
],
setup_requires=["pytest-runner"]
)
|
from setuptools import setup
setup(
name="teamscale-client",
version="4.1.0",
author="Thomas Kinnen - CQSE GmbH",
author_email="kinnen@cqse.eu",
description=("A simple service client to interact with Teamscale's REST API."),
license="Apache",
keywords="rest api teamscale",
url="https://github.com/cqse/teamscale-client-python",
packages=['teamscale_client'],
long_description="A simple service client to interact with Teamscale's REST API.",
classifiers=[
"Topic :: Utilities",
],
install_requires=[
'simplejson',
'requests>=2.0',
'jsonpickle',
'responses'
],
tests_require=[
'pytest',
'responses'
],
setup_requires=["pytest-runner"]
)
|
Fix failing build: added getsentry/responses as installation requirement
|
Fix failing build: added getsentry/responses as installation requirement
|
Python
|
apache-2.0
|
cqse/teamscale-client-python
|
from setuptools import setup
setup(
name="teamscale-client",
version="4.1.0",
author="Thomas Kinnen - CQSE GmbH",
author_email="kinnen@cqse.eu",
description=("A simple service client to interact with Teamscale's REST API."),
license="Apache",
keywords="rest api teamscale",
url="https://github.com/cqse/teamscale-client-python",
packages=['teamscale_client'],
long_description="A simple service client to interact with Teamscale's REST API.",
classifiers=[
"Topic :: Utilities",
],
install_requires=[
'simplejson',
'requests>=2.0',
'jsonpickle'
],
tests_require=[
'pytest',
'responses'
],
setup_requires=["pytest-runner"]
)
Fix failing build: added getsentry/responses as installation requirement
|
from setuptools import setup
setup(
name="teamscale-client",
version="4.1.0",
author="Thomas Kinnen - CQSE GmbH",
author_email="kinnen@cqse.eu",
description=("A simple service client to interact with Teamscale's REST API."),
license="Apache",
keywords="rest api teamscale",
url="https://github.com/cqse/teamscale-client-python",
packages=['teamscale_client'],
long_description="A simple service client to interact with Teamscale's REST API.",
classifiers=[
"Topic :: Utilities",
],
install_requires=[
'simplejson',
'requests>=2.0',
'jsonpickle',
'responses'
],
tests_require=[
'pytest',
'responses'
],
setup_requires=["pytest-runner"]
)
|
<commit_before>from setuptools import setup
setup(
name="teamscale-client",
version="4.1.0",
author="Thomas Kinnen - CQSE GmbH",
author_email="kinnen@cqse.eu",
description=("A simple service client to interact with Teamscale's REST API."),
license="Apache",
keywords="rest api teamscale",
url="https://github.com/cqse/teamscale-client-python",
packages=['teamscale_client'],
long_description="A simple service client to interact with Teamscale's REST API.",
classifiers=[
"Topic :: Utilities",
],
install_requires=[
'simplejson',
'requests>=2.0',
'jsonpickle'
],
tests_require=[
'pytest',
'responses'
],
setup_requires=["pytest-runner"]
)
<commit_msg>Fix failing build: added getsentry/responses as installation requirement<commit_after>
|
from setuptools import setup
setup(
name="teamscale-client",
version="4.1.0",
author="Thomas Kinnen - CQSE GmbH",
author_email="kinnen@cqse.eu",
description=("A simple service client to interact with Teamscale's REST API."),
license="Apache",
keywords="rest api teamscale",
url="https://github.com/cqse/teamscale-client-python",
packages=['teamscale_client'],
long_description="A simple service client to interact with Teamscale's REST API.",
classifiers=[
"Topic :: Utilities",
],
install_requires=[
'simplejson',
'requests>=2.0',
'jsonpickle',
'responses'
],
tests_require=[
'pytest',
'responses'
],
setup_requires=["pytest-runner"]
)
|
from setuptools import setup
setup(
name="teamscale-client",
version="4.1.0",
author="Thomas Kinnen - CQSE GmbH",
author_email="kinnen@cqse.eu",
description=("A simple service client to interact with Teamscale's REST API."),
license="Apache",
keywords="rest api teamscale",
url="https://github.com/cqse/teamscale-client-python",
packages=['teamscale_client'],
long_description="A simple service client to interact with Teamscale's REST API.",
classifiers=[
"Topic :: Utilities",
],
install_requires=[
'simplejson',
'requests>=2.0',
'jsonpickle'
],
tests_require=[
'pytest',
'responses'
],
setup_requires=["pytest-runner"]
)
Fix failing build: added getsentry/responses as installation requirement
from setuptools import setup
setup(
name="teamscale-client",
version="4.1.0",
author="Thomas Kinnen - CQSE GmbH",
author_email="kinnen@cqse.eu",
description=("A simple service client to interact with Teamscale's REST API."),
license="Apache",
keywords="rest api teamscale",
url="https://github.com/cqse/teamscale-client-python",
packages=['teamscale_client'],
long_description="A simple service client to interact with Teamscale's REST API.",
classifiers=[
"Topic :: Utilities",
],
install_requires=[
'simplejson',
'requests>=2.0',
'jsonpickle',
'responses'
],
tests_require=[
'pytest',
'responses'
],
setup_requires=["pytest-runner"]
)
|
<commit_before>from setuptools import setup
setup(
name="teamscale-client",
version="4.1.0",
author="Thomas Kinnen - CQSE GmbH",
author_email="kinnen@cqse.eu",
description=("A simple service client to interact with Teamscale's REST API."),
license="Apache",
keywords="rest api teamscale",
url="https://github.com/cqse/teamscale-client-python",
packages=['teamscale_client'],
long_description="A simple service client to interact with Teamscale's REST API.",
classifiers=[
"Topic :: Utilities",
],
install_requires=[
'simplejson',
'requests>=2.0',
'jsonpickle'
],
tests_require=[
'pytest',
'responses'
],
setup_requires=["pytest-runner"]
)
<commit_msg>Fix failing build: added getsentry/responses as installation requirement<commit_after>from setuptools import setup
setup(
name="teamscale-client",
version="4.1.0",
author="Thomas Kinnen - CQSE GmbH",
author_email="kinnen@cqse.eu",
description=("A simple service client to interact with Teamscale's REST API."),
license="Apache",
keywords="rest api teamscale",
url="https://github.com/cqse/teamscale-client-python",
packages=['teamscale_client'],
long_description="A simple service client to interact with Teamscale's REST API.",
classifiers=[
"Topic :: Utilities",
],
install_requires=[
'simplejson',
'requests>=2.0',
'jsonpickle',
'responses'
],
tests_require=[
'pytest',
'responses'
],
setup_requires=["pytest-runner"]
)
|
2f87d814a973f1e0ae456fde6d42947a5a72f017
|
setup.py
|
setup.py
|
#! /usr/bin/env python
from setuptools import find_packages, setup
setup(name='oemof.db',
version='0.0.4',
description='The oemof database extension',
namespace_package = ['oemof'],
packages=find_packages(),
package_dir={'oemof': 'oemof'},
install_requires=['sqlalchemy >= 1.0',
'keyring >= 4.0',
'shapely',
'psycopg2',
'keyrings.alt'])
|
#! /usr/bin/env python
from setuptools import find_packages, setup
setup(name='oemof.db',
version='0.0.4',
description='The oemof database extension',
namespace_package = ['oemof'],
packages=find_packages(),
package_dir={'oemof': 'oemof'},
install_requires=['sqlalchemy >= 1.0',
'keyring >= 4.0',
'shapely',
'psycopg2',
'keyrings.alt',
'pandas >=0.19.1, <=0.19.1'])
|
Add pandas as required package
|
Add pandas as required package
|
Python
|
mit
|
oemof/oemof.db
|
#! /usr/bin/env python
from setuptools import find_packages, setup
setup(name='oemof.db',
version='0.0.4',
description='The oemof database extension',
namespace_package = ['oemof'],
packages=find_packages(),
package_dir={'oemof': 'oemof'},
install_requires=['sqlalchemy >= 1.0',
'keyring >= 4.0',
'shapely',
'psycopg2',
'keyrings.alt'])
Add pandas as required package
|
#! /usr/bin/env python
from setuptools import find_packages, setup
setup(name='oemof.db',
version='0.0.4',
description='The oemof database extension',
namespace_package = ['oemof'],
packages=find_packages(),
package_dir={'oemof': 'oemof'},
install_requires=['sqlalchemy >= 1.0',
'keyring >= 4.0',
'shapely',
'psycopg2',
'keyrings.alt',
'pandas >=0.19.1, <=0.19.1'])
|
<commit_before>#! /usr/bin/env python
from setuptools import find_packages, setup
setup(name='oemof.db',
version='0.0.4',
description='The oemof database extension',
namespace_package = ['oemof'],
packages=find_packages(),
package_dir={'oemof': 'oemof'},
install_requires=['sqlalchemy >= 1.0',
'keyring >= 4.0',
'shapely',
'psycopg2',
'keyrings.alt'])
<commit_msg>Add pandas as required package<commit_after>
|
#! /usr/bin/env python
from setuptools import find_packages, setup
setup(name='oemof.db',
version='0.0.4',
description='The oemof database extension',
namespace_package = ['oemof'],
packages=find_packages(),
package_dir={'oemof': 'oemof'},
install_requires=['sqlalchemy >= 1.0',
'keyring >= 4.0',
'shapely',
'psycopg2',
'keyrings.alt',
'pandas >=0.19.1, <=0.19.1'])
|
#! /usr/bin/env python
from setuptools import find_packages, setup
setup(name='oemof.db',
version='0.0.4',
description='The oemof database extension',
namespace_package = ['oemof'],
packages=find_packages(),
package_dir={'oemof': 'oemof'},
install_requires=['sqlalchemy >= 1.0',
'keyring >= 4.0',
'shapely',
'psycopg2',
'keyrings.alt'])
Add pandas as required package
#! /usr/bin/env python
from setuptools import find_packages, setup
setup(name='oemof.db',
version='0.0.4',
description='The oemof database extension',
namespace_package = ['oemof'],
packages=find_packages(),
package_dir={'oemof': 'oemof'},
install_requires=['sqlalchemy >= 1.0',
'keyring >= 4.0',
'shapely',
'psycopg2',
'keyrings.alt',
'pandas >=0.19.1, <=0.19.1'])
|
<commit_before>#! /usr/bin/env python
from setuptools import find_packages, setup
setup(name='oemof.db',
version='0.0.4',
description='The oemof database extension',
namespace_package = ['oemof'],
packages=find_packages(),
package_dir={'oemof': 'oemof'},
install_requires=['sqlalchemy >= 1.0',
'keyring >= 4.0',
'shapely',
'psycopg2',
'keyrings.alt'])
<commit_msg>Add pandas as required package<commit_after>#! /usr/bin/env python
from setuptools import find_packages, setup
setup(name='oemof.db',
version='0.0.4',
description='The oemof database extension',
namespace_package = ['oemof'],
packages=find_packages(),
package_dir={'oemof': 'oemof'},
install_requires=['sqlalchemy >= 1.0',
'keyring >= 4.0',
'shapely',
'psycopg2',
'keyrings.alt',
'pandas >=0.19.1, <=0.19.1'])
|
e85f08b816d754927eae4a03ff26b9ad9dcd6fb3
|
setup.py
|
setup.py
|
#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
tests_require = [
'South',
]
setup(
name='gargoyle',
version='0.1.8',
author='DISQUS',
author_email='opensource@disqus.com',
url='http://github.com/disqus/gargoyle',
description = 'Gargoyle is a platform built on top of Django which allows you to switch functionality of your application on and off based on conditions.',
packages=find_packages(exclude="example_project"),
zip_safe=False,
install_requires=[
'Django>=1.1',
'django-modeldict>=1.0.1',
'nexus>=0.1.7',
'django-jsonfield',
],
tests_require=tests_require,
extras_require={'test': tests_require},
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Topic :: Software Development'
],
)
|
#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
tests_require = [
'South',
]
setup(
name='gargoyle',
version='0.1.8',
author='DISQUS',
author_email='opensource@disqus.com',
url='http://github.com/disqus/gargoyle',
description = 'Gargoyle is a platform built on top of Django which allows you to switch functionality of your application on and off based on conditions.',
packages=find_packages(exclude="example_project"),
zip_safe=False,
install_requires=[
'Django>=1.1',
'django-modeldict>=1.1.0',
'nexus>=0.1.7',
'django-jsonfield',
],
tests_require=tests_require,
extras_require={'test': tests_require},
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Topic :: Software Development'
],
)
|
Update modeldict req for bugfix on deletion
|
Update modeldict req for bugfix on deletion
|
Python
|
apache-2.0
|
brilliant-org/gargoyle,zapier/gargoyle,disqus/gutter-django,frewsxcv/gargoyle,YPlan/gargoyle,nkovshov/gargoyle,brilliant-org/gargoyle,Raekkeri/gargoyle,monokrome/gargoyle,disqus/gutter-django,vikingco/gargoyle,roverdotcom/gargoyle,kalail/gutter,disqus/gutter-django,YPlan/gargoyle,nkovshov/gargoyle,blueprinthealth/gargoyle,disqus/gargoyle,blueprinthealth/gargoyle,nkovshov/gargoyle,disqus/gargoyle,disqus/gutter,monokrome/gargoyle,Raekkeri/gargoyle,vikingco/gargoyle,YPlan/gargoyle,Raekkeri/gargoyle,disqus/gutter-django,vikingco/gargoyle,roverdotcom/gargoyle,frewsxcv/gargoyle,monokrome/gargoyle,roverdotcom/gargoyle,zapier/gargoyle,graingert/gutter-django,graingert/gutter-django,blueprinthealth/gargoyle,kalail/gutter,disqus/gargoyle,brilliant-org/gargoyle,frewsxcv/gargoyle,graingert/gutter-django,disqus/gutter,kalail/gutter
|
#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
tests_require = [
'South',
]
setup(
name='gargoyle',
version='0.1.8',
author='DISQUS',
author_email='opensource@disqus.com',
url='http://github.com/disqus/gargoyle',
description = 'Gargoyle is a platform built on top of Django which allows you to switch functionality of your application on and off based on conditions.',
packages=find_packages(exclude="example_project"),
zip_safe=False,
install_requires=[
'Django>=1.1',
'django-modeldict>=1.0.1',
'nexus>=0.1.7',
'django-jsonfield',
],
tests_require=tests_require,
extras_require={'test': tests_require},
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Topic :: Software Development'
],
)
Update modeldict req for bugfix on deletion
|
#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
tests_require = [
'South',
]
setup(
name='gargoyle',
version='0.1.8',
author='DISQUS',
author_email='opensource@disqus.com',
url='http://github.com/disqus/gargoyle',
description = 'Gargoyle is a platform built on top of Django which allows you to switch functionality of your application on and off based on conditions.',
packages=find_packages(exclude="example_project"),
zip_safe=False,
install_requires=[
'Django>=1.1',
'django-modeldict>=1.1.0',
'nexus>=0.1.7',
'django-jsonfield',
],
tests_require=tests_require,
extras_require={'test': tests_require},
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Topic :: Software Development'
],
)
|
<commit_before>#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
tests_require = [
'South',
]
setup(
name='gargoyle',
version='0.1.8',
author='DISQUS',
author_email='opensource@disqus.com',
url='http://github.com/disqus/gargoyle',
description = 'Gargoyle is a platform built on top of Django which allows you to switch functionality of your application on and off based on conditions.',
packages=find_packages(exclude="example_project"),
zip_safe=False,
install_requires=[
'Django>=1.1',
'django-modeldict>=1.0.1',
'nexus>=0.1.7',
'django-jsonfield',
],
tests_require=tests_require,
extras_require={'test': tests_require},
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Topic :: Software Development'
],
)<commit_msg>Update modeldict req for bugfix on deletion<commit_after>
|
#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
tests_require = [
'South',
]
setup(
name='gargoyle',
version='0.1.8',
author='DISQUS',
author_email='opensource@disqus.com',
url='http://github.com/disqus/gargoyle',
description = 'Gargoyle is a platform built on top of Django which allows you to switch functionality of your application on and off based on conditions.',
packages=find_packages(exclude="example_project"),
zip_safe=False,
install_requires=[
'Django>=1.1',
'django-modeldict>=1.1.0',
'nexus>=0.1.7',
'django-jsonfield',
],
tests_require=tests_require,
extras_require={'test': tests_require},
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Topic :: Software Development'
],
)
|
#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
tests_require = [
'South',
]
setup(
name='gargoyle',
version='0.1.8',
author='DISQUS',
author_email='opensource@disqus.com',
url='http://github.com/disqus/gargoyle',
description = 'Gargoyle is a platform built on top of Django which allows you to switch functionality of your application on and off based on conditions.',
packages=find_packages(exclude="example_project"),
zip_safe=False,
install_requires=[
'Django>=1.1',
'django-modeldict>=1.0.1',
'nexus>=0.1.7',
'django-jsonfield',
],
tests_require=tests_require,
extras_require={'test': tests_require},
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Topic :: Software Development'
],
)
Update modeldict req for bugfix on deletion
#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
tests_require = [
'South',
]
setup(
name='gargoyle',
version='0.1.8',
author='DISQUS',
author_email='opensource@disqus.com',
url='http://github.com/disqus/gargoyle',
description = 'Gargoyle is a platform built on top of Django which allows you to switch functionality of your application on and off based on conditions.',
packages=find_packages(exclude="example_project"),
zip_safe=False,
install_requires=[
'Django>=1.1',
'django-modeldict>=1.1.0',
'nexus>=0.1.7',
'django-jsonfield',
],
tests_require=tests_require,
extras_require={'test': tests_require},
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Topic :: Software Development'
],
)
|
<commit_before>#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
tests_require = [
'South',
]
setup(
name='gargoyle',
version='0.1.8',
author='DISQUS',
author_email='opensource@disqus.com',
url='http://github.com/disqus/gargoyle',
description = 'Gargoyle is a platform built on top of Django which allows you to switch functionality of your application on and off based on conditions.',
packages=find_packages(exclude="example_project"),
zip_safe=False,
install_requires=[
'Django>=1.1',
'django-modeldict>=1.0.1',
'nexus>=0.1.7',
'django-jsonfield',
],
tests_require=tests_require,
extras_require={'test': tests_require},
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Topic :: Software Development'
],
)<commit_msg>Update modeldict req for bugfix on deletion<commit_after>#!/usr/bin/env python
try:
from setuptools import setup, find_packages
except ImportError:
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
tests_require = [
'South',
]
setup(
name='gargoyle',
version='0.1.8',
author='DISQUS',
author_email='opensource@disqus.com',
url='http://github.com/disqus/gargoyle',
description = 'Gargoyle is a platform built on top of Django which allows you to switch functionality of your application on and off based on conditions.',
packages=find_packages(exclude="example_project"),
zip_safe=False,
install_requires=[
'Django>=1.1',
'django-modeldict>=1.1.0',
'nexus>=0.1.7',
'django-jsonfield',
],
tests_require=tests_require,
extras_require={'test': tests_require},
test_suite='runtests.runtests',
include_package_data=True,
classifiers=[
'Framework :: Django',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Topic :: Software Development'
],
)
|
58cecd37825e19e0bed42a7fa44c34e799d67ec1
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='django-paginationlinks',
version='0.1.1',
description='Django Pagination Links',
long_description=readme,
url='https://github.com/developersociety/django-paginationlinks',
maintainer='The Developer Society',
maintainer_email='studio@dev.ngo',
platforms=['any'],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.11',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
license='BSD',
)
|
#!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='django-paginationlinks',
version='0.1.1',
description='Django Pagination Links',
long_description=readme,
url='https://github.com/developersociety/django-paginationlinks',
maintainer='The Developer Society',
maintainer_email='studio@dev.ngo',
platforms=['any'],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.11',
'Framework :: Django :: 2.2',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
license='BSD',
)
|
Support Python 3.7, Django 2.2
|
Support Python 3.7, Django 2.2
|
Python
|
bsd-3-clause
|
blancltd/django-paginationlinks
|
#!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='django-paginationlinks',
version='0.1.1',
description='Django Pagination Links',
long_description=readme,
url='https://github.com/developersociety/django-paginationlinks',
maintainer='The Developer Society',
maintainer_email='studio@dev.ngo',
platforms=['any'],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.11',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
license='BSD',
)
Support Python 3.7, Django 2.2
|
#!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='django-paginationlinks',
version='0.1.1',
description='Django Pagination Links',
long_description=readme,
url='https://github.com/developersociety/django-paginationlinks',
maintainer='The Developer Society',
maintainer_email='studio@dev.ngo',
platforms=['any'],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.11',
'Framework :: Django :: 2.2',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
license='BSD',
)
|
<commit_before>#!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='django-paginationlinks',
version='0.1.1',
description='Django Pagination Links',
long_description=readme,
url='https://github.com/developersociety/django-paginationlinks',
maintainer='The Developer Society',
maintainer_email='studio@dev.ngo',
platforms=['any'],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.11',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
license='BSD',
)
<commit_msg>Support Python 3.7, Django 2.2<commit_after>
|
#!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='django-paginationlinks',
version='0.1.1',
description='Django Pagination Links',
long_description=readme,
url='https://github.com/developersociety/django-paginationlinks',
maintainer='The Developer Society',
maintainer_email='studio@dev.ngo',
platforms=['any'],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.11',
'Framework :: Django :: 2.2',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
license='BSD',
)
|
#!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='django-paginationlinks',
version='0.1.1',
description='Django Pagination Links',
long_description=readme,
url='https://github.com/developersociety/django-paginationlinks',
maintainer='The Developer Society',
maintainer_email='studio@dev.ngo',
platforms=['any'],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.11',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
license='BSD',
)
Support Python 3.7, Django 2.2
#!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='django-paginationlinks',
version='0.1.1',
description='Django Pagination Links',
long_description=readme,
url='https://github.com/developersociety/django-paginationlinks',
maintainer='The Developer Society',
maintainer_email='studio@dev.ngo',
platforms=['any'],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.11',
'Framework :: Django :: 2.2',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
license='BSD',
)
|
<commit_before>#!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='django-paginationlinks',
version='0.1.1',
description='Django Pagination Links',
long_description=readme,
url='https://github.com/developersociety/django-paginationlinks',
maintainer='The Developer Society',
maintainer_email='studio@dev.ngo',
platforms=['any'],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.11',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
],
license='BSD',
)
<commit_msg>Support Python 3.7, Django 2.2<commit_after>#!/usr/bin/env python
from codecs import open
from setuptools import find_packages, setup
with open('README.rst', 'r', 'utf-8') as f:
readme = f.read()
setup(
name='django-paginationlinks',
version='0.1.1',
description='Django Pagination Links',
long_description=readme,
url='https://github.com/developersociety/django-paginationlinks',
maintainer='The Developer Society',
maintainer_email='studio@dev.ngo',
platforms=['any'],
packages=find_packages(),
classifiers=[
'Environment :: Web Environment',
'Framework :: Django',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.11',
'Framework :: Django :: 2.2',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
license='BSD',
)
|
5a0e780e4d47efebed691fe389ff01a7ee0ff1cb
|
setup.py
|
setup.py
|
from setuptools import setup
def get_version():
import re
with open('lastpass/__init__.py', 'r') as f:
for line in f:
m = re.match(r'__version__ = [\'"]([^\'"]*)[\'"]', line)
if m:
return m.group(1)
return ''
setup(
name='lastpass-python',
version=get_version(),
description='LastPass Python API (unofficial)',
long_description=open('README.rst').read(),
license='MIT',
author='konomae',
author_email='konomae@users.noreply.github.com',
url='https://github.com/konomae/lastpass-python',
packages=['lastpass'],
install_requires=[
"requests>=1.2.1,<=3.0.0",
"pycrypto>=2.6.1",
],
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
],
)
|
from setuptools import setup
def get_version():
import re
with open('lastpass/__init__.py', 'r') as f:
for line in f:
m = re.match(r'__version__ = [\'"]([^\'"]*)[\'"]', line)
if m:
return m.group(1)
raise RuntimeError('Cannot find version information')
setup(
name='lastpass-python',
version=get_version(),
description='LastPass Python API (unofficial)',
long_description=open('README.rst').read(),
license='MIT',
author='konomae',
author_email='konomae@users.noreply.github.com',
url='https://github.com/konomae/lastpass-python',
packages=['lastpass'],
install_requires=[
"requests>=1.2.1,<=3.0.0",
"pycrypto>=2.6.1",
],
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
],
)
|
Raise RuntimeError if __version__ is not found
|
Raise RuntimeError if __version__ is not found
|
Python
|
mit
|
dhercher/lastpass-python,konomae/lastpass-python
|
from setuptools import setup
def get_version():
import re
with open('lastpass/__init__.py', 'r') as f:
for line in f:
m = re.match(r'__version__ = [\'"]([^\'"]*)[\'"]', line)
if m:
return m.group(1)
return ''
setup(
name='lastpass-python',
version=get_version(),
description='LastPass Python API (unofficial)',
long_description=open('README.rst').read(),
license='MIT',
author='konomae',
author_email='konomae@users.noreply.github.com',
url='https://github.com/konomae/lastpass-python',
packages=['lastpass'],
install_requires=[
"requests>=1.2.1,<=3.0.0",
"pycrypto>=2.6.1",
],
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
],
)
Raise RuntimeError if __version__ is not found
|
from setuptools import setup
def get_version():
import re
with open('lastpass/__init__.py', 'r') as f:
for line in f:
m = re.match(r'__version__ = [\'"]([^\'"]*)[\'"]', line)
if m:
return m.group(1)
raise RuntimeError('Cannot find version information')
setup(
name='lastpass-python',
version=get_version(),
description='LastPass Python API (unofficial)',
long_description=open('README.rst').read(),
license='MIT',
author='konomae',
author_email='konomae@users.noreply.github.com',
url='https://github.com/konomae/lastpass-python',
packages=['lastpass'],
install_requires=[
"requests>=1.2.1,<=3.0.0",
"pycrypto>=2.6.1",
],
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
],
)
|
<commit_before>from setuptools import setup
def get_version():
import re
with open('lastpass/__init__.py', 'r') as f:
for line in f:
m = re.match(r'__version__ = [\'"]([^\'"]*)[\'"]', line)
if m:
return m.group(1)
return ''
setup(
name='lastpass-python',
version=get_version(),
description='LastPass Python API (unofficial)',
long_description=open('README.rst').read(),
license='MIT',
author='konomae',
author_email='konomae@users.noreply.github.com',
url='https://github.com/konomae/lastpass-python',
packages=['lastpass'],
install_requires=[
"requests>=1.2.1,<=3.0.0",
"pycrypto>=2.6.1",
],
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
],
)<commit_msg>Raise RuntimeError if __version__ is not found<commit_after>
|
from setuptools import setup
def get_version():
import re
with open('lastpass/__init__.py', 'r') as f:
for line in f:
m = re.match(r'__version__ = [\'"]([^\'"]*)[\'"]', line)
if m:
return m.group(1)
raise RuntimeError('Cannot find version information')
setup(
name='lastpass-python',
version=get_version(),
description='LastPass Python API (unofficial)',
long_description=open('README.rst').read(),
license='MIT',
author='konomae',
author_email='konomae@users.noreply.github.com',
url='https://github.com/konomae/lastpass-python',
packages=['lastpass'],
install_requires=[
"requests>=1.2.1,<=3.0.0",
"pycrypto>=2.6.1",
],
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
],
)
|
from setuptools import setup
def get_version():
import re
with open('lastpass/__init__.py', 'r') as f:
for line in f:
m = re.match(r'__version__ = [\'"]([^\'"]*)[\'"]', line)
if m:
return m.group(1)
return ''
setup(
name='lastpass-python',
version=get_version(),
description='LastPass Python API (unofficial)',
long_description=open('README.rst').read(),
license='MIT',
author='konomae',
author_email='konomae@users.noreply.github.com',
url='https://github.com/konomae/lastpass-python',
packages=['lastpass'],
install_requires=[
"requests>=1.2.1,<=3.0.0",
"pycrypto>=2.6.1",
],
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
],
)
Raise RuntimeError if __version__ is not found
from setuptools import setup
def get_version():
import re
with open('lastpass/__init__.py', 'r') as f:
for line in f:
m = re.match(r'__version__ = [\'"]([^\'"]*)[\'"]', line)
if m:
return m.group(1)
raise RuntimeError('Cannot find version information')
setup(
name='lastpass-python',
version=get_version(),
description='LastPass Python API (unofficial)',
long_description=open('README.rst').read(),
license='MIT',
author='konomae',
author_email='konomae@users.noreply.github.com',
url='https://github.com/konomae/lastpass-python',
packages=['lastpass'],
install_requires=[
"requests>=1.2.1,<=3.0.0",
"pycrypto>=2.6.1",
],
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
],
)
|
<commit_before>from setuptools import setup
def get_version():
import re
with open('lastpass/__init__.py', 'r') as f:
for line in f:
m = re.match(r'__version__ = [\'"]([^\'"]*)[\'"]', line)
if m:
return m.group(1)
return ''
setup(
name='lastpass-python',
version=get_version(),
description='LastPass Python API (unofficial)',
long_description=open('README.rst').read(),
license='MIT',
author='konomae',
author_email='konomae@users.noreply.github.com',
url='https://github.com/konomae/lastpass-python',
packages=['lastpass'],
install_requires=[
"requests>=1.2.1,<=3.0.0",
"pycrypto>=2.6.1",
],
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
],
)<commit_msg>Raise RuntimeError if __version__ is not found<commit_after>from setuptools import setup
def get_version():
import re
with open('lastpass/__init__.py', 'r') as f:
for line in f:
m = re.match(r'__version__ = [\'"]([^\'"]*)[\'"]', line)
if m:
return m.group(1)
raise RuntimeError('Cannot find version information')
setup(
name='lastpass-python',
version=get_version(),
description='LastPass Python API (unofficial)',
long_description=open('README.rst').read(),
license='MIT',
author='konomae',
author_email='konomae@users.noreply.github.com',
url='https://github.com/konomae/lastpass-python',
packages=['lastpass'],
install_requires=[
"requests>=1.2.1,<=3.0.0",
"pycrypto>=2.6.1",
],
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: Implementation :: CPython',
],
)
|
ab97c9fbaad2ae8bc30d63a98bec1fa6fb58dd4d
|
setup.py
|
setup.py
|
import sys
from setuptools import setup
if sys.version_info < (3, 5):
raise Exception("Python 3.5 or higher is required. Your version is %s." % sys.version)
long_description = open('README.rst').read()
__version__ = ""
exec(open('ehforwarderbot/__version__.py').read())
setup(
name='ehforwarderbot',
packages=['ehforwarderbot'],
version=__version__,
description='An extensible message tunneling chat bot framework.',
long_description=long_description,
author='Eana Hufwe',
author_email='ilove@1a23.com',
url='https://github.com/blueset/ehforwarderbot',
license='GPLv3',
python_requires='>=3.6',
keywords=['EFB', 'EH Forwarder Bot', 'Chat tunneling', 'IM', 'messaging'],
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
"Intended Audience :: Developers",
"Intended Audience :: End Users/Desktop",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Topic :: Communications :: Chat",
"Topic :: Software Development :: Libraries :: Application Frameworks",
"Topic :: Utilities"
],
install_requires=[
"PyYaml"
],
entry_points={
"console_scripts": ['ehforwarderbot = ehforwarderbot.__main__:main']
}
)
|
import sys
from setuptools import setup
if sys.version_info < (3, 6):
raise Exception("Python 3.6 or higher is required. Your version is %s." % sys.version)
long_description = open('README.rst').read()
__version__ = ""
exec(open('ehforwarderbot/__version__.py').read())
setup(
name='ehforwarderbot',
packages=['ehforwarderbot'],
version=__version__,
description='An extensible message tunneling chat bot framework.',
long_description=long_description,
author='Eana Hufwe',
author_email='ilove@1a23.com',
url='https://github.com/blueset/ehforwarderbot',
license='GPLv3',
python_requires='>=3.6',
keywords=['EFB', 'EH Forwarder Bot', 'Chat tunneling', 'IM', 'messaging'],
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
"Intended Audience :: Developers",
"Intended Audience :: End Users/Desktop",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Topic :: Communications :: Chat",
"Topic :: Software Development :: Libraries :: Application Frameworks",
"Topic :: Utilities"
],
install_requires=[
"PyYaml"
],
entry_points={
"console_scripts": ['ehforwarderbot = ehforwarderbot.__main__:main']
}
)
|
Update requirement to Python 3.6
|
Update requirement to Python 3.6
|
Python
|
agpl-3.0
|
blueset/ehForwarderBot
|
import sys
from setuptools import setup
if sys.version_info < (3, 5):
raise Exception("Python 3.5 or higher is required. Your version is %s." % sys.version)
long_description = open('README.rst').read()
__version__ = ""
exec(open('ehforwarderbot/__version__.py').read())
setup(
name='ehforwarderbot',
packages=['ehforwarderbot'],
version=__version__,
description='An extensible message tunneling chat bot framework.',
long_description=long_description,
author='Eana Hufwe',
author_email='ilove@1a23.com',
url='https://github.com/blueset/ehforwarderbot',
license='GPLv3',
python_requires='>=3.6',
keywords=['EFB', 'EH Forwarder Bot', 'Chat tunneling', 'IM', 'messaging'],
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
"Intended Audience :: Developers",
"Intended Audience :: End Users/Desktop",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Topic :: Communications :: Chat",
"Topic :: Software Development :: Libraries :: Application Frameworks",
"Topic :: Utilities"
],
install_requires=[
"PyYaml"
],
entry_points={
"console_scripts": ['ehforwarderbot = ehforwarderbot.__main__:main']
}
)
Update requirement to Python 3.6
|
import sys
from setuptools import setup
if sys.version_info < (3, 6):
raise Exception("Python 3.6 or higher is required. Your version is %s." % sys.version)
long_description = open('README.rst').read()
__version__ = ""
exec(open('ehforwarderbot/__version__.py').read())
setup(
name='ehforwarderbot',
packages=['ehforwarderbot'],
version=__version__,
description='An extensible message tunneling chat bot framework.',
long_description=long_description,
author='Eana Hufwe',
author_email='ilove@1a23.com',
url='https://github.com/blueset/ehforwarderbot',
license='GPLv3',
python_requires='>=3.6',
keywords=['EFB', 'EH Forwarder Bot', 'Chat tunneling', 'IM', 'messaging'],
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
"Intended Audience :: Developers",
"Intended Audience :: End Users/Desktop",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Topic :: Communications :: Chat",
"Topic :: Software Development :: Libraries :: Application Frameworks",
"Topic :: Utilities"
],
install_requires=[
"PyYaml"
],
entry_points={
"console_scripts": ['ehforwarderbot = ehforwarderbot.__main__:main']
}
)
|
<commit_before>import sys
from setuptools import setup
if sys.version_info < (3, 5):
raise Exception("Python 3.5 or higher is required. Your version is %s." % sys.version)
long_description = open('README.rst').read()
__version__ = ""
exec(open('ehforwarderbot/__version__.py').read())
setup(
name='ehforwarderbot',
packages=['ehforwarderbot'],
version=__version__,
description='An extensible message tunneling chat bot framework.',
long_description=long_description,
author='Eana Hufwe',
author_email='ilove@1a23.com',
url='https://github.com/blueset/ehforwarderbot',
license='GPLv3',
python_requires='>=3.6',
keywords=['EFB', 'EH Forwarder Bot', 'Chat tunneling', 'IM', 'messaging'],
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
"Intended Audience :: Developers",
"Intended Audience :: End Users/Desktop",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Topic :: Communications :: Chat",
"Topic :: Software Development :: Libraries :: Application Frameworks",
"Topic :: Utilities"
],
install_requires=[
"PyYaml"
],
entry_points={
"console_scripts": ['ehforwarderbot = ehforwarderbot.__main__:main']
}
)
<commit_msg>Update requirement to Python 3.6<commit_after>
|
import sys
from setuptools import setup
if sys.version_info < (3, 6):
raise Exception("Python 3.6 or higher is required. Your version is %s." % sys.version)
long_description = open('README.rst').read()
__version__ = ""
exec(open('ehforwarderbot/__version__.py').read())
setup(
name='ehforwarderbot',
packages=['ehforwarderbot'],
version=__version__,
description='An extensible message tunneling chat bot framework.',
long_description=long_description,
author='Eana Hufwe',
author_email='ilove@1a23.com',
url='https://github.com/blueset/ehforwarderbot',
license='GPLv3',
python_requires='>=3.6',
keywords=['EFB', 'EH Forwarder Bot', 'Chat tunneling', 'IM', 'messaging'],
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
"Intended Audience :: Developers",
"Intended Audience :: End Users/Desktop",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Topic :: Communications :: Chat",
"Topic :: Software Development :: Libraries :: Application Frameworks",
"Topic :: Utilities"
],
install_requires=[
"PyYaml"
],
entry_points={
"console_scripts": ['ehforwarderbot = ehforwarderbot.__main__:main']
}
)
|
import sys
from setuptools import setup
if sys.version_info < (3, 5):
raise Exception("Python 3.5 or higher is required. Your version is %s." % sys.version)
long_description = open('README.rst').read()
__version__ = ""
exec(open('ehforwarderbot/__version__.py').read())
setup(
name='ehforwarderbot',
packages=['ehforwarderbot'],
version=__version__,
description='An extensible message tunneling chat bot framework.',
long_description=long_description,
author='Eana Hufwe',
author_email='ilove@1a23.com',
url='https://github.com/blueset/ehforwarderbot',
license='GPLv3',
python_requires='>=3.6',
keywords=['EFB', 'EH Forwarder Bot', 'Chat tunneling', 'IM', 'messaging'],
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
"Intended Audience :: Developers",
"Intended Audience :: End Users/Desktop",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Topic :: Communications :: Chat",
"Topic :: Software Development :: Libraries :: Application Frameworks",
"Topic :: Utilities"
],
install_requires=[
"PyYaml"
],
entry_points={
"console_scripts": ['ehforwarderbot = ehforwarderbot.__main__:main']
}
)
Update requirement to Python 3.6
import sys
from setuptools import setup
if sys.version_info < (3, 6):
raise Exception("Python 3.6 or higher is required. Your version is %s." % sys.version)
long_description = open('README.rst').read()
__version__ = ""
exec(open('ehforwarderbot/__version__.py').read())
setup(
name='ehforwarderbot',
packages=['ehforwarderbot'],
version=__version__,
description='An extensible message tunneling chat bot framework.',
long_description=long_description,
author='Eana Hufwe',
author_email='ilove@1a23.com',
url='https://github.com/blueset/ehforwarderbot',
license='GPLv3',
python_requires='>=3.6',
keywords=['EFB', 'EH Forwarder Bot', 'Chat tunneling', 'IM', 'messaging'],
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
"Intended Audience :: Developers",
"Intended Audience :: End Users/Desktop",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Topic :: Communications :: Chat",
"Topic :: Software Development :: Libraries :: Application Frameworks",
"Topic :: Utilities"
],
install_requires=[
"PyYaml"
],
entry_points={
"console_scripts": ['ehforwarderbot = ehforwarderbot.__main__:main']
}
)
|
<commit_before>import sys
from setuptools import setup
if sys.version_info < (3, 5):
raise Exception("Python 3.5 or higher is required. Your version is %s." % sys.version)
long_description = open('README.rst').read()
__version__ = ""
exec(open('ehforwarderbot/__version__.py').read())
setup(
name='ehforwarderbot',
packages=['ehforwarderbot'],
version=__version__,
description='An extensible message tunneling chat bot framework.',
long_description=long_description,
author='Eana Hufwe',
author_email='ilove@1a23.com',
url='https://github.com/blueset/ehforwarderbot',
license='GPLv3',
python_requires='>=3.6',
keywords=['EFB', 'EH Forwarder Bot', 'Chat tunneling', 'IM', 'messaging'],
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
"Intended Audience :: Developers",
"Intended Audience :: End Users/Desktop",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Topic :: Communications :: Chat",
"Topic :: Software Development :: Libraries :: Application Frameworks",
"Topic :: Utilities"
],
install_requires=[
"PyYaml"
],
entry_points={
"console_scripts": ['ehforwarderbot = ehforwarderbot.__main__:main']
}
)
<commit_msg>Update requirement to Python 3.6<commit_after>import sys
from setuptools import setup
if sys.version_info < (3, 6):
raise Exception("Python 3.6 or higher is required. Your version is %s." % sys.version)
long_description = open('README.rst').read()
__version__ = ""
exec(open('ehforwarderbot/__version__.py').read())
setup(
name='ehforwarderbot',
packages=['ehforwarderbot'],
version=__version__,
description='An extensible message tunneling chat bot framework.',
long_description=long_description,
author='Eana Hufwe',
author_email='ilove@1a23.com',
url='https://github.com/blueset/ehforwarderbot',
license='GPLv3',
python_requires='>=3.6',
keywords=['EFB', 'EH Forwarder Bot', 'Chat tunneling', 'IM', 'messaging'],
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
"Intended Audience :: Developers",
"Intended Audience :: End Users/Desktop",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Topic :: Communications :: Chat",
"Topic :: Software Development :: Libraries :: Application Frameworks",
"Topic :: Utilities"
],
install_requires=[
"PyYaml"
],
entry_points={
"console_scripts": ['ehforwarderbot = ehforwarderbot.__main__:main']
}
)
|
00fd1fff56a7cdf6717a5312f827b3fdeed0c895
|
setup.py
|
setup.py
|
# coding: utf-8
"""
A simple module to fetch Cavelink values by parsing the HTML page of sensors.
"""
from setuptools import find_packages, setup
with open('README.rst', 'r') as f:
long_description = f.read()
setup(
name='example_cavelink',
version='1.1.0',
author='Sébastien Pittet',
author_email='sebastien@pittet.org',
description='Fetch Cavelink data by parsing the webpage of sensors.',
long_description=long_description,
url='https://github.com/SebastienPittet/cavelink',
keywords='speleo cave sensor',
packages=find_packages(),
license='MIT',
platforms='any',
install_requires=['python-dateutil', 'requests'],
classifiers=[
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 2',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Other Audience'
]
)
|
# coding: utf-8
"""
A simple module to fetch Cavelink values by parsing the HTML page of sensors.
"""
from setuptools import find_packages, setup
with open('README.rst', 'r') as f:
long_description = f.read()
setup(
name='cavelink',
version='1.1.0',
author='Sébastien Pittet',
author_email='sebastien@pittet.org',
description='Fetch Cavelink data by parsing the webpage of sensors.',
long_description=long_description,
url='https://github.com/SebastienPittet/cavelink',
keywords='speleo cave sensor',
packages=find_packages(),
license='MIT',
platforms='any',
install_requires=['python-dateutil', 'requests'],
classifiers=[
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 2',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Other Audience'
]
)
|
Correct name of package (for production).
|
Correct name of package (for production).
|
Python
|
mit
|
SebastienPittet/cavelink
|
# coding: utf-8
"""
A simple module to fetch Cavelink values by parsing the HTML page of sensors.
"""
from setuptools import find_packages, setup
with open('README.rst', 'r') as f:
long_description = f.read()
setup(
name='example_cavelink',
version='1.1.0',
author='Sébastien Pittet',
author_email='sebastien@pittet.org',
description='Fetch Cavelink data by parsing the webpage of sensors.',
long_description=long_description,
url='https://github.com/SebastienPittet/cavelink',
keywords='speleo cave sensor',
packages=find_packages(),
license='MIT',
platforms='any',
install_requires=['python-dateutil', 'requests'],
classifiers=[
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 2',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Other Audience'
]
)
Correct name of package (for production).
|
# coding: utf-8
"""
A simple module to fetch Cavelink values by parsing the HTML page of sensors.
"""
from setuptools import find_packages, setup
with open('README.rst', 'r') as f:
long_description = f.read()
setup(
name='cavelink',
version='1.1.0',
author='Sébastien Pittet',
author_email='sebastien@pittet.org',
description='Fetch Cavelink data by parsing the webpage of sensors.',
long_description=long_description,
url='https://github.com/SebastienPittet/cavelink',
keywords='speleo cave sensor',
packages=find_packages(),
license='MIT',
platforms='any',
install_requires=['python-dateutil', 'requests'],
classifiers=[
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 2',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Other Audience'
]
)
|
<commit_before># coding: utf-8
"""
A simple module to fetch Cavelink values by parsing the HTML page of sensors.
"""
from setuptools import find_packages, setup
with open('README.rst', 'r') as f:
long_description = f.read()
setup(
name='example_cavelink',
version='1.1.0',
author='Sébastien Pittet',
author_email='sebastien@pittet.org',
description='Fetch Cavelink data by parsing the webpage of sensors.',
long_description=long_description,
url='https://github.com/SebastienPittet/cavelink',
keywords='speleo cave sensor',
packages=find_packages(),
license='MIT',
platforms='any',
install_requires=['python-dateutil', 'requests'],
classifiers=[
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 2',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Other Audience'
]
)
<commit_msg>Correct name of package (for production).<commit_after>
|
# coding: utf-8
"""
A simple module to fetch Cavelink values by parsing the HTML page of sensors.
"""
from setuptools import find_packages, setup
with open('README.rst', 'r') as f:
long_description = f.read()
setup(
name='cavelink',
version='1.1.0',
author='Sébastien Pittet',
author_email='sebastien@pittet.org',
description='Fetch Cavelink data by parsing the webpage of sensors.',
long_description=long_description,
url='https://github.com/SebastienPittet/cavelink',
keywords='speleo cave sensor',
packages=find_packages(),
license='MIT',
platforms='any',
install_requires=['python-dateutil', 'requests'],
classifiers=[
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 2',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Other Audience'
]
)
|
# coding: utf-8
"""
A simple module to fetch Cavelink values by parsing the HTML page of sensors.
"""
from setuptools import find_packages, setup
with open('README.rst', 'r') as f:
long_description = f.read()
setup(
name='example_cavelink',
version='1.1.0',
author='Sébastien Pittet',
author_email='sebastien@pittet.org',
description='Fetch Cavelink data by parsing the webpage of sensors.',
long_description=long_description,
url='https://github.com/SebastienPittet/cavelink',
keywords='speleo cave sensor',
packages=find_packages(),
license='MIT',
platforms='any',
install_requires=['python-dateutil', 'requests'],
classifiers=[
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 2',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Other Audience'
]
)
Correct name of package (for production).
# coding: utf-8
"""
A simple module to fetch Cavelink values by parsing the HTML page of sensors.
"""
from setuptools import find_packages, setup
with open('README.rst', 'r') as f:
long_description = f.read()
setup(
name='cavelink',
version='1.1.0',
author='Sébastien Pittet',
author_email='sebastien@pittet.org',
description='Fetch Cavelink data by parsing the webpage of sensors.',
long_description=long_description,
url='https://github.com/SebastienPittet/cavelink',
keywords='speleo cave sensor',
packages=find_packages(),
license='MIT',
platforms='any',
install_requires=['python-dateutil', 'requests'],
classifiers=[
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 2',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Other Audience'
]
)
|
<commit_before># coding: utf-8
"""
A simple module to fetch Cavelink values by parsing the HTML page of sensors.
"""
from setuptools import find_packages, setup
with open('README.rst', 'r') as f:
long_description = f.read()
setup(
name='example_cavelink',
version='1.1.0',
author='Sébastien Pittet',
author_email='sebastien@pittet.org',
description='Fetch Cavelink data by parsing the webpage of sensors.',
long_description=long_description,
url='https://github.com/SebastienPittet/cavelink',
keywords='speleo cave sensor',
packages=find_packages(),
license='MIT',
platforms='any',
install_requires=['python-dateutil', 'requests'],
classifiers=[
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 2',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Other Audience'
]
)
<commit_msg>Correct name of package (for production).<commit_after># coding: utf-8
"""
A simple module to fetch Cavelink values by parsing the HTML page of sensors.
"""
from setuptools import find_packages, setup
with open('README.rst', 'r') as f:
long_description = f.read()
setup(
name='cavelink',
version='1.1.0',
author='Sébastien Pittet',
author_email='sebastien@pittet.org',
description='Fetch Cavelink data by parsing the webpage of sensors.',
long_description=long_description,
url='https://github.com/SebastienPittet/cavelink',
keywords='speleo cave sensor',
packages=find_packages(),
license='MIT',
platforms='any',
install_requires=['python-dateutil', 'requests'],
classifiers=[
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 2',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Other Audience'
]
)
|
b4502dd3c258583fe4899a6aac7ac395c2265fad
|
setup.py
|
setup.py
|
#!/usr/bin/env python2
from __future__ import print_function
from setuptools import setup
from sys import version_info
setup(name="mpd_pydb",
author="Wieland Hoffmann",
author_email="themineo@gmail.com",
packages=["mpd_pydb"],
package_dir={"mpd_pydb": "mpd_pydb"},
download_url="https://github.com/mineo/mpd_pydb/tarball/master",
url="http://github.com/mineo/mpd_pydb",
license="MIT",
classifiers=["Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",],
description="Module for reading an MPD database",
long_description=open("README.rst").read(),
setup_requires=["setuptools_scm", "pytest-runner"],
use_scm_version={"write_to": "mpd_pydb/version.py"},
extras_require={
'docs': ['sphinx']
},
tests_require=["pytest"],
)
|
#!/usr/bin/env python2
from __future__ import print_function
from setuptools import setup
from sys import version_info
if version_info < (3, 5):
requirements = ["pathlib"]
else:
requirements = []
setup(name="mpd_pydb",
author="Wieland Hoffmann",
author_email="themineo@gmail.com",
packages=["mpd_pydb"],
package_dir={"mpd_pydb": "mpd_pydb"},
download_url="https://github.com/mineo/mpd_pydb/tarball/master",
url="http://github.com/mineo/mpd_pydb",
license="MIT",
classifiers=["Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",],
description="Module for reading an MPD database",
long_description=open("README.rst").read(),
setup_requires=["setuptools_scm", "pytest-runner"],
use_scm_version={"write_to": "mpd_pydb/version.py"},
install_requires=requirements,
extras_require={
'docs': ['sphinx']
},
tests_require=["pytest"],
)
|
Add back the pathlib dependency on 2.7
|
Add back the pathlib dependency on 2.7
|
Python
|
mit
|
mineo/mpd_pydb,mineo/mpd_pydb
|
#!/usr/bin/env python2
from __future__ import print_function
from setuptools import setup
from sys import version_info
setup(name="mpd_pydb",
author="Wieland Hoffmann",
author_email="themineo@gmail.com",
packages=["mpd_pydb"],
package_dir={"mpd_pydb": "mpd_pydb"},
download_url="https://github.com/mineo/mpd_pydb/tarball/master",
url="http://github.com/mineo/mpd_pydb",
license="MIT",
classifiers=["Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",],
description="Module for reading an MPD database",
long_description=open("README.rst").read(),
setup_requires=["setuptools_scm", "pytest-runner"],
use_scm_version={"write_to": "mpd_pydb/version.py"},
extras_require={
'docs': ['sphinx']
},
tests_require=["pytest"],
)
Add back the pathlib dependency on 2.7
|
#!/usr/bin/env python2
from __future__ import print_function
from setuptools import setup
from sys import version_info
if version_info < (3, 5):
requirements = ["pathlib"]
else:
requirements = []
setup(name="mpd_pydb",
author="Wieland Hoffmann",
author_email="themineo@gmail.com",
packages=["mpd_pydb"],
package_dir={"mpd_pydb": "mpd_pydb"},
download_url="https://github.com/mineo/mpd_pydb/tarball/master",
url="http://github.com/mineo/mpd_pydb",
license="MIT",
classifiers=["Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",],
description="Module for reading an MPD database",
long_description=open("README.rst").read(),
setup_requires=["setuptools_scm", "pytest-runner"],
use_scm_version={"write_to": "mpd_pydb/version.py"},
install_requires=requirements,
extras_require={
'docs': ['sphinx']
},
tests_require=["pytest"],
)
|
<commit_before>#!/usr/bin/env python2
from __future__ import print_function
from setuptools import setup
from sys import version_info
setup(name="mpd_pydb",
author="Wieland Hoffmann",
author_email="themineo@gmail.com",
packages=["mpd_pydb"],
package_dir={"mpd_pydb": "mpd_pydb"},
download_url="https://github.com/mineo/mpd_pydb/tarball/master",
url="http://github.com/mineo/mpd_pydb",
license="MIT",
classifiers=["Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",],
description="Module for reading an MPD database",
long_description=open("README.rst").read(),
setup_requires=["setuptools_scm", "pytest-runner"],
use_scm_version={"write_to": "mpd_pydb/version.py"},
extras_require={
'docs': ['sphinx']
},
tests_require=["pytest"],
)
<commit_msg>Add back the pathlib dependency on 2.7<commit_after>
|
#!/usr/bin/env python2
from __future__ import print_function
from setuptools import setup
from sys import version_info
if version_info < (3, 5):
requirements = ["pathlib"]
else:
requirements = []
setup(name="mpd_pydb",
author="Wieland Hoffmann",
author_email="themineo@gmail.com",
packages=["mpd_pydb"],
package_dir={"mpd_pydb": "mpd_pydb"},
download_url="https://github.com/mineo/mpd_pydb/tarball/master",
url="http://github.com/mineo/mpd_pydb",
license="MIT",
classifiers=["Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",],
description="Module for reading an MPD database",
long_description=open("README.rst").read(),
setup_requires=["setuptools_scm", "pytest-runner"],
use_scm_version={"write_to": "mpd_pydb/version.py"},
install_requires=requirements,
extras_require={
'docs': ['sphinx']
},
tests_require=["pytest"],
)
|
#!/usr/bin/env python2
from __future__ import print_function
from setuptools import setup
from sys import version_info
setup(name="mpd_pydb",
author="Wieland Hoffmann",
author_email="themineo@gmail.com",
packages=["mpd_pydb"],
package_dir={"mpd_pydb": "mpd_pydb"},
download_url="https://github.com/mineo/mpd_pydb/tarball/master",
url="http://github.com/mineo/mpd_pydb",
license="MIT",
classifiers=["Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",],
description="Module for reading an MPD database",
long_description=open("README.rst").read(),
setup_requires=["setuptools_scm", "pytest-runner"],
use_scm_version={"write_to": "mpd_pydb/version.py"},
extras_require={
'docs': ['sphinx']
},
tests_require=["pytest"],
)
Add back the pathlib dependency on 2.7#!/usr/bin/env python2
from __future__ import print_function
from setuptools import setup
from sys import version_info
if version_info < (3, 5):
requirements = ["pathlib"]
else:
requirements = []
setup(name="mpd_pydb",
author="Wieland Hoffmann",
author_email="themineo@gmail.com",
packages=["mpd_pydb"],
package_dir={"mpd_pydb": "mpd_pydb"},
download_url="https://github.com/mineo/mpd_pydb/tarball/master",
url="http://github.com/mineo/mpd_pydb",
license="MIT",
classifiers=["Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",],
description="Module for reading an MPD database",
long_description=open("README.rst").read(),
setup_requires=["setuptools_scm", "pytest-runner"],
use_scm_version={"write_to": "mpd_pydb/version.py"},
install_requires=requirements,
extras_require={
'docs': ['sphinx']
},
tests_require=["pytest"],
)
|
<commit_before>#!/usr/bin/env python2
from __future__ import print_function
from setuptools import setup
from sys import version_info
setup(name="mpd_pydb",
author="Wieland Hoffmann",
author_email="themineo@gmail.com",
packages=["mpd_pydb"],
package_dir={"mpd_pydb": "mpd_pydb"},
download_url="https://github.com/mineo/mpd_pydb/tarball/master",
url="http://github.com/mineo/mpd_pydb",
license="MIT",
classifiers=["Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",],
description="Module for reading an MPD database",
long_description=open("README.rst").read(),
setup_requires=["setuptools_scm", "pytest-runner"],
use_scm_version={"write_to": "mpd_pydb/version.py"},
extras_require={
'docs': ['sphinx']
},
tests_require=["pytest"],
)
<commit_msg>Add back the pathlib dependency on 2.7<commit_after>#!/usr/bin/env python2
from __future__ import print_function
from setuptools import setup
from sys import version_info
if version_info < (3, 5):
requirements = ["pathlib"]
else:
requirements = []
setup(name="mpd_pydb",
author="Wieland Hoffmann",
author_email="themineo@gmail.com",
packages=["mpd_pydb"],
package_dir={"mpd_pydb": "mpd_pydb"},
download_url="https://github.com/mineo/mpd_pydb/tarball/master",
url="http://github.com/mineo/mpd_pydb",
license="MIT",
classifiers=["Development Status :: 4 - Beta",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",],
description="Module for reading an MPD database",
long_description=open("README.rst").read(),
setup_requires=["setuptools_scm", "pytest-runner"],
use_scm_version={"write_to": "mpd_pydb/version.py"},
install_requires=requirements,
extras_require={
'docs': ['sphinx']
},
tests_require=["pytest"],
)
|
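The conditional above runs when setup.py executes, so the dependency list is fixed by whichever interpreter builds the package. A minimal sketch of the declarative alternative, PEP 508 environment markers, which defer the decision to install time (illustrative only; it assumes a setuptools/pip recent enough to understand markers and mirrors the commit's < 3.5 threshold):

from setuptools import setup

setup(
    name="mpd_pydb_marker_sketch",  # hypothetical name, not the real package
    version="0.0.0",
    packages=["mpd_pydb"],
    install_requires=[
        # evaluated by pip on the target interpreter, so one wheel
        # serves both Python 2.7 and Python 3.5+
        'pathlib; python_version < "3.5"',
    ],
)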
67179d1cf8dab528bc418cdaff71446698a9bd51
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import sys
import os
from setuptools import setup, find_packages
assert sys.version_info >= (2, 6), 'We only support Python 2.6+'
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'asana'))
setup(
name='asana',
version='0.6.2',
description='Asana API client',
license='MIT',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'
],
install_requires=[
'requests~=2.9.1',
'requests_oauthlib~=0.6.1',
'six~=1.10.0'
],
author='Asana, Inc',
# author_email='',
url='http://github.com/asana/python-asana',
packages=find_packages(exclude=('tests',)),
keywords='asana',
zip_safe=True,
test_suite='tests')
|
#!/usr/bin/env python
import sys
import os
from setuptools import setup, find_packages
assert sys.version_info >= (2, 6), 'We only support Python 2.6+'
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'asana'))
setup(
name='asana',
version='0.6.2',
description='Asana API client',
license='MIT',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'
],
install_requires=[
'requests >=2.9.1, == 2.9.*',
'requests_oauthlib >= 0.6.1, == 0.6.*',
'six >= 1.10.0, == 1.10.*'
],
author='Asana, Inc',
# author_email='',
url='http://github.com/asana/python-asana',
packages=find_packages(exclude=('tests',)),
keywords='asana',
zip_safe=True,
test_suite='tests')
|
Fix attempt: version specifier ~= is not supported on older installations of pip
|
Fix attempt: version specifier ~= is not supported on older installations of pip
|
Python
|
mit
|
Asana/python-asana,asana/python-asana,asana/python-asana
|
#!/usr/bin/env python
import sys
import os
from setuptools import setup, find_packages
assert sys.version_info >= (2, 6), 'We only support Python 2.6+'
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'asana'))
setup(
name='asana',
version='0.6.2',
description='Asana API client',
license='MIT',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'
],
install_requires=[
'requests~=2.9.1',
'requests_oauthlib~=0.6.1',
'six~=1.10.0'
],
author='Asana, Inc',
# author_email='',
url='http://github.com/asana/python-asana',
packages=find_packages(exclude=('tests',)),
keywords='asana',
zip_safe=True,
test_suite='tests')
Fix attempt: version specifier ~= is not supported on older installations of pip
|
#!/usr/bin/env python
import sys
import os
from setuptools import setup, find_packages
assert sys.version_info >= (2, 6), 'We only support Python 2.6+'
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'asana'))
setup(
name='asana',
version='0.6.2',
description='Asana API client',
license='MIT',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'
],
install_requires=[
'requests >=2.9.1, == 2.9.*',
'requests_oauthlib >= 0.6.1, == 0.6.*',
'six >= 1.10.0, == 1.10.*'
],
author='Asana, Inc',
# author_email='',
url='http://github.com/asana/python-asana',
packages=find_packages(exclude=('tests',)),
keywords='asana',
zip_safe=True,
test_suite='tests')
|
<commit_before>#!/usr/bin/env python
import sys
import os
from setuptools import setup, find_packages
assert sys.version_info >= (2, 6), 'We only support Python 2.6+'
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'asana'))
setup(
name='asana',
version='0.6.2',
description='Asana API client',
license='MIT',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'
],
install_requires=[
'requests~=2.9.1',
'requests_oauthlib~=0.6.1',
'six~=1.10.0'
],
author='Asana, Inc',
# author_email='',
url='http://github.com/asana/python-asana',
packages=find_packages(exclude=('tests',)),
keywords='asana',
zip_safe=True,
test_suite='tests')
<commit_msg>Fix attempt: version specifier ~= is not supported on older installations of pip<commit_after>
|
#!/usr/bin/env python
import sys
import os
from setuptools import setup, find_packages
assert sys.version_info >= (2, 6), 'We only support Python 2.6+'
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'asana'))
setup(
name='asana',
version='0.6.2',
description='Asana API client',
license='MIT',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'
],
install_requires=[
'requests >=2.9.1, == 2.9.*',
'requests_oauthlib >= 0.6.1, == 0.6.*',
'six >= 1.10.0, == 1.10.*'
],
author='Asana, Inc',
# author_email='',
url='http://github.com/asana/python-asana',
packages=find_packages(exclude=('tests',)),
keywords='asana',
zip_safe=True,
test_suite='tests')
|
#!/usr/bin/env python
import sys
import os
from setuptools import setup, find_packages
assert sys.version_info >= (2, 6), 'We only support Python 2.6+'
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'asana'))
setup(
name='asana',
version='0.6.2',
description='Asana API client',
license='MIT',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'
],
install_requires=[
'requests~=2.9.1',
'requests_oauthlib~=0.6.1',
'six~=1.10.0'
],
author='Asana, Inc',
# author_email='',
url='http://github.com/asana/python-asana',
packages=find_packages(exclude=('tests',)),
keywords='asana',
zip_safe=True,
test_suite='tests')
Fix attempt: version specifier ~= is not supported on older installations of pip#!/usr/bin/env python
import sys
import os
from setuptools import setup, find_packages
assert sys.version_info >= (2, 6), 'We only support Python 2.6+'
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'asana'))
setup(
name='asana',
version='0.6.2',
description='Asana API client',
license='MIT',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'
],
install_requires=[
'requests >=2.9.1, == 2.9.*',
'requests_oauthlib >= 0.6.1, == 0.6.*',
'six >= 1.10.0, == 1.10.*'
],
author='Asana, Inc',
# author_email='',
url='http://github.com/asana/python-asana',
packages=find_packages(exclude=('tests',)),
keywords='asana',
zip_safe=True,
test_suite='tests')
|
<commit_before>#!/usr/bin/env python
import sys
import os
from setuptools import setup, find_packages
assert sys.version_info >= (2, 6), 'We only support Python 2.6+'
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'asana'))
setup(
name='asana',
version='0.6.2',
description='Asana API client',
license='MIT',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'
],
install_requires=[
'requests~=2.9.1',
'requests_oauthlib~=0.6.1',
'six~=1.10.0'
],
author='Asana, Inc',
# author_email='',
url='http://github.com/asana/python-asana',
packages=find_packages(exclude=('tests',)),
keywords='asana',
zip_safe=True,
test_suite='tests')
<commit_msg>Fix attempt: version specifier ~= is not supported on older installations of pip<commit_after>#!/usr/bin/env python
import sys
import os
from setuptools import setup, find_packages
assert sys.version_info >= (2, 6), 'We only support Python 2.6+'
sys.path.insert(0, os.path.join(os.path.dirname(__file__), 'asana'))
setup(
name='asana',
version='0.6.2',
description='Asana API client',
license='MIT',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'
],
install_requires=[
'requests >=2.9.1, == 2.9.*',
'requests_oauthlib >= 0.6.1, == 0.6.*',
'six >= 1.10.0, == 1.10.*'
],
author='Asana, Inc',
# author_email='',
url='http://github.com/asana/python-asana',
packages=find_packages(exclude=('tests',)),
keywords='asana',
zip_safe=True,
test_suite='tests')
|
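PEP 440 defines the compatible-release operator so that ~=2.9.1 means >=2.9.1, ==2.9.*, which is exactly the spelling the commit falls back to for older pip. A small sketch that double-checks the equivalence with the packaging library (assumed to be installed; it is not a dependency of the recorded setup.py):

from packaging.specifiers import SpecifierSet

compat = SpecifierSet("~=2.9.1")             # compatible-release operator
expanded = SpecifierSet(">=2.9.1, ==2.9.*")  # spelling older pip accepts

for candidate in ("2.9.0", "2.9.1", "2.9.5", "2.10.0", "3.0.0"):
    # both specifier sets should accept and reject the same versions
    assert (candidate in compat) == (candidate in expanded), candidate
print("~=2.9.1 and '>=2.9.1, ==2.9.*' match the same versions")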
4ffc44b6c33ce77dbd41388e0c4c116064d667d5
|
setup.py
|
setup.py
|
#!/usr/bin/env python3
# written by Daniel Oaks <daniel@danieloaks.net>
# Released under the ISC license
from setuptools import setup, find_packages
with open('README.rst') as file:
long_description = file.read()
setup(
name='girc',
version='0.3.1',
description='A modern Python IRC library for Python 3.4, based on asyncio. In Development.',
long_description=long_description,
author='Daniel Oaks',
author_email='daniel@danieloaks.net',
url='https://github.com/DanielOaks/girc',
packages=find_packages(),
scripts=['girc_test'],
install_requires=['docopt'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: ISC License (ISCL)',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3 :: Only',
'Topic :: Communications :: Chat',
'Topic :: Communications :: Chat :: Internet Relay Chat',
]
)
|
#!/usr/bin/env python3
# written by Daniel Oaks <daniel@danieloaks.net>
# Released under the ISC license
from setuptools import setup, find_packages
import girc
with open('README.rst') as file:
long_description = file.read()
setup(
name='girc',
version=girc.__version__,
description='A modern Python IRC library for Python 3.4, based on asyncio. In Development.',
long_description=long_description,
author='Daniel Oaks',
author_email='daniel@danieloaks.net',
url='https://github.com/DanielOaks/girc',
packages=find_packages(),
scripts=['girc_test'],
install_requires=['docopt'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: ISC License (ISCL)',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3 :: Only',
'Topic :: Communications :: Chat',
'Topic :: Communications :: Chat :: Internet Relay Chat',
]
)
|
Read package version from one location
|
Read package version from one location
|
Python
|
isc
|
DanielOaks/girc,DanielOaks/girc
|
#!/usr/bin/env python3
# written by Daniel Oaks <daniel@danieloaks.net>
# Released under the ISC license
from setuptools import setup, find_packages
with open('README.rst') as file:
long_description = file.read()
setup(
name='girc',
version='0.3.1',
description='A modern Python IRC library for Python 3.4, based on asyncio. In Development.',
long_description=long_description,
author='Daniel Oaks',
author_email='daniel@danieloaks.net',
url='https://github.com/DanielOaks/girc',
packages=find_packages(),
scripts=['girc_test'],
install_requires=['docopt'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: ISC License (ISCL)',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3 :: Only',
'Topic :: Communications :: Chat',
'Topic :: Communications :: Chat :: Internet Relay Chat',
]
)
Read package version from one location
|
#!/usr/bin/env python3
# written by Daniel Oaks <daniel@danieloaks.net>
# Released under the ISC license
from setuptools import setup, find_packages
import girc
with open('README.rst') as file:
long_description = file.read()
setup(
name='girc',
version=girc.__version__,
description='A modern Python IRC library for Python 3.4, based on asyncio. In Development.',
long_description=long_description,
author='Daniel Oaks',
author_email='daniel@danieloaks.net',
url='https://github.com/DanielOaks/girc',
packages=find_packages(),
scripts=['girc_test'],
install_requires=['docopt'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: ISC License (ISCL)',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3 :: Only',
'Topic :: Communications :: Chat',
'Topic :: Communications :: Chat :: Internet Relay Chat',
]
)
|
<commit_before>#!/usr/bin/env python3
# written by Daniel Oaks <daniel@danieloaks.net>
# Released under the ISC license
from setuptools import setup, find_packages
with open('README.rst') as file:
long_description = file.read()
setup(
name='girc',
version='0.3.1',
description='A modern Python IRC library for Python 3.4, based on asyncio. In Development.',
long_description=long_description,
author='Daniel Oaks',
author_email='daniel@danieloaks.net',
url='https://github.com/DanielOaks/girc',
packages=find_packages(),
scripts=['girc_test'],
install_requires=['docopt'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: ISC License (ISCL)',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3 :: Only',
'Topic :: Communications :: Chat',
'Topic :: Communications :: Chat :: Internet Relay Chat',
]
)
<commit_msg>Read package version from one location<commit_after>
|
#!/usr/bin/env python3
# written by Daniel Oaks <daniel@danieloaks.net>
# Released under the ISC license
from setuptools import setup, find_packages
import girc
with open('README.rst') as file:
long_description = file.read()
setup(
name='girc',
version=girc.__version__,
description='A modern Python IRC library for Python 3.4, based on asyncio. In Development.',
long_description=long_description,
author='Daniel Oaks',
author_email='daniel@danieloaks.net',
url='https://github.com/DanielOaks/girc',
packages=find_packages(),
scripts=['girc_test'],
install_requires=['docopt'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: ISC License (ISCL)',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3 :: Only',
'Topic :: Communications :: Chat',
'Topic :: Communications :: Chat :: Internet Relay Chat',
]
)
|
#!/usr/bin/env python3
# written by Daniel Oaks <daniel@danieloaks.net>
# Released under the ISC license
from setuptools import setup, find_packages
with open('README.rst') as file:
long_description = file.read()
setup(
name='girc',
version='0.3.1',
description='A modern Python IRC library for Python 3.4, based on asyncio. In Development.',
long_description=long_description,
author='Daniel Oaks',
author_email='daniel@danieloaks.net',
url='https://github.com/DanielOaks/girc',
packages=find_packages(),
scripts=['girc_test'],
install_requires=['docopt'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: ISC License (ISCL)',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3 :: Only',
'Topic :: Communications :: Chat',
'Topic :: Communications :: Chat :: Internet Relay Chat',
]
)
Read package version from one location#!/usr/bin/env python3
# written by Daniel Oaks <daniel@danieloaks.net>
# Released under the ISC license
from setuptools import setup, find_packages
import girc
with open('README.rst') as file:
long_description = file.read()
setup(
name='girc',
version=girc.__version__,
description='A modern Python IRC library for Python 3.4, based on asyncio. In Development.',
long_description=long_description,
author='Daniel Oaks',
author_email='daniel@danieloaks.net',
url='https://github.com/DanielOaks/girc',
packages=find_packages(),
scripts=['girc_test'],
install_requires=['docopt'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: ISC License (ISCL)',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3 :: Only',
'Topic :: Communications :: Chat',
'Topic :: Communications :: Chat :: Internet Relay Chat',
]
)
|
<commit_before>#!/usr/bin/env python3
# written by Daniel Oaks <daniel@danieloaks.net>
# Released under the ISC license
from setuptools import setup, find_packages
with open('README.rst') as file:
long_description = file.read()
setup(
name='girc',
version='0.3.1',
description='A modern Python IRC library for Python 3.4, based on asyncio. In Development.',
long_description=long_description,
author='Daniel Oaks',
author_email='daniel@danieloaks.net',
url='https://github.com/DanielOaks/girc',
packages=find_packages(),
scripts=['girc_test'],
install_requires=['docopt'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: ISC License (ISCL)',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3 :: Only',
'Topic :: Communications :: Chat',
'Topic :: Communications :: Chat :: Internet Relay Chat',
]
)
<commit_msg>Read package version from one location<commit_after>#!/usr/bin/env python3
# written by Daniel Oaks <daniel@danieloaks.net>
# Released under the ISC license
from setuptools import setup, find_packages
import girc
with open('README.rst') as file:
long_description = file.read()
setup(
name='girc',
version=girc.__version__,
description='A modern Python IRC library for Python 3.4, based on asyncio. In Development.',
long_description=long_description,
author='Daniel Oaks',
author_email='daniel@danieloaks.net',
url='https://github.com/DanielOaks/girc',
packages=find_packages(),
scripts=['girc_test'],
install_requires=['docopt'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: ISC License (ISCL)',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3 :: Only',
'Topic :: Communications :: Chat',
'Topic :: Communications :: Chat :: Internet Relay Chat',
]
)
|
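Importing girc at the top of setup.py works only while the package can be imported without its dependencies; once girc/__init__.py pulls in third-party modules, a fresh environment cannot even run setup.py. A hedged sketch of the common workaround, reading __version__ from the source text instead of importing it (the girc/__init__.py path is an assumption about the layout):

import io
import re

def read_version(path="girc/__init__.py"):
    # look for a literal __version__ = "..." assignment in the module source
    with io.open(path, encoding="utf-8") as handle:
        source = handle.read()
    match = re.search(r'^__version__\s*=\s*[\'"]([^\'"]+)[\'"]', source, re.M)
    if match is None:
        raise RuntimeError("cannot find __version__ in %s" % path)
    return match.group(1)

# setup(..., version=read_version(), ...) then replaces the import above.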
6705b4eb603f69681357a5f71f02e81705ea5e17
|
setup.py
|
setup.py
|
from distutils.core import setup
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
long_description = open('README.md').read()
setup(
name='pymp4parse',
version='0.3.0',
packages=[''],
url='https://github.com/use-sparingly/pymp4parse',
license='The MIT License',
author='Alastair Mccormack',
author_email='alastair at alu.media',
description='MP4 / ISO base media file format (ISO/IEC 14496-12 - MPEG-4 Part 12) file parser',
requires=['bitstring'],
install_requires=['bitstring'],
long_description=long_description,
data_files=[('', ['README.md'])]
)
|
from distutils.core import setup
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
long_description = open('README.md').read()
setup(
name='pymp4parse',
version='0.3.0',
packages=[''],
url='https://github.com/use-sparingly/pymp4parse',
license='The MIT License',
author='Alastair Mccormack',
author_email='alastair at alu.media',
description='MP4 / ISO base media file format (ISO/IEC 14496-12 - MPEG-4 Part 12) file parser',
requires=['bitstring', 'six'],
install_requires=['bitstring', 'six'],
long_description=long_description,
data_files=[('', ['README.md'])]
)
|
Add six as dependency to fix import issue
|
Add six as dependency to fix import issue
|
Python
|
mit
|
use-sparingly/pymp4parse
|
from distutils.core import setup
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
long_description = open('README.md').read()
setup(
name='pymp4parse',
version='0.3.0',
packages=[''],
url='https://github.com/use-sparingly/pymp4parse',
license='The MIT License',
author='Alastair Mccormack',
author_email='alastair at alu.media',
description='MP4 / ISO base media file format (ISO/IEC 14496-12 - MPEG-4 Part 12) file parser',
requires=['bitstring'],
install_requires=['bitstring'],
long_description=long_description,
data_files=[('', ['README.md'])]
)
Add six as dependency to fix import issue
|
from distutils.core import setup
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
long_description = open('README.md').read()
setup(
name='pymp4parse',
version='0.3.0',
packages=[''],
url='https://github.com/use-sparingly/pymp4parse',
license='The MIT License',
author='Alastair Mccormack',
author_email='alastair at alu.media',
description='MP4 / ISO base media file format (ISO/IEC 14496-12 - MPEG-4 Part 12) file parser',
requires=['bitstring', 'six'],
install_requires=['bitstring', 'six'],
long_description=long_description,
data_files=[('', ['README.md'])]
)
|
<commit_before>from distutils.core import setup
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
long_description = open('README.md').read()
setup(
name='pymp4parse',
version='0.3.0',
packages=[''],
url='https://github.com/use-sparingly/pymp4parse',
license='The MIT License',
author='Alastair Mccormack',
author_email='alastair at alu.media',
description='MP4 / ISO base media file format (ISO/IEC 14496-12 - MPEG-4 Part 12) file parser',
requires=['bitstring'],
install_requires=['bitstring'],
long_description=long_description,
data_files=[('', ['README.md'])]
)
<commit_msg>Add six as dependency to fix import issue<commit_after>
|
from distutils.core import setup
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
long_description = open('README.md').read()
setup(
name='pymp4parse',
version='0.3.0',
packages=[''],
url='https://github.com/use-sparingly/pymp4parse',
license='The MIT License',
author='Alastair Mccormack',
author_email='alastair at alu.media',
description='MP4 / ISO base media file format (ISO/IEC 14496-12 - MPEG-4 Part 12) file parser',
requires=['bitstring', 'six'],
install_requires=['bitstring', 'six'],
long_description=long_description,
data_files=[('', ['README.md'])]
)
|
from distutils.core import setup
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
long_description = open('README.md').read()
setup(
name='pymp4parse',
version='0.3.0',
packages=[''],
url='https://github.com/use-sparingly/pymp4parse',
license='The MIT License',
author='Alastair Mccormack',
author_email='alastair at alu.media',
description='MP4 / ISO base media file format (ISO/IEC 14496-12 - MPEG-4 Part 12) file parser',
requires=['bitstring'],
install_requires=['bitstring'],
long_description=long_description,
data_files=[('', ['README.md'])]
)
Add six as dependency to fix import issuefrom distutils.core import setup
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
long_description = open('README.md').read()
setup(
name='pymp4parse',
version='0.3.0',
packages=[''],
url='https://github.com/use-sparingly/pymp4parse',
license='The MIT License',
author='Alastair Mccormack',
author_email='alastair at alu.media',
description='MP4 / ISO base media file format (ISO/IEC 14496-12 - MPEG-4 Part 12) file parser',
requires=['bitstring', 'six'],
install_requires=['bitstring', 'six'],
long_description=long_description,
data_files=[('', ['README.md'])]
)
|
<commit_before>from distutils.core import setup
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
long_description = open('README.md').read()
setup(
name='pymp4parse',
version='0.3.0',
packages=[''],
url='https://github.com/use-sparingly/pymp4parse',
license='The MIT License',
author='Alastair Mccormack',
author_email='alastair at alu.media',
description='MP4 / ISO base media file format (ISO/IEC 14496-12 - MPEG-4 Part 12) file parser',
requires=['bitstring'],
install_requires=['bitstring'],
long_description=long_description,
data_files=[('', ['README.md'])]
)
<commit_msg>Add six as dependency to fix import issue<commit_after>from distutils.core import setup
try:
import pypandoc
long_description = pypandoc.convert('README.md', 'rst')
except(IOError, ImportError):
long_description = open('README.md').read()
setup(
name='pymp4parse',
version='0.3.0',
packages=[''],
url='https://github.com/use-sparingly/pymp4parse',
license='The MIT License',
author='Alastair Mccormack',
author_email='alastair at alu.media',
description='MP4 / ISO base media file format (ISO/IEC 14496-12 - MPEG-4 Part 12) file parser',
requires=['bitstring', 'six'],
install_requires=['bitstring', 'six'],
long_description=long_description,
data_files=[('', ['README.md'])]
)
|
4fdd9a72a05fa847e435c13af7b1dd33f9dcd34e
|
tests.py
|
tests.py
|
#!/usr/bin/python -O
# -*- coding: utf-8 -*-
import sqlite3
from parser import SQLITE3_DB_NAME
from random import randint, randrange
def main():
""" Ouputs a random clue (with game ID) from 10 random games for manual verification. """
sql = sqlite3.connect(SQLITE3_DB_NAME)
# list of random game ids
gids = [randint(1, 3790) for i in xrange(10)]
# output format
print "GID".rjust(5), "R -> Category -> Clue text -> Answer"
for gid in gids:
rows = sql.execute("SELECT round, category, clue, answer FROM clues INNER JOIN documents ON clues.id = documents.id LEFT JOIN classifications ON classifications.clueid = clues.id LEFT JOIN categories ON classifications.catid = categories.id WHERE game = ?;", (gid, ))
rows = rows.fetchall()
# some games were skipped over
if len(rows) > 0:
meta = "#%d" % gid
row = randrange(0, len(rows))
print meta.rjust(5), " -> ".join(str(e) for e in rows[row])
if __name__ == "__main__":
main()
|
#!/usr/bin/python -OO
# -*- coding: utf-8 -*-
import sqlite3
from parser import SQLITE3_DB_NAME
from random import randint, randrange
def main():
""" Ouputs a random clue (with game ID) from 10 random games for manual verification. """
sql = sqlite3.connect(SQLITE3_DB_NAME)
# list of random game ids
gids = [randint(1, 3790) for i in xrange(10)]
# output format
print "GID".rjust(5), "R -> Category -> Clue text -> Answer"
for gid in gids:
rows = sql.execute("SELECT round, category, clue, answer FROM clues INNER JOIN documents ON clues.id = documents.id LEFT JOIN classifications ON classifications.clueid = clues.id LEFT JOIN categories ON classifications.catid = categories.id WHERE game = ?;", (gid, ))
rows = rows.fetchall()
# some games were skipped over
if len(rows) > 0:
meta = "#%d" % gid
row = randrange(0, len(rows))
print meta.rjust(5), " -> ".join(str(e) for e in rows[row])
if __name__ == "__main__":
main()
|
Discard docstrings in addition to the -O optimizations.
|
Discard docstrings in addition to the -O optimizations.
|
Python
|
mit
|
whymarrh/jeopardy-parser,dangoldin/jeopardy-parser,dangoldin/jeopardy-parser
|
#!/usr/bin/python -O
# -*- coding: utf-8 -*-
import sqlite3
from parser import SQLITE3_DB_NAME
from random import randint, randrange
def main():
""" Ouputs a random clue (with game ID) from 10 random games for manual verification. """
sql = sqlite3.connect(SQLITE3_DB_NAME)
# list of random game ids
gids = [randint(1, 3790) for i in xrange(10)]
# output format
print "GID".rjust(5), "R -> Category -> Clue text -> Answer"
for gid in gids:
rows = sql.execute("SELECT round, category, clue, answer FROM clues INNER JOIN documents ON clues.id = documents.id LEFT JOIN classifications ON classifications.clueid = clues.id LEFT JOIN categories ON classifications.catid = categories.id WHERE game = ?;", (gid, ))
rows = rows.fetchall()
# some games were skipped over
if len(rows) > 0:
meta = "#%d" % gid
row = randrange(0, len(rows))
print meta.rjust(5), " -> ".join(str(e) for e in rows[row])
if __name__ == "__main__":
main()
Discard docstrings in addition to the -O optimizations.
|
#!/usr/bin/python -OO
# -*- coding: utf-8 -*-
import sqlite3
from parser import SQLITE3_DB_NAME
from random import randint, randrange
def main():
""" Ouputs a random clue (with game ID) from 10 random games for manual verification. """
sql = sqlite3.connect(SQLITE3_DB_NAME)
# list of random game ids
gids = [randint(1, 3790) for i in xrange(10)]
# output format
print "GID".rjust(5), "R -> Category -> Clue text -> Answer"
for gid in gids:
rows = sql.execute("SELECT round, category, clue, answer FROM clues INNER JOIN documents ON clues.id = documents.id LEFT JOIN classifications ON classifications.clueid = clues.id LEFT JOIN categories ON classifications.catid = categories.id WHERE game = ?;", (gid, ))
rows = rows.fetchall()
# some games were skipped over
if len(rows) > 0:
meta = "#%d" % gid
row = randrange(0, len(rows))
print meta.rjust(5), " -> ".join(str(e) for e in rows[row])
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/python -O
# -*- coding: utf-8 -*-
import sqlite3
from parser import SQLITE3_DB_NAME
from random import randint, randrange
def main():
""" Ouputs a random clue (with game ID) from 10 random games for manual verification. """
sql = sqlite3.connect(SQLITE3_DB_NAME)
# list of random game ids
gids = [randint(1, 3790) for i in xrange(10)]
# output format
print "GID".rjust(5), "R -> Category -> Clue text -> Answer"
for gid in gids:
rows = sql.execute("SELECT round, category, clue, answer FROM clues INNER JOIN documents ON clues.id = documents.id LEFT JOIN classifications ON classifications.clueid = clues.id LEFT JOIN categories ON classifications.catid = categories.id WHERE game = ?;", (gid, ))
rows = rows.fetchall()
# some games were skipped over
if len(rows) > 0:
meta = "#%d" % gid
row = randrange(0, len(rows))
print meta.rjust(5), " -> ".join(str(e) for e in rows[row])
if __name__ == "__main__":
main()
<commit_msg>Discard docstrings in addition to the -O optimizations.<commit_after>
|
#!/usr/bin/python -OO
# -*- coding: utf-8 -*-
import sqlite3
from parser import SQLITE3_DB_NAME
from random import randint, randrange
def main():
""" Ouputs a random clue (with game ID) from 10 random games for manual verification. """
sql = sqlite3.connect(SQLITE3_DB_NAME)
# list of random game ids
gids = [randint(1, 3790) for i in xrange(10)]
# output format
print "GID".rjust(5), "R -> Category -> Clue text -> Answer"
for gid in gids:
rows = sql.execute("SELECT round, category, clue, answer FROM clues INNER JOIN documents ON clues.id = documents.id LEFT JOIN classifications ON classifications.clueid = clues.id LEFT JOIN categories ON classifications.catid = categories.id WHERE game = ?;", (gid, ))
rows = rows.fetchall()
# some games were skipped over
if len(rows) > 0:
meta = "#%d" % gid
row = randrange(0, len(rows))
print meta.rjust(5), " -> ".join(str(e) for e in rows[row])
if __name__ == "__main__":
main()
|
#!/usr/bin/python -O
# -*- coding: utf-8 -*-
import sqlite3
from parser import SQLITE3_DB_NAME
from random import randint, randrange
def main():
""" Ouputs a random clue (with game ID) from 10 random games for manual verification. """
sql = sqlite3.connect(SQLITE3_DB_NAME)
# list of random game ids
gids = [randint(1, 3790) for i in xrange(10)]
# output format
print "GID".rjust(5), "R -> Category -> Clue text -> Answer"
for gid in gids:
rows = sql.execute("SELECT round, category, clue, answer FROM clues INNER JOIN documents ON clues.id = documents.id LEFT JOIN classifications ON classifications.clueid = clues.id LEFT JOIN categories ON classifications.catid = categories.id WHERE game = ?;", (gid, ))
rows = rows.fetchall()
# some games were skipped over
if len(rows) > 0:
meta = "#%d" % gid
row = randrange(0, len(rows))
print meta.rjust(5), " -> ".join(str(e) for e in rows[row])
if __name__ == "__main__":
main()
Discard docstrings in addition to the -O optimizations.#!/usr/bin/python -OO
# -*- coding: utf-8 -*-
import sqlite3
from parser import SQLITE3_DB_NAME
from random import randint, randrange
def main():
""" Ouputs a random clue (with game ID) from 10 random games for manual verification. """
sql = sqlite3.connect(SQLITE3_DB_NAME)
# list of random game ids
gids = [randint(1, 3790) for i in xrange(10)]
# output format
print "GID".rjust(5), "R -> Category -> Clue text -> Answer"
for gid in gids:
rows = sql.execute("SELECT round, category, clue, answer FROM clues INNER JOIN documents ON clues.id = documents.id LEFT JOIN classifications ON classifications.clueid = clues.id LEFT JOIN categories ON classifications.catid = categories.id WHERE game = ?;", (gid, ))
rows = rows.fetchall()
# some games were skipped over
if len(rows) > 0:
meta = "#%d" % gid
row = randrange(0, len(rows))
print meta.rjust(5), " -> ".join(str(e) for e in rows[row])
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/python -O
# -*- coding: utf-8 -*-
import sqlite3
from parser import SQLITE3_DB_NAME
from random import randint, randrange
def main():
""" Ouputs a random clue (with game ID) from 10 random games for manual verification. """
sql = sqlite3.connect(SQLITE3_DB_NAME)
# list of random game ids
gids = [randint(1, 3790) for i in xrange(10)]
# output format
print "GID".rjust(5), "R -> Category -> Clue text -> Answer"
for gid in gids:
rows = sql.execute("SELECT round, category, clue, answer FROM clues INNER JOIN documents ON clues.id = documents.id LEFT JOIN classifications ON classifications.clueid = clues.id LEFT JOIN categories ON classifications.catid = categories.id WHERE game = ?;", (gid, ))
rows = rows.fetchall()
# some games were skipped over
if len(rows) > 0:
meta = "#%d" % gid
row = randrange(0, len(rows))
print meta.rjust(5), " -> ".join(str(e) for e in rows[row])
if __name__ == "__main__":
main()
<commit_msg>Discard docstrings in addition to the -O optimizations.<commit_after>#!/usr/bin/python -OO
# -*- coding: utf-8 -*-
import sqlite3
from parser import SQLITE3_DB_NAME
from random import randint, randrange
def main():
""" Ouputs a random clue (with game ID) from 10 random games for manual verification. """
sql = sqlite3.connect(SQLITE3_DB_NAME)
# list of random game ids
gids = [randint(1, 3790) for i in xrange(10)]
# output format
print "GID".rjust(5), "R -> Category -> Clue text -> Answer"
for gid in gids:
rows = sql.execute("SELECT round, category, clue, answer FROM clues INNER JOIN documents ON clues.id = documents.id LEFT JOIN classifications ON classifications.clueid = clues.id LEFT JOIN categories ON classifications.catid = categories.id WHERE game = ?;", (gid, ))
rows = rows.fetchall()
# some games were skipped over
if len(rows) > 0:
meta = "#%d" % gid
row = randrange(0, len(rows))
print meta.rjust(5), " -> ".join(str(e) for e in rows[row])
if __name__ == "__main__":
main()
|
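The shebang change is meaningful because -O only drops assert statements and sets __debug__ to False, while -OO additionally discards docstrings (module, class, and function __doc__ become None). A tiny sketch, runnable under either flag, that reports which level is active (standard library only):

import sys

def optimization_report():
    """Report how aggressively this interpreter was started."""
    # sys.flags.optimize is 0 normally, 1 under -O, 2 under -OO
    print("optimize flag: %d" % sys.flags.optimize)
    print("asserts active: %r" % __debug__)
    print("docstring kept: %r" % (optimization_report.__doc__ is not None))

optimization_report()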
bdeb28f2f7840c04dbf65b6c0771c121f229e59a
|
tests.py
|
tests.py
|
#!/usr/bin/env python
import sys
import os
import unittest
from straight.plugin.loader import StraightPluginLoader
class PluginTestCase(unittest.TestCase):
def setUp(self):
self.loader = StraightPluginLoader()
self.added_path = os.path.join(os.path.dirname(__file__), 'more-test-plugins')
self.added_path = os.path.join(os.path.dirname(__file__), 'some-test-plugins')
sys.path.append(self.added_path)
def tearDown(self):
del sys.path[-1]
del sys.path[-1]
def test_load(self):
modules = list(self.loader.load('testplugin'))
assert len(modules) == 2, modules
def test_plugin(self):
assert self.loader.load('testplugin')[0].do(1) == 2
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python
import sys
import os
import unittest
from straight.plugin.loader import StraightPluginLoader
class PluginTestCase(unittest.TestCase):
def setUp(self):
self.loader = StraightPluginLoader()
sys.path.append(os.path.join(os.path.dirname(__file__), 'more-test-plugins'))
sys.path.append(os.path.join(os.path.dirname(__file__), 'some-test-plugins'))
def tearDown(self):
del sys.path[-1]
del sys.path[-1]
def test_load(self):
modules = list(self.loader.load('testplugin'))
assert len(modules) == 2, modules
def test_plugin(self):
assert self.loader.load('testplugin')[0].do(1) == 2
if __name__ == '__main__':
unittest.main()
|
Fix test case for multiple locations of a namespace
|
Fix test case for multiple locations of a namespace
|
Python
|
mit
|
ironfroggy/straight.plugin,pombredanne/straight.plugin
|
#!/usr/bin/env python
import sys
import os
import unittest
from straight.plugin.loader import StraightPluginLoader
class PluginTestCase(unittest.TestCase):
def setUp(self):
self.loader = StraightPluginLoader()
self.added_path = os.path.join(os.path.dirname(__file__), 'more-test-plugins')
self.added_path = os.path.join(os.path.dirname(__file__), 'some-test-plugins')
sys.path.append(self.added_path)
def tearDown(self):
del sys.path[-1]
del sys.path[-1]
def test_load(self):
modules = list(self.loader.load('testplugin'))
assert len(modules) == 2, modules
def test_plugin(self):
assert self.loader.load('testplugin')[0].do(1) == 2
if __name__ == '__main__':
unittest.main()
Fix test case for multiple locations of a namespace
|
#!/usr/bin/env python
import sys
import os
import unittest
from straight.plugin.loader import StraightPluginLoader
class PluginTestCase(unittest.TestCase):
def setUp(self):
self.loader = StraightPluginLoader()
sys.path.append(os.path.join(os.path.dirname(__file__), 'more-test-plugins'))
sys.path.append(os.path.join(os.path.dirname(__file__), 'some-test-plugins'))
def tearDown(self):
del sys.path[-1]
del sys.path[-1]
def test_load(self):
modules = list(self.loader.load('testplugin'))
assert len(modules) == 2, modules
def test_plugin(self):
assert self.loader.load('testplugin')[0].do(1) == 2
if __name__ == '__main__':
unittest.main()
|
<commit_before>#!/usr/bin/env python
import sys
import os
import unittest
from straight.plugin.loader import StraightPluginLoader
class PluginTestCase(unittest.TestCase):
def setUp(self):
self.loader = StraightPluginLoader()
self.added_path = os.path.join(os.path.dirname(__file__), 'more-test-plugins')
self.added_path = os.path.join(os.path.dirname(__file__), 'some-test-plugins')
sys.path.append(self.added_path)
def tearDown(self):
del sys.path[-1]
del sys.path[-1]
def test_load(self):
modules = list(self.loader.load('testplugin'))
assert len(modules) == 2, modules
def test_plugin(self):
assert self.loader.load('testplugin')[0].do(1) == 2
if __name__ == '__main__':
unittest.main()
<commit_msg>Fix test case for multiple locations of a namespace<commit_after>
|
#!/usr/bin/env python
import sys
import os
import unittest
from straight.plugin.loader import StraightPluginLoader
class PluginTestCase(unittest.TestCase):
def setUp(self):
self.loader = StraightPluginLoader()
sys.path.append(os.path.join(os.path.dirname(__file__), 'more-test-plugins'))
sys.path.append(os.path.join(os.path.dirname(__file__), 'some-test-plugins'))
def tearDown(self):
del sys.path[-1]
del sys.path[-1]
def test_load(self):
modules = list(self.loader.load('testplugin'))
assert len(modules) == 2, modules
def test_plugin(self):
assert self.loader.load('testplugin')[0].do(1) == 2
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python
import sys
import os
import unittest
from straight.plugin.loader import StraightPluginLoader
class PluginTestCase(unittest.TestCase):
def setUp(self):
self.loader = StraightPluginLoader()
self.added_path = os.path.join(os.path.dirname(__file__), 'more-test-plugins')
self.added_path = os.path.join(os.path.dirname(__file__), 'some-test-plugins')
sys.path.append(self.added_path)
def tearDown(self):
del sys.path[-1]
del sys.path[-1]
def test_load(self):
modules = list(self.loader.load('testplugin'))
assert len(modules) == 2, modules
def test_plugin(self):
assert self.loader.load('testplugin')[0].do(1) == 2
if __name__ == '__main__':
unittest.main()
Fix test case for multiple locations of a namespace#!/usr/bin/env python
import sys
import os
import unittest
from straight.plugin.loader import StraightPluginLoader
class PluginTestCase(unittest.TestCase):
def setUp(self):
self.loader = StraightPluginLoader()
sys.path.append(os.path.join(os.path.dirname(__file__), 'more-test-plugins'))
sys.path.append(os.path.join(os.path.dirname(__file__), 'some-test-plugins'))
def tearDown(self):
del sys.path[-1]
del sys.path[-1]
def test_load(self):
modules = list(self.loader.load('testplugin'))
assert len(modules) == 2, modules
def test_plugin(self):
assert self.loader.load('testplugin')[0].do(1) == 2
if __name__ == '__main__':
unittest.main()
|
<commit_before>#!/usr/bin/env python
import sys
import os
import unittest
from straight.plugin.loader import StraightPluginLoader
class PluginTestCase(unittest.TestCase):
def setUp(self):
self.loader = StraightPluginLoader()
self.added_path = os.path.join(os.path.dirname(__file__), 'more-test-plugins')
self.added_path = os.path.join(os.path.dirname(__file__), 'some-test-plugins')
sys.path.append(self.added_path)
def tearDown(self):
del sys.path[-1]
del sys.path[-1]
def test_load(self):
modules = list(self.loader.load('testplugin'))
assert len(modules) == 2, modules
def test_plugin(self):
assert self.loader.load('testplugin')[0].do(1) == 2
if __name__ == '__main__':
unittest.main()
<commit_msg>Fix test case for multiple locations of a namespace<commit_after>#!/usr/bin/env python
import sys
import os
import unittest
from straight.plugin.loader import StraightPluginLoader
class PluginTestCase(unittest.TestCase):
def setUp(self):
self.loader = StraightPluginLoader()
sys.path.append(os.path.join(os.path.dirname(__file__), 'more-test-plugins'))
sys.path.append(os.path.join(os.path.dirname(__file__), 'some-test-plugins'))
def tearDown(self):
del sys.path[-1]
del sys.path[-1]
def test_load(self):
modules = list(self.loader.load('testplugin'))
assert len(modules) == 2, modules
def test_plugin(self):
assert self.loader.load('testplugin')[0].do(1) == 2
if __name__ == '__main__':
unittest.main()
|
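The corrected setUp leaves both plugin directories on sys.path, so the testplugin namespace is assembled from two locations at import time. A rough standard-library sketch of scanning several directories for candidate plugin modules (directory names follow the test; it does not claim to reproduce StraightPluginLoader's behaviour):

import os
import pkgutil

search_paths = [
    os.path.join(os.path.dirname(__file__), "more-test-plugins"),
    os.path.join(os.path.dirname(__file__), "some-test-plugins"),
]

# pkgutil walks every directory and yields (finder, name, is_package)
found = sorted(name for _, name, _ in pkgutil.iter_modules(search_paths))
print("modules visible across both plugin directories: %s" % found)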
9f994fdcc29e290b98c0938ce9e8c32dc5f8adee
|
neuroimaging/algorithms/statistics/__init__.py
|
neuroimaging/algorithms/statistics/__init__.py
|
"""
TODO
"""
__docformat__ = 'restructuredtext'
import intrinsic_volumes, rft
def test(level=1, verbosity=1, flags=[]):
from neuroimaging.utils.testutils import set_flags
set_flags(flags)
from neuroimaging.testing import *
return NumpyTest().test(level, verbosity)
|
"""
TODO
"""
__docformat__ = 'restructuredtext'
import intrinsic_volumes, rft
from neuroimaging.testing import Tester
test = Tester().test
bench = Tester().bench
|
Fix test funcs in algorithms packaging.
|
Fix test funcs in algorithms packaging.
|
Python
|
bsd-3-clause
|
musically-ut/statsmodels,bsipocz/statsmodels,hlin117/statsmodels,waynenilsen/statsmodels,statsmodels/statsmodels,bzero/statsmodels,pprett/statsmodels,yl565/statsmodels,ChadFulton/statsmodels,musically-ut/statsmodels,pprett/statsmodels,adammenges/statsmodels,kiyoto/statsmodels,bashtage/statsmodels,alekz112/statsmodels,statsmodels/statsmodels,cbmoore/statsmodels,edhuckle/statsmodels,saketkc/statsmodels,bashtage/statsmodels,gef756/statsmodels,josef-pkt/statsmodels,alekz112/statsmodels,adammenges/statsmodels,nvoron23/statsmodels,YihaoLu/statsmodels,kiyoto/statsmodels,kiyoto/statsmodels,DonBeo/statsmodels,jstoxrocky/statsmodels,saketkc/statsmodels,wesm/statsmodels,bzero/statsmodels,yl565/statsmodels,hlin117/statsmodels,waynenilsen/statsmodels,DonBeo/statsmodels,DonBeo/statsmodels,wdurhamh/statsmodels,jstoxrocky/statsmodels,waynenilsen/statsmodels,jseabold/statsmodels,bashtage/statsmodels,pprett/statsmodels,yarikoptic/pystatsmodels,nguyentu1602/statsmodels,statsmodels/statsmodels,adammenges/statsmodels,saketkc/statsmodels,detrout/debian-statsmodels,josef-pkt/statsmodels,wdurhamh/statsmodels,phobson/statsmodels,alekz112/statsmodels,edhuckle/statsmodels,cbmoore/statsmodels,bzero/statsmodels,wwf5067/statsmodels,bert9bert/statsmodels,alekz112/statsmodels,cbmoore/statsmodels,wdurhamh/statsmodels,rgommers/statsmodels,wwf5067/statsmodels,nguyentu1602/statsmodels,rgommers/statsmodels,hainm/statsmodels,bashtage/statsmodels,wzbozon/statsmodels,nvoron23/statsmodels,wkfwkf/statsmodels,wzbozon/statsmodels,phobson/statsmodels,wkfwkf/statsmodels,yarikoptic/pystatsmodels,huongttlan/statsmodels,detrout/debian-statsmodels,cbmoore/statsmodels,DonBeo/statsmodels,wzbozon/statsmodels,yarikoptic/pystatsmodels,adammenges/statsmodels,hlin117/statsmodels,josef-pkt/statsmodels,phobson/statsmodels,bzero/statsmodels,YihaoLu/statsmodels,ChadFulton/statsmodels,astocko/statsmodels,saketkc/statsmodels,huongttlan/statsmodels,rgommers/statsmodels,hainm/statsmodels,bert9bert/statsmodels,jseabold/statsmodels,pprett/statsmodels,Averroes/statsmodels,nguyentu1602/statsmodels,gef756/statsmodels,bsipocz/statsmodels,musically-ut/statsmodels,statsmodels/statsmodels,DonBeo/statsmodels,astocko/statsmodels,ChadFulton/statsmodels,musically-ut/statsmodels,nguyentu1602/statsmodels,saketkc/statsmodels,wzbozon/statsmodels,bashtage/statsmodels,nvoron23/statsmodels,rgommers/statsmodels,YihaoLu/statsmodels,bert9bert/statsmodels,jstoxrocky/statsmodels,bavardage/statsmodels,bsipocz/statsmodels,statsmodels/statsmodels,josef-pkt/statsmodels,bavardage/statsmodels,ChadFulton/statsmodels,bzero/statsmodels,bert9bert/statsmodels,nvoron23/statsmodels,statsmodels/statsmodels,wdurhamh/statsmodels,jstoxrocky/statsmodels,wwf5067/statsmodels,astocko/statsmodels,edhuckle/statsmodels,bavardage/statsmodels,kiyoto/statsmodels,hainm/statsmodels,waynenilsen/statsmodels,YihaoLu/statsmodels,bavardage/statsmodels,wzbozon/statsmodels,phobson/statsmodels,jseabold/statsmodels,gef756/statsmodels,jseabold/statsmodels,yl565/statsmodels,edhuckle/statsmodels,detrout/debian-statsmodels,jseabold/statsmodels,nvoron23/statsmodels,cbmoore/statsmodels,phobson/statsmodels,detrout/debian-statsmodels,ChadFulton/statsmodels,yl565/statsmodels,bavardage/statsmodels,bert9bert/statsmodels,wesm/statsmodels,wkfwkf/statsmodels,hainm/statsmodels,wdurhamh/statsmodels,kiyoto/statsmodels,edhuckle/statsmodels,wwf5067/statsmodels,Averroes/statsmodels,gef756/statsmodels,YihaoLu/statsmodels,ChadFulton/statsmodels,yl565/statsmodels,bsipocz/statsmodels,wkfwkf/statsmodels,Averroes/statsmodels,astocko/statsmod
els,wkfwkf/statsmodels,huongttlan/statsmodels,rgommers/statsmodels,josef-pkt/statsmodels,gef756/statsmodels,josef-pkt/statsmodels,huongttlan/statsmodels,Averroes/statsmodels,wesm/statsmodels,hlin117/statsmodels,bashtage/statsmodels
|
"""
TODO
"""
__docformat__ = 'restructuredtext'
import intrinsic_volumes, rft
def test(level=1, verbosity=1, flags=[]):
from neuroimaging.utils.testutils import set_flags
set_flags(flags)
from neuroimaging.testing import *
return NumpyTest().test(level, verbosity)
Fix test funcs in algorithms packaging.
|
"""
TODO
"""
__docformat__ = 'restructuredtext'
import intrinsic_volumes, rft
from neuroimaging.testing import Tester
test = Tester().test
bench = Tester().bench
|
<commit_before>"""
TODO
"""
__docformat__ = 'restructuredtext'
import intrinsic_volumes, rft
def test(level=1, verbosity=1, flags=[]):
from neuroimaging.utils.testutils import set_flags
set_flags(flags)
from neuroimaging.testing import *
return NumpyTest().test(level, verbosity)
<commit_msg>Fix test funcs in algorithms packaging.<commit_after>
|
"""
TODO
"""
__docformat__ = 'restructuredtext'
import intrinsic_volumes, rft
from neuroimaging.testing import Tester
test = Tester().test
bench = Tester().bench
|
"""
TODO
"""
__docformat__ = 'restructuredtext'
import intrinsic_volumes, rft
def test(level=1, verbosity=1, flags=[]):
from neuroimaging.utils.testutils import set_flags
set_flags(flags)
from neuroimaging.testing import *
return NumpyTest().test(level, verbosity)
Fix test funcs in algorithms packaging."""
TODO
"""
__docformat__ = 'restructuredtext'
import intrinsic_volumes, rft
from neuroimaging.testing import Tester
test = Tester().test
bench = Tester().bench
|
<commit_before>"""
TODO
"""
__docformat__ = 'restructuredtext'
import intrinsic_volumes, rft
def test(level=1, verbosity=1, flags=[]):
from neuroimaging.utils.testutils import set_flags
set_flags(flags)
from neuroimaging.testing import *
return NumpyTest().test(level, verbosity)
<commit_msg>Fix test funcs in algorithms packaging.<commit_after>"""
TODO
"""
__docformat__ = 'restructuredtext'
import intrinsic_volumes, rft
from neuroimaging.testing import Tester
test = Tester().test
bench = Tester().bench
|
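The record above swaps a hand-rolled NumpyTest wrapper for the NumPy-style Tester attributes, so the subpackage exposes the same test() and bench() entry points that numpy and scipy subpackages do. A minimal sketch of how such entry points are normally driven, assuming neuroimaging.algorithms is installed and importable (the module name is inferred from the repo layout, not stated in the record):

# Hypothetical driver for the Tester-based entry points introduced above.
import neuroimaging.algorithms as alg

alg.test()    # run the subpackage's test suite through the shared Tester
alg.bench()   # run benchmarks, if any are defined for the subpackage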
57cbab12a2a6d7e439fca86640ab9c721fbcc62d
|
python/xchainer/testing/array.py
|
python/xchainer/testing/array.py
|
import numpy.testing
import xchainer
# NumPy-like assertion functions that accept both NumPy and xChainer arrays
def assert_array_equal(x, y, rtol=1e-7, atol=0, err_msg='', verbose=True):
"""Raises an AssertionError if two array_like objects are not equal.
Args:
x(numpy.ndarray or xchainer.Array): The actual object to check.
y(numpy.ndarray or xchainer.Array): The desired, expected object.
err_msg(str): The error message to be printed in case of failure.
verbose(bool): If ``True``, the conflicting values
are appended to the error message.
.. seealso:: :func:`numpy.testing.assert_allclose`
"""
# TODO(sonots): Remove following explicit `to_device` transfer if conversion from
# xchainer.Array to numpy.ndarray via buffer protocol supports the device transfer.
if isinstance(x, xchainer.Array):
x = x.to_device('native:0')
if isinstance(y, xchainer.Array):
y = y.to_device('native:0')
numpy.testing.assert_allclose(
x, y, rtol, atol, err_msg=err_msg, verbose=verbose)
|
import pytest
import numpy.testing
import xchainer
# NumPy-like assertion functions that accept both NumPy and xChainer arrays
def assert_array_equal(x, y, rtol=1e-7, atol=0, err_msg='', verbose=True):
"""Raises an AssertionError if two array_like objects are not equal.
Args:
x(numpy.ndarray or xchainer.Array): The actual object to check.
y(numpy.ndarray or xchainer.Array): The desired, expected object.
err_msg(str): The error message to be printed in case of failure.
verbose(bool): If ``True``, the conflicting values
are appended to the error message.
.. seealso:: :func:`numpy.testing.assert_allclose`
"""
assert x.strides == y.strides
# TODO(sonots): Remove following explicit `to_device` transfer if conversion from
# xchainer.Array to numpy.ndarray via buffer protocol supports the device transfer.
if isinstance(x, xchainer.Array):
assert not x.is_grad_required()
x = x.to_device('native:0')
if isinstance(y, xchainer.Array):
assert not x.is_grad_required()
y = y.to_device('native:0')
numpy.testing.assert_allclose(
x, y, rtol, atol, err_msg=err_msg, verbose=verbose)
|
Add strides and is_grad_required() check
|
Add strides and is_grad_required() check
|
Python
|
mit
|
wkentaro/chainer,hvy/chainer,pfnet/chainer,ktnyt/chainer,jnishi/chainer,ktnyt/chainer,ktnyt/chainer,chainer/chainer,okuta/chainer,wkentaro/chainer,okuta/chainer,keisuke-umezawa/chainer,wkentaro/chainer,okuta/chainer,hvy/chainer,jnishi/chainer,jnishi/chainer,niboshi/chainer,tkerola/chainer,chainer/chainer,keisuke-umezawa/chainer,hvy/chainer,okuta/chainer,niboshi/chainer,chainer/chainer,keisuke-umezawa/chainer,wkentaro/chainer,niboshi/chainer,jnishi/chainer,keisuke-umezawa/chainer,hvy/chainer,niboshi/chainer,chainer/chainer,ktnyt/chainer
|
import numpy.testing
import xchainer
# NumPy-like assertion functions that accept both NumPy and xChainer arrays
def assert_array_equal(x, y, rtol=1e-7, atol=0, err_msg='', verbose=True):
"""Raises an AssertionError if two array_like objects are not equal.
Args:
x(numpy.ndarray or xchainer.Array): The actual object to check.
y(numpy.ndarray or xchainer.Array): The desired, expected object.
err_msg(str): The error message to be printed in case of failure.
verbose(bool): If ``True``, the conflicting values
are appended to the error message.
.. seealso:: :func:`numpy.testing.assert_allclose`
"""
# TODO(sonots): Remove following explicit `to_device` transfer if conversion from
# xchainer.Array to numpy.ndarray via buffer protocol supports the device transfer.
if isinstance(x, xchainer.Array):
x = x.to_device('native:0')
if isinstance(y, xchainer.Array):
y = y.to_device('native:0')
numpy.testing.assert_allclose(
x, y, rtol, atol, err_msg=err_msg, verbose=verbose)
Add strides and is_grad_required() check
|
import pytest
import numpy.testing
import xchainer
# NumPy-like assertion functions that accept both NumPy and xChainer arrays
def assert_array_equal(x, y, rtol=1e-7, atol=0, err_msg='', verbose=True):
"""Raises an AssertionError if two array_like objects are not equal.
Args:
x(numpy.ndarray or xchainer.Array): The actual object to check.
y(numpy.ndarray or xchainer.Array): The desired, expected object.
err_msg(str): The error message to be printed in case of failure.
verbose(bool): If ``True``, the conflicting values
are appended to the error message.
.. seealso:: :func:`numpy.testing.assert_allclose`
"""
assert x.strides == y.strides
# TODO(sonots): Remove following explicit `to_device` transfer if conversion from
# xchainer.Array to numpy.ndarray via buffer protocol supports the device transfer.
if isinstance(x, xchainer.Array):
assert not x.is_grad_required()
x = x.to_device('native:0')
if isinstance(y, xchainer.Array):
assert not x.is_grad_required()
y = y.to_device('native:0')
numpy.testing.assert_allclose(
x, y, rtol, atol, err_msg=err_msg, verbose=verbose)
|
<commit_before>import numpy.testing
import xchainer
# NumPy-like assertion functions that accept both NumPy and xChainer arrays
def assert_array_equal(x, y, rtol=1e-7, atol=0, err_msg='', verbose=True):
"""Raises an AssertionError if two array_like objects are not equal.
Args:
x(numpy.ndarray or xchainer.Array): The actual object to check.
y(numpy.ndarray or xchainer.Array): The desired, expected object.
err_msg(str): The error message to be printed in case of failure.
verbose(bool): If ``True``, the conflicting values
are appended to the error message.
.. seealso:: :func:`numpy.testing.assert_allclose`
"""
# TODO(sonots): Remove following explicit `to_device` transfer if conversion from
# xchainer.Array to numpy.ndarray via buffer protocol supports the device transfer.
if isinstance(x, xchainer.Array):
x = x.to_device('native:0')
if isinstance(y, xchainer.Array):
y = y.to_device('native:0')
numpy.testing.assert_allclose(
x, y, rtol, atol, err_msg=err_msg, verbose=verbose)
<commit_msg>Add strides and is_grad_required() check<commit_after>
|
import pytest
import numpy.testing
import xchainer
# NumPy-like assertion functions that accept both NumPy and xChainer arrays
def assert_array_equal(x, y, rtol=1e-7, atol=0, err_msg='', verbose=True):
"""Raises an AssertionError if two array_like objects are not equal.
Args:
x(numpy.ndarray or xchainer.Array): The actual object to check.
y(numpy.ndarray or xchainer.Array): The desired, expected object.
err_msg(str): The error message to be printed in case of failure.
verbose(bool): If ``True``, the conflicting values
are appended to the error message.
.. seealso:: :func:`numpy.testing.assert_allclose`
"""
assert x.strides == y.strides
# TODO(sonots): Remove following explicit `to_device` transfer if conversion from
# xchainer.Array to numpy.ndarray via buffer protocol supports the device transfer.
if isinstance(x, xchainer.Array):
assert not x.is_grad_required()
x = x.to_device('native:0')
if isinstance(y, xchainer.Array):
assert not x.is_grad_required()
y = y.to_device('native:0')
numpy.testing.assert_allclose(
x, y, rtol, atol, err_msg=err_msg, verbose=verbose)
|
import numpy.testing
import xchainer
# NumPy-like assertion functions that accept both NumPy and xChainer arrays
def assert_array_equal(x, y, rtol=1e-7, atol=0, err_msg='', verbose=True):
"""Raises an AssertionError if two array_like objects are not equal.
Args:
x(numpy.ndarray or xchainer.Array): The actual object to check.
y(numpy.ndarray or xchainer.Array): The desired, expected object.
err_msg(str): The error message to be printed in case of failure.
verbose(bool): If ``True``, the conflicting values
are appended to the error message.
.. seealso:: :func:`numpy.testing.assert_allclose`
"""
# TODO(sonots): Remove following explicit `to_device` transfer if conversion from
# xchainer.Array to numpy.ndarray via buffer protocol supports the device transfer.
if isinstance(x, xchainer.Array):
x = x.to_device('native:0')
if isinstance(y, xchainer.Array):
y = y.to_device('native:0')
numpy.testing.assert_allclose(
x, y, rtol, atol, err_msg=err_msg, verbose=verbose)
Add strides and is_grad_required() checkimport pytest
import numpy.testing
import xchainer
# NumPy-like assertion functions that accept both NumPy and xChainer arrays
def assert_array_equal(x, y, rtol=1e-7, atol=0, err_msg='', verbose=True):
"""Raises an AssertionError if two array_like objects are not equal.
Args:
x(numpy.ndarray or xchainer.Array): The actual object to check.
y(numpy.ndarray or xchainer.Array): The desired, expected object.
err_msg(str): The error message to be printed in case of failure.
verbose(bool): If ``True``, the conflicting values
are appended to the error message.
.. seealso:: :func:`numpy.testing.assert_allclose`
"""
assert x.strides == y.strides
# TODO(sonots): Remove following explicit `to_device` transfer if conversion from
# xchainer.Array to numpy.ndarray via buffer protocol supports the device transfer.
if isinstance(x, xchainer.Array):
assert not x.is_grad_required()
x = x.to_device('native:0')
if isinstance(y, xchainer.Array):
assert not x.is_grad_required()
y = y.to_device('native:0')
numpy.testing.assert_allclose(
x, y, rtol, atol, err_msg=err_msg, verbose=verbose)
|
<commit_before>import numpy.testing
import xchainer
# NumPy-like assertion functions that accept both NumPy and xChainer arrays
def assert_array_equal(x, y, rtol=1e-7, atol=0, err_msg='', verbose=True):
"""Raises an AssertionError if two array_like objects are not equal.
Args:
x(numpy.ndarray or xchainer.Array): The actual object to check.
y(numpy.ndarray or xchainer.Array): The desired, expected object.
err_msg(str): The error message to be printed in case of failure.
verbose(bool): If ``True``, the conflicting values
are appended to the error message.
.. seealso:: :func:`numpy.testing.assert_allclose`
"""
# TODO(sonots): Remove following explicit `to_device` transfer if conversion from
# xchainer.Array to numpy.ndarray via buffer protocol supports the device transfer.
if isinstance(x, xchainer.Array):
x = x.to_device('native:0')
if isinstance(y, xchainer.Array):
y = y.to_device('native:0')
numpy.testing.assert_allclose(
x, y, rtol, atol, err_msg=err_msg, verbose=verbose)
<commit_msg>Add strides and is_grad_required() check<commit_after>import pytest
import numpy.testing
import xchainer
# NumPy-like assertion functions that accept both NumPy and xChainer arrays
def assert_array_equal(x, y, rtol=1e-7, atol=0, err_msg='', verbose=True):
"""Raises an AssertionError if two array_like objects are not equal.
Args:
x(numpy.ndarray or xchainer.Array): The actual object to check.
y(numpy.ndarray or xchainer.Array): The desired, expected object.
err_msg(str): The error message to be printed in case of failure.
verbose(bool): If ``True``, the conflicting values
are appended to the error message.
.. seealso:: :func:`numpy.testing.assert_allclose`
"""
assert x.strides == y.strides
# TODO(sonots): Remove following explicit `to_device` transfer if conversion from
# xchainer.Array to numpy.ndarray via buffer protocol supports the device transfer.
if isinstance(x, xchainer.Array):
assert not x.is_grad_required()
x = x.to_device('native:0')
if isinstance(y, xchainer.Array):
assert not x.is_grad_required()
y = y.to_device('native:0')
numpy.testing.assert_allclose(
x, y, rtol, atol, err_msg=err_msg, verbose=verbose)
|
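Beyond keeping the allclose comparison, the updated helper above now also insists that both operands share the same strides and that xchainer arrays arrive without gradients enabled; note that the second isinstance branch re-checks x.is_grad_required() rather than y, and pytest is imported without being used, which may or may not be intentional. A small NumPy-only sketch of what the strides requirement adds on top of plain value comparison (the shapes and dtypes here are arbitrary choices for illustration):

import numpy as np

a = np.arange(6, dtype=np.float32).reshape(2, 3)
b = np.ascontiguousarray(a)

# Same values and same memory layout: passes both the strides check and allclose.
assert a.strides == b.strides
np.testing.assert_allclose(a, b)

# A Fortran-ordered copy holds equal values, so allclose alone would accept it,
# but its strides differ, so the tightened helper would reject the pair.
c = np.asfortranarray(a)
assert a.strides != c.strides
np.testing.assert_allclose(a, c)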
90f80bf6dd8a6fe0b15f802465cdcf759bd7b8ff
|
simplespamblocker/forms.py
|
simplespamblocker/forms.py
|
# -*- coding: utf-8 -*-
import re
from django import forms
class ValidRegexField(forms.CharField):
def clean(self, value):
value = super(ValidRegexField, self).clean(value)
if value:
try:
re.compile(value)
except re.error as e:
raise forms.ValidationError('Please input valid regexp: %s' % e)
return value
|
# -*- coding: utf-8 -*-
import re
from django import forms
class ValidRegexField(forms.CharField):
def clean(self, value):
value = super(ValidRegexField, self).clean(value)
if value:
try:
re.compile(value)
except re.error, e:
raise forms.ValidationError('Please input valid regexp: %s' % e)
return value
|
Fix syntax error for python 2.5
|
Fix syntax error for python 2.5
|
Python
|
bsd-3-clause
|
moqada/django-simple-spam-blocker
|
# -*- coding: utf-8 -*-
import re
from django import forms
class ValidRegexField(forms.CharField):
def clean(self, value):
value = super(ValidRegexField, self).clean(value)
if value:
try:
re.compile(value)
except re.error as e:
raise forms.ValidationError('Please input valid regexp: %s' % e)
return value
Fix syntax error for python 2.5
|
# -*- coding: utf-8 -*-
import re
from django import forms
class ValidRegexField(forms.CharField):
def clean(self, value):
value = super(ValidRegexField, self).clean(value)
if value:
try:
re.compile(value)
except re.error, e:
raise forms.ValidationError('Please input valid regexp: %s' % e)
return value
|
<commit_before># -*- coding: utf-8 -*-
import re
from django import forms
class ValidRegexField(forms.CharField):
def clean(self, value):
value = super(ValidRegexField, self).clean(value)
if value:
try:
re.compile(value)
except re.error as e:
raise forms.ValidationError('Please input valid regexp: %s' % e)
return value
<commit_msg>Fix syntax error for python 2.5<commit_after>
|
# -*- coding: utf-8 -*-
import re
from django import forms
class ValidRegexField(forms.CharField):
def clean(self, value):
value = super(ValidRegexField, self).clean(value)
if value:
try:
re.compile(value)
except re.error, e:
raise forms.ValidationError('Please input valid regexp: %s' % e)
return value
|
# -*- coding: utf-8 -*-
import re
from django import forms
class ValidRegexField(forms.CharField):
def clean(self, value):
value = super(ValidRegexField, self).clean(value)
if value:
try:
re.compile(value)
except re.error as e:
raise forms.ValidationError('Please input valid regexp: %s' % e)
return value
Fix syntax error for python 2.5# -*- coding: utf-8 -*-
import re
from django import forms
class ValidRegexField(forms.CharField):
def clean(self, value):
value = super(ValidRegexField, self).clean(value)
if value:
try:
re.compile(value)
except re.error, e:
raise forms.ValidationError('Please input valid regexp: %s' % e)
return value
|
<commit_before># -*- coding: utf-8 -*-
import re
from django import forms
class ValidRegexField(forms.CharField):
def clean(self, value):
value = super(ValidRegexField, self).clean(value)
if value:
try:
re.compile(value)
except re.error as e:
raise forms.ValidationError('Please input valid regexp: %s' % e)
return value
<commit_msg>Fix syntax error for python 2.5<commit_after># -*- coding: utf-8 -*-
import re
from django import forms
class ValidRegexField(forms.CharField):
def clean(self, value):
value = super(ValidRegexField, self).clean(value)
if value:
try:
re.compile(value)
except re.error, e:
raise forms.ValidationError('Please input valid regexp: %s' % e)
return value
|
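The change above is purely syntactic: 'except re.error as e' only parses on Python 2.6 and later, while the comma form adopted here is what Python 2.5 accepts (and is itself rejected by Python 3). For reference, a spelling that parses on both old and new interpreters fetches the exception object explicitly; this sketch drops the Django ValidationError and raises a plain ValueError, so it illustrates the pattern rather than being a drop-in replacement:

import re
import sys

def compile_or_error(value):
    """Compile a regex, re-raising a readable error for invalid patterns."""
    try:
        return re.compile(value)
    except re.error:
        # sys.exc_info() works on Python 2.5 and Python 3 alike.
        e = sys.exc_info()[1]
        raise ValueError('Please input valid regexp: %s' % e)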
27967818b58b2630a6282999e7b39af618716f91
|
scheduler.py
|
scheduler.py
|
import logging
from apscheduler.schedulers.blocking import BlockingScheduler
from raven.base import Client as RavenClient
import warner
import archiver
import announcer
import flagger
from config import Config
_config = Config()
raven_client = RavenClient()
# When testing changes, set the "TEST_SCHEDULE" envvar to run more often
if _config.test_schedule:
schedule_kwargs = {"hour": "*", "minute": "*/10"}
else:
schedule_kwargs = {"hour": 4}
sched = BlockingScheduler()
@sched.scheduled_job("cron", **schedule_kwargs)
def destalinate_job():
logging.info("Destalinating")
if not _config.sb_token or not _config.api_token:
logging.error(
"Missing at least one required Slack environment variable.\n"
"Make sure to set DESTALINATOR_SB_TOKEN and DESTALINATOR_API_TOKEN."
)
else:
try:
warner.Warner().warn()
archiver.Archiver().archive()
announcer.Announcer().announce()
flagger.Flagger().flag()
logging.info("OK: destalinated")
except Exception as e: # pylint: disable=W0703
raven_client.captureException()
if not _config.sentry_dsn:
raise e
logging.info("END: destalinate_job")
if __name__ == "__main__":
sched.start()
|
import logging
import os
from apscheduler.schedulers.blocking import BlockingScheduler
from raven.base import Client as RavenClient
import warner
import archiver
import announcer
import flagger
from config import Config
_config = Config()
raven_client = RavenClient()
# When testing changes, set the "TEST_SCHEDULE" envvar to run more often
if _config.test_schedule:
schedule_kwargs = {"hour": "*", "minute": "*/10"}
else:
schedule_kwargs = {"hour": 4}
sched = BlockingScheduler()
@sched.scheduled_job("cron", **schedule_kwargs)
def destalinate_job():
logging.info("Destalinating")
if not _config.sb_token or not _config.api_token:
logging.error(
"Missing at least one required Slack environment variable.\n"
"Make sure to set DESTALINATOR_SB_TOKEN and DESTALINATOR_API_TOKEN."
)
else:
try:
warner.Warner().warn()
archiver.Archiver().archive()
announcer.Announcer().announce()
flagger.Flagger().flag()
logging.info("OK: destalinated")
except Exception as e: # pylint: disable=W0703
raven_client.captureException()
if not _config.sentry_dsn:
raise e
logging.info("END: destalinate_job")
if __name__ == "__main__":
# Use RUN_ONCE to only run the destalinate job once immediately
if os.getenv("RUN_ONCE"):
destalinate_job()
else:
sched.start()
|
Use RUN_ONCE to only run the destalinate job once immediately
|
Use RUN_ONCE to only run the destalinate job once immediately
|
Python
|
apache-2.0
|
TheConnMan/destalinator,TheConnMan/destalinator,royrapoport/destalinator,randsleadershipslack/destalinator,royrapoport/destalinator,randsleadershipslack/destalinator
|
import logging
from apscheduler.schedulers.blocking import BlockingScheduler
from raven.base import Client as RavenClient
import warner
import archiver
import announcer
import flagger
from config import Config
_config = Config()
raven_client = RavenClient()
# When testing changes, set the "TEST_SCHEDULE" envvar to run more often
if _config.test_schedule:
schedule_kwargs = {"hour": "*", "minute": "*/10"}
else:
schedule_kwargs = {"hour": 4}
sched = BlockingScheduler()
@sched.scheduled_job("cron", **schedule_kwargs)
def destalinate_job():
logging.info("Destalinating")
if not _config.sb_token or not _config.api_token:
logging.error(
"Missing at least one required Slack environment variable.\n"
"Make sure to set DESTALINATOR_SB_TOKEN and DESTALINATOR_API_TOKEN."
)
else:
try:
warner.Warner().warn()
archiver.Archiver().archive()
announcer.Announcer().announce()
flagger.Flagger().flag()
logging.info("OK: destalinated")
except Exception as e: # pylint: disable=W0703
raven_client.captureException()
if not _config.sentry_dsn:
raise e
logging.info("END: destalinate_job")
if __name__ == "__main__":
sched.start()
Use RUN_ONCE to only run the destalinate job once immediately
|
import logging
import os
from apscheduler.schedulers.blocking import BlockingScheduler
from raven.base import Client as RavenClient
import warner
import archiver
import announcer
import flagger
from config import Config
_config = Config()
raven_client = RavenClient()
# When testing changes, set the "TEST_SCHEDULE" envvar to run more often
if _config.test_schedule:
schedule_kwargs = {"hour": "*", "minute": "*/10"}
else:
schedule_kwargs = {"hour": 4}
sched = BlockingScheduler()
@sched.scheduled_job("cron", **schedule_kwargs)
def destalinate_job():
logging.info("Destalinating")
if not _config.sb_token or not _config.api_token:
logging.error(
"Missing at least one required Slack environment variable.\n"
"Make sure to set DESTALINATOR_SB_TOKEN and DESTALINATOR_API_TOKEN."
)
else:
try:
warner.Warner().warn()
archiver.Archiver().archive()
announcer.Announcer().announce()
flagger.Flagger().flag()
logging.info("OK: destalinated")
except Exception as e: # pylint: disable=W0703
raven_client.captureException()
if not _config.sentry_dsn:
raise e
logging.info("END: destalinate_job")
if __name__ == "__main__":
# Use RUN_ONCE to only run the destalinate job once immediately
if os.getenv("RUN_ONCE"):
destalinate_job()
else:
sched.start()
|
<commit_before>import logging
from apscheduler.schedulers.blocking import BlockingScheduler
from raven.base import Client as RavenClient
import warner
import archiver
import announcer
import flagger
from config import Config
_config = Config()
raven_client = RavenClient()
# When testing changes, set the "TEST_SCHEDULE" envvar to run more often
if _config.test_schedule:
schedule_kwargs = {"hour": "*", "minute": "*/10"}
else:
schedule_kwargs = {"hour": 4}
sched = BlockingScheduler()
@sched.scheduled_job("cron", **schedule_kwargs)
def destalinate_job():
logging.info("Destalinating")
if not _config.sb_token or not _config.api_token:
logging.error(
"Missing at least one required Slack environment variable.\n"
"Make sure to set DESTALINATOR_SB_TOKEN and DESTALINATOR_API_TOKEN."
)
else:
try:
warner.Warner().warn()
archiver.Archiver().archive()
announcer.Announcer().announce()
flagger.Flagger().flag()
logging.info("OK: destalinated")
except Exception as e: # pylint: disable=W0703
raven_client.captureException()
if not _config.sentry_dsn:
raise e
logging.info("END: destalinate_job")
if __name__ == "__main__":
sched.start()
<commit_msg>Use RUN_ONCE to only run the destalinate job once immediately<commit_after>
|
import logging
import os
from apscheduler.schedulers.blocking import BlockingScheduler
from raven.base import Client as RavenClient
import warner
import archiver
import announcer
import flagger
from config import Config
_config = Config()
raven_client = RavenClient()
# When testing changes, set the "TEST_SCHEDULE" envvar to run more often
if _config.test_schedule:
schedule_kwargs = {"hour": "*", "minute": "*/10"}
else:
schedule_kwargs = {"hour": 4}
sched = BlockingScheduler()
@sched.scheduled_job("cron", **schedule_kwargs)
def destalinate_job():
logging.info("Destalinating")
if not _config.sb_token or not _config.api_token:
logging.error(
"Missing at least one required Slack environment variable.\n"
"Make sure to set DESTALINATOR_SB_TOKEN and DESTALINATOR_API_TOKEN."
)
else:
try:
warner.Warner().warn()
archiver.Archiver().archive()
announcer.Announcer().announce()
flagger.Flagger().flag()
logging.info("OK: destalinated")
except Exception as e: # pylint: disable=W0703
raven_client.captureException()
if not _config.sentry_dsn:
raise e
logging.info("END: destalinate_job")
if __name__ == "__main__":
# Use RUN_ONCE to only run the destalinate job once immediately
if os.getenv("RUN_ONCE"):
destalinate_job()
else:
sched.start()
|
import logging
from apscheduler.schedulers.blocking import BlockingScheduler
from raven.base import Client as RavenClient
import warner
import archiver
import announcer
import flagger
from config import Config
_config = Config()
raven_client = RavenClient()
# When testing changes, set the "TEST_SCHEDULE" envvar to run more often
if _config.test_schedule:
schedule_kwargs = {"hour": "*", "minute": "*/10"}
else:
schedule_kwargs = {"hour": 4}
sched = BlockingScheduler()
@sched.scheduled_job("cron", **schedule_kwargs)
def destalinate_job():
logging.info("Destalinating")
if not _config.sb_token or not _config.api_token:
logging.error(
"Missing at least one required Slack environment variable.\n"
"Make sure to set DESTALINATOR_SB_TOKEN and DESTALINATOR_API_TOKEN."
)
else:
try:
warner.Warner().warn()
archiver.Archiver().archive()
announcer.Announcer().announce()
flagger.Flagger().flag()
logging.info("OK: destalinated")
except Exception as e: # pylint: disable=W0703
raven_client.captureException()
if not _config.sentry_dsn:
raise e
logging.info("END: destalinate_job")
if __name__ == "__main__":
sched.start()
Use RUN_ONCE to only run the destalinate job once immediatelyimport logging
import os
from apscheduler.schedulers.blocking import BlockingScheduler
from raven.base import Client as RavenClient
import warner
import archiver
import announcer
import flagger
from config import Config
_config = Config()
raven_client = RavenClient()
# When testing changes, set the "TEST_SCHEDULE" envvar to run more often
if _config.test_schedule:
schedule_kwargs = {"hour": "*", "minute": "*/10"}
else:
schedule_kwargs = {"hour": 4}
sched = BlockingScheduler()
@sched.scheduled_job("cron", **schedule_kwargs)
def destalinate_job():
logging.info("Destalinating")
if not _config.sb_token or not _config.api_token:
logging.error(
"Missing at least one required Slack environment variable.\n"
"Make sure to set DESTALINATOR_SB_TOKEN and DESTALINATOR_API_TOKEN."
)
else:
try:
warner.Warner().warn()
archiver.Archiver().archive()
announcer.Announcer().announce()
flagger.Flagger().flag()
logging.info("OK: destalinated")
except Exception as e: # pylint: disable=W0703
raven_client.captureException()
if not _config.sentry_dsn:
raise e
logging.info("END: destalinate_job")
if __name__ == "__main__":
# Use RUN_ONCE to only run the destalinate job once immediately
if os.getenv("RUN_ONCE"):
destalinate_job()
else:
sched.start()
|
<commit_before>import logging
from apscheduler.schedulers.blocking import BlockingScheduler
from raven.base import Client as RavenClient
import warner
import archiver
import announcer
import flagger
from config import Config
_config = Config()
raven_client = RavenClient()
# When testing changes, set the "TEST_SCHEDULE" envvar to run more often
if _config.test_schedule:
schedule_kwargs = {"hour": "*", "minute": "*/10"}
else:
schedule_kwargs = {"hour": 4}
sched = BlockingScheduler()
@sched.scheduled_job("cron", **schedule_kwargs)
def destalinate_job():
logging.info("Destalinating")
if not _config.sb_token or not _config.api_token:
logging.error(
"Missing at least one required Slack environment variable.\n"
"Make sure to set DESTALINATOR_SB_TOKEN and DESTALINATOR_API_TOKEN."
)
else:
try:
warner.Warner().warn()
archiver.Archiver().archive()
announcer.Announcer().announce()
flagger.Flagger().flag()
logging.info("OK: destalinated")
except Exception as e: # pylint: disable=W0703
raven_client.captureException()
if not _config.sentry_dsn:
raise e
logging.info("END: destalinate_job")
if __name__ == "__main__":
sched.start()
<commit_msg>Use RUN_ONCE to only run the destalinate job once immediately<commit_after>import logging
import os
from apscheduler.schedulers.blocking import BlockingScheduler
from raven.base import Client as RavenClient
import warner
import archiver
import announcer
import flagger
from config import Config
_config = Config()
raven_client = RavenClient()
# When testing changes, set the "TEST_SCHEDULE" envvar to run more often
if _config.test_schedule:
schedule_kwargs = {"hour": "*", "minute": "*/10"}
else:
schedule_kwargs = {"hour": 4}
sched = BlockingScheduler()
@sched.scheduled_job("cron", **schedule_kwargs)
def destalinate_job():
logging.info("Destalinating")
if not _config.sb_token or not _config.api_token:
logging.error(
"Missing at least one required Slack environment variable.\n"
"Make sure to set DESTALINATOR_SB_TOKEN and DESTALINATOR_API_TOKEN."
)
else:
try:
warner.Warner().warn()
archiver.Archiver().archive()
announcer.Announcer().announce()
flagger.Flagger().flag()
logging.info("OK: destalinated")
except Exception as e: # pylint: disable=W0703
raven_client.captureException()
if not _config.sentry_dsn:
raise e
logging.info("END: destalinate_job")
if __name__ == "__main__":
# Use RUN_ONCE to only run the destalinate job once immediately
if os.getenv("RUN_ONCE"):
destalinate_job()
else:
sched.start()
|
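The toggle recorded above reads an environment variable at startup: os.getenv returns the variable's string value or None, so any non-empty value (for example RUN_ONCE=1) selects the one-shot path, and the standard-library os module has to be imported next to logging for the check to work. A stripped-down sketch of the same pattern, with destalinate_job and sched standing in for the objects defined in scheduler.py:

import os

def run(destalinate_job, sched):
    # Any non-empty RUN_ONCE value means "run the job once and exit",
    # which is handy for cron jobs or manual smoke tests.
    if os.getenv("RUN_ONCE"):
        destalinate_job()
    else:
        # Otherwise hand control to APScheduler's blocking loop.
        sched.start()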
0e07e0127e359cbf6c97d6f470fb51d15d7544bc
|
scripts/utils.py
|
scripts/utils.py
|
#!/usr/bin/env python3
# Touhou Community Reliant Automatic Patcher
# Scripts
#
# ----
#
"""Utility functions shared among all the scripts."""
import json
import os
# Default parameters for JSON input and output
def json_load(fn):
with open(fn, 'r', encoding='utf-8') as file:
return json.load(file)
def json_store(fn, obj, dirs=['']):
"""Saves the JSON object [obj] to [fn], creating all necessary
directories in the process. If [dirs] is given, the function is
executed for every root directory in the array."""
for i in dirs:
full_fn = os.path.join(i, fn)
os.makedirs(os.path.dirname(full_fn), exist_ok=True)
with open(full_fn, 'w', encoding='utf-8') as file:
json.dump(
obj, file, ensure_ascii=False, sort_keys=True, indent='\t',
separators=(',', ': ')
)
file.write('\n')
|
#!/usr/bin/env python3
# Touhou Community Reliant Automatic Patcher
# Scripts
#
# ----
#
"""Utility functions shared among all the scripts."""
import json
import os
json_dump_params = {
'ensure_ascii': False,
'indent': '\t',
'separators': (',', ': '),
'sort_keys': True
}
# Default parameters for JSON input and output
def json_load(fn):
with open(fn, 'r', encoding='utf-8') as file:
return json.load(file)
def json_store(fn, obj, dirs=['']):
"""Saves the JSON object [obj] to [fn], creating all necessary
directories in the process. If [dirs] is given, the function is
executed for every root directory in the array."""
for i in dirs:
full_fn = os.path.join(i, fn)
os.makedirs(os.path.dirname(full_fn), exist_ok=True)
with open(full_fn, 'w', encoding='utf-8') as file:
json.dump(obj, file, **json_dump_params)
file.write('\n')
|
Move JSON dump parameters to a global dictionary.
|
scripts: Move JSON dump parameters to a global dictionary.
|
Python
|
unlicense
|
VBChunguk/thcrap,VBChunguk/thcrap,thpatch/thcrap,VBChunguk/thcrap,thpatch/thcrap,thpatch/thcrap,thpatch/thcrap,thpatch/thcrap
|
#!/usr/bin/env python3
# Touhou Community Reliant Automatic Patcher
# Scripts
#
# ----
#
"""Utility functions shared among all the scripts."""
import json
import os
# Default parameters for JSON input and output
def json_load(fn):
with open(fn, 'r', encoding='utf-8') as file:
return json.load(file)
def json_store(fn, obj, dirs=['']):
"""Saves the JSON object [obj] to [fn], creating all necessary
directories in the process. If [dirs] is given, the function is
executed for every root directory in the array."""
for i in dirs:
full_fn = os.path.join(i, fn)
os.makedirs(os.path.dirname(full_fn), exist_ok=True)
with open(full_fn, 'w', encoding='utf-8') as file:
json.dump(
obj, file, ensure_ascii=False, sort_keys=True, indent='\t',
separators=(',', ': ')
)
file.write('\n')
scripts: Move JSON dump parameters to a global dictionary.
|
#!/usr/bin/env python3
# Touhou Community Reliant Automatic Patcher
# Scripts
#
# ----
#
"""Utility functions shared among all the scripts."""
import json
import os
json_dump_params = {
'ensure_ascii': False,
'indent': '\t',
'separators': (',', ': '),
'sort_keys': True
}
# Default parameters for JSON input and output
def json_load(fn):
with open(fn, 'r', encoding='utf-8') as file:
return json.load(file)
def json_store(fn, obj, dirs=['']):
"""Saves the JSON object [obj] to [fn], creating all necessary
directories in the process. If [dirs] is given, the function is
executed for every root directory in the array."""
for i in dirs:
full_fn = os.path.join(i, fn)
os.makedirs(os.path.dirname(full_fn), exist_ok=True)
with open(full_fn, 'w', encoding='utf-8') as file:
json.dump(obj, file, **json_dump_params)
file.write('\n')
|
<commit_before>#!/usr/bin/env python3
# Touhou Community Reliant Automatic Patcher
# Scripts
#
# ----
#
"""Utility functions shared among all the scripts."""
import json
import os
# Default parameters for JSON input and output
def json_load(fn):
with open(fn, 'r', encoding='utf-8') as file:
return json.load(file)
def json_store(fn, obj, dirs=['']):
"""Saves the JSON object [obj] to [fn], creating all necessary
directories in the process. If [dirs] is given, the function is
executed for every root directory in the array."""
for i in dirs:
full_fn = os.path.join(i, fn)
os.makedirs(os.path.dirname(full_fn), exist_ok=True)
with open(full_fn, 'w', encoding='utf-8') as file:
json.dump(
obj, file, ensure_ascii=False, sort_keys=True, indent='\t',
separators=(',', ': ')
)
file.write('\n')
<commit_msg>scripts: Move JSON dump parameters to a global dictionary.<commit_after>
|
#!/usr/bin/env python3
# Touhou Community Reliant Automatic Patcher
# Scripts
#
# ----
#
"""Utility functions shared among all the scripts."""
import json
import os
json_dump_params = {
'ensure_ascii': False,
'indent': '\t',
'separators': (',', ': '),
'sort_keys': True
}
# Default parameters for JSON input and output
def json_load(fn):
with open(fn, 'r', encoding='utf-8') as file:
return json.load(file)
def json_store(fn, obj, dirs=['']):
"""Saves the JSON object [obj] to [fn], creating all necessary
directories in the process. If [dirs] is given, the function is
executed for every root directory in the array."""
for i in dirs:
full_fn = os.path.join(i, fn)
os.makedirs(os.path.dirname(full_fn), exist_ok=True)
with open(full_fn, 'w', encoding='utf-8') as file:
json.dump(obj, file, **json_dump_params)
file.write('\n')
|
#!/usr/bin/env python3
# Touhou Community Reliant Automatic Patcher
# Scripts
#
# ----
#
"""Utility functions shared among all the scripts."""
import json
import os
# Default parameters for JSON input and output
def json_load(fn):
with open(fn, 'r', encoding='utf-8') as file:
return json.load(file)
def json_store(fn, obj, dirs=['']):
"""Saves the JSON object [obj] to [fn], creating all necessary
directories in the process. If [dirs] is given, the function is
executed for every root directory in the array."""
for i in dirs:
full_fn = os.path.join(i, fn)
os.makedirs(os.path.dirname(full_fn), exist_ok=True)
with open(full_fn, 'w', encoding='utf-8') as file:
json.dump(
obj, file, ensure_ascii=False, sort_keys=True, indent='\t',
separators=(',', ': ')
)
file.write('\n')
scripts: Move JSON dump parameters to a global dictionary.#!/usr/bin/env python3
# Touhou Community Reliant Automatic Patcher
# Scripts
#
# ----
#
"""Utility functions shared among all the scripts."""
import json
import os
json_dump_params = {
'ensure_ascii': False,
'indent': '\t',
'separators': (',', ': '),
'sort_keys': True
}
# Default parameters for JSON input and output
def json_load(fn):
with open(fn, 'r', encoding='utf-8') as file:
return json.load(file)
def json_store(fn, obj, dirs=['']):
"""Saves the JSON object [obj] to [fn], creating all necessary
directories in the process. If [dirs] is given, the function is
executed for every root directory in the array."""
for i in dirs:
full_fn = os.path.join(i, fn)
os.makedirs(os.path.dirname(full_fn), exist_ok=True)
with open(full_fn, 'w', encoding='utf-8') as file:
json.dump(obj, file, **json_dump_params)
file.write('\n')
|
<commit_before>#!/usr/bin/env python3
# Touhou Community Reliant Automatic Patcher
# Scripts
#
# ----
#
"""Utility functions shared among all the scripts."""
import json
import os
# Default parameters for JSON input and output
def json_load(fn):
with open(fn, 'r', encoding='utf-8') as file:
return json.load(file)
def json_store(fn, obj, dirs=['']):
"""Saves the JSON object [obj] to [fn], creating all necessary
directories in the process. If [dirs] is given, the function is
executed for every root directory in the array."""
for i in dirs:
full_fn = os.path.join(i, fn)
os.makedirs(os.path.dirname(full_fn), exist_ok=True)
with open(full_fn, 'w', encoding='utf-8') as file:
json.dump(
obj, file, ensure_ascii=False, sort_keys=True, indent='\t',
separators=(',', ': ')
)
file.write('\n')
<commit_msg>scripts: Move JSON dump parameters to a global dictionary.<commit_after>#!/usr/bin/env python3
# Touhou Community Reliant Automatic Patcher
# Scripts
#
# ----
#
"""Utility functions shared among all the scripts."""
import json
import os
json_dump_params = {
'ensure_ascii': False,
'indent': '\t',
'separators': (',', ': '),
'sort_keys': True
}
# Default parameters for JSON input and output
def json_load(fn):
with open(fn, 'r', encoding='utf-8') as file:
return json.load(file)
def json_store(fn, obj, dirs=['']):
"""Saves the JSON object [obj] to [fn], creating all necessary
directories in the process. If [dirs] is given, the function is
executed for every root directory in the array."""
for i in dirs:
full_fn = os.path.join(i, fn)
os.makedirs(os.path.dirname(full_fn), exist_ok=True)
with open(full_fn, 'w', encoding='utf-8') as file:
json.dump(obj, file, **json_dump_params)
file.write('\n')
|
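Hoisting the dump settings into a module-level dict means every writer in the scripts can produce byte-identical formatting. A short sketch of reusing the same dict for an in-memory string, mirroring what json_store writes to disk; the dict literal is repeated here only to keep the example self-contained:

import json

json_dump_params = {
    'ensure_ascii': False,
    'indent': '\t',
    'separators': (',', ': '),
    'sort_keys': True
}

# Produces the same tab-indented, non-ASCII-preserving layout as json_store,
# plus the trailing newline the script appends after json.dump.
text = json.dumps({'title': '東方', 'version': 1}, **json_dump_params) + '\n'
print(text)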
66aa43a5e8963c440261128e5b317679d01917e6
|
server/routes.py
|
server/routes.py
|
from __init__ import app, db
from subprocess import call
from models import User
from flask import request
from flask import abort
from flask import jsonify
@app.route('/register', methods=['POST'])
def register():
if not request.json or not 'guid' in request.json:
abort(400) # Malformed Packet
guid = request.json['guid']
user = User(guid)
db.session.add(user)
db.session.commit()
registerObject = {
'id': user.guid
}
return jsonify(registerObject), 201
@app.route('/phone', methods=['POST'])
def phone():
if not request.json or (not ('call-time' in request.json)) or (not ('id' in request.json)):
abort(400) # Malformed Packet
user = User.query.filter_by(id=request.json["id"]).first()
if not user: #Check database for id to make sure it exists
abort(401)
# Todo Steve will do this
return "", 201
@app.route('/')
def landing_page():
return 'Nothing seems to be here'
@app.route('/update-server', methods=['GET', 'POST'])
def update():
call(["git pull"], shell=True)
return 'Success!'
|
from __init__ import app, db
from subprocess import call
from models import User
from flask import request
from flask import abort
from flask import jsonify
@app.route('/register', methods=['POST'])
def register():
if not request.json or not 'guid' in request.json:
abort(400) # Malformed Packet
guid = request.json['guid']
user = User(guid)
db.session.add(user)
db.session.commit()
registerObject = {
'id': user.guid
}
return jsonify(registerObject), 201
@app.route('/phone', methods=['POST'])
def phone():
if not request.json or (not ('call-time' in request.json)) or (not ('id' in request.json)):
abort(400) # Malformed Packet
user = User.query.filter_by(id=request.json["id"]).first()
if not user: #Check database for id to make sure it exists
abort(401)
# Todo Steve will do this
return "", 200
@app.route('/msg_to', methods=['POST'])
def msg_to():
if not request.json or (not ('phone_number' in request.json)) or (not ('id' in request.json)):
abort(400) # Malformed Packet
user = User.query.filter_by(id=request.json["id"]).first()
if not user: #Check database for id to make sure it exists
abort(401)
# Waiting on Steve's commit
return "", 200
@app.route('/')
def landing_page():
return 'Nothing seems to be here'
@app.route('/update-server', methods=['GET', 'POST'])
def update():
call(["git pull"], shell=True)
return 'Success!'
|
Return header fix and msg_to route
|
Return header fix and msg_to route
|
Python
|
mit
|
stevex86/RandomActsOfKindness,stevex86/RandomActsOfKindness
|
from __init__ import app, db
from subprocess import call
from models import User
from flask import request
from flask import abort
from flask import jsonify
@app.route('/register', methods=['POST'])
def register():
if not request.json or not 'guid' in request.json:
abort(400) # Malformed Packet
guid = request.json['guid']
user = User(guid)
db.session.add(user)
db.session.commit()
registerObject = {
'id': user.guid
}
return jsonify(registerObject), 201
@app.route('/phone', methods=['POST'])
def phone():
if not request.json or (not ('call-time' in request.json)) or (not ('id' in request.json)):
abort(400) # Malformed Packet
user = User.query.filter_by(id=request.json["id"]).first()
if not user: #Check database for id to make sure it exists
abort(401)
# Todo Steve will do this
return "", 201
@app.route('/')
def landing_page():
return 'Nothing seems to be here'
@app.route('/update-server', methods=['GET', 'POST'])
def update():
call(["git pull"], shell=True)
return 'Success!'
Return header fix and msg_to route
|
from __init__ import app, db
from subprocess import call
from models import User
from flask import request
from flask import abort
from flask import jsonify
@app.route('/register', methods=['POST'])
def register():
if not request.json or not 'guid' in request.json:
abort(400) # Malformed Packet
guid = request.json['guid']
user = User(guid)
db.session.add(user)
db.session.commit()
registerObject = {
'id': user.guid
}
return jsonify(registerObject), 201
@app.route('/phone', methods=['POST'])
def phone():
if not request.json or (not ('call-time' in request.json)) or (not ('id' in request.json)):
abort(400) # Malformed Packet
user = User.query.filter_by(id=request.json["id"]).first()
if not user: #Check database for id to make sure it exists
abort(401)
# Todo Steve will do this
return "", 200
@app.route('/msg_to', methods=['POST'])
def msg_to():
if not request.json or (not ('phone_number' in request.json)) or (not ('id' in request.json)):
abort(400) # Malformed Packet
user = User.query.filter_by(id=request.json["id"]).first()
if not user: #Check database for id to make sure it exists
abort(401)
# Waiting on Steve's commit
return "", 200
@app.route('/')
def landing_page():
return 'Nothing seems to be here'
@app.route('/update-server', methods=['GET', 'POST'])
def update():
call(["git pull"], shell=True)
return 'Success!'
|
<commit_before>from __init__ import app, db
from subprocess import call
from models import User
from flask import request
from flask import abort
from flask import jsonify
@app.route('/register', methods=['POST'])
def register():
if not request.json or not 'guid' in request.json:
abort(400) # Malformed Packet
guid = request.json['guid']
user = User(guid)
db.session.add(user)
db.session.commit()
registerObject = {
'id': user.guid
}
return jsonify(registerObject), 201
@app.route('/phone', methods=['POST'])
def phone():
if not request.json or (not ('call-time' in request.json)) or (not ('id' in request.json)):
abort(400) # Malformed Packet
user = User.query.filter_by(id=request.json["id"]).first()
if not user: #Check database for id to make sure it exists
abort(401)
# Todo Steve will do this
return "", 201
@app.route('/')
def landing_page():
return 'Nothing seems to be here'
@app.route('/update-server', methods=['GET', 'POST'])
def update():
call(["git pull"], shell=True)
return 'Success!'
<commit_msg>Return header fix and msg_to route<commit_after>
|
from __init__ import app, db
from subprocess import call
from models import User
from flask import request
from flask import abort
from flask import jsonify
@app.route('/register', methods=['POST'])
def register():
if not request.json or not 'guid' in request.json:
abort(400) # Malformed Packet
guid = request.json['guid']
user = User(guid)
db.session.add(user)
db.session.commit()
registerObject = {
'id': user.guid
}
return jsonify(registerObject), 201
@app.route('/phone', methods=['POST'])
def phone():
if not request.json or (not ('call-time' in request.json)) or (not ('id' in request.json)):
abort(400) # Malformed Packet
user = User.query.filter_by(id=request.json["id"]).first()
if not user: #Check database for id to make sure it exists
abort(401)
# Todo Steve will do this
return "", 200
@app.route('/msg_to', methods=['POST'])
def msg_to():
if not request.json or (not ('phone_number' in request.json)) or (not ('id' in request.json)):
abort(400) # Malformed Packet
user = User.query.filter_by(id=request.json["id"]).first()
if not user: #Check database for id to make sure it exists
abort(401)
# Waiting on Steve's commit
return "", 200
@app.route('/')
def landing_page():
return 'Nothing seems to be here'
@app.route('/update-server', methods=['GET', 'POST'])
def update():
call(["git pull"], shell=True)
return 'Success!'
|
from __init__ import app, db
from subprocess import call
from models import User
from flask import request
from flask import abort
from flask import jsonify
@app.route('/register', methods=['POST'])
def register():
if not request.json or not 'guid' in request.json:
abort(400) # Malformed Packet
guid = request.json['guid']
user = User(guid)
db.session.add(user)
db.session.commit()
registerObject = {
'id': user.guid
}
return jsonify(registerObject), 201
@app.route('/phone', methods=['POST'])
def phone():
if not request.json or (not ('call-time' in request.json)) or (not ('id' in request.json)):
abort(400) # Malformed Packet
user = User.query.filter_by(id=request.json["id"]).first()
if not user: #Check database for id to make sure it exists
abort(401)
# Todo Steve will do this
return "", 201
@app.route('/')
def landing_page():
return 'Nothing seems to be here'
@app.route('/update-server', methods=['GET', 'POST'])
def update():
call(["git pull"], shell=True)
return 'Success!'
Return header fix and msg_to routefrom __init__ import app, db
from subprocess import call
from models import User
from flask import request
from flask import abort
from flask import jsonify
@app.route('/register', methods=['POST'])
def register():
if not request.json or not 'guid' in request.json:
abort(400) # Malformed Packet
guid = request.json['guid']
user = User(guid)
db.session.add(user)
db.session.commit()
registerObject = {
'id': user.guid
}
return jsonify(registerObject), 201
@app.route('/phone', methods=['POST'])
def phone():
if not request.json or (not ('call-time' in request.json)) or (not ('id' in request.json)):
abort(400) # Malformed Packet
user = User.query.filter_by(id=request.json["id"]).first()
if not user: #Check database for id to make sure it exists
abort(401)
# Todo Steve will do this
return "", 200
@app.route('/msg_to', methods=['POST'])
def msg_to():
if not request.json or (not ('phone_number' in request.json)) or (not ('id' in request.json)):
abort(400) # Malformed Packet
user = User.query.filter_by(id=request.json["id"]).first()
if not user: #Check database for id to make sure it exists
abort(401)
# Waiting on Steve's commit
return "", 200
@app.route('/')
def landing_page():
return 'Nothing seems to be here'
@app.route('/update-server', methods=['GET', 'POST'])
def update():
call(["git pull"], shell=True)
return 'Success!'
|
<commit_before>from __init__ import app, db
from subprocess import call
from models import User
from flask import request
from flask import abort
from flask import jsonify
@app.route('/register', methods=['POST'])
def register():
if not request.json or not 'guid' in request.json:
abort(400) # Malformed Packet
guid = request.json['guid']
user = User(guid)
db.session.add(user)
db.session.commit()
registerObject = {
'id': user.guid
}
return jsonify(registerObject), 201
@app.route('/phone', methods=['POST'])
def phone():
if not request.json or (not ('call-time' in request.json)) or (not ('id' in request.json)):
abort(400) # Malformed Packet
user = User.query.filter_by(id=request.json["id"]).first()
if not user: #Check database for id to make sure it exists
abort(401)
# Todo Steve will do this
return "", 201
@app.route('/')
def landing_page():
return 'Nothing seems to be here'
@app.route('/update-server', methods=['GET', 'POST'])
def update():
call(["git pull"], shell=True)
return 'Success!'
<commit_msg>Return header fix and msg_to route<commit_after>from __init__ import app, db
from subprocess import call
from models import User
from flask import request
from flask import abort
from flask import jsonify
@app.route('/register', methods=['POST'])
def register():
if not request.json or not 'guid' in request.json:
abort(400) # Malformed Packet
guid = request.json['guid']
user = User(guid)
db.session.add(user)
db.session.commit()
registerObject = {
'id': user.guid
}
return jsonify(registerObject), 201
@app.route('/phone', methods=['POST'])
def phone():
if not request.json or (not ('call-time' in request.json)) or (not ('id' in request.json)):
abort(400) # Malformed Packet
user = User.query.filter_by(id=request.json["id"]).first()
if not user: #Check database for id to make sure it exists
abort(401)
# Todo Steve will do this
return "", 200
@app.route('/msg_to', methods=['POST'])
def msg_to():
if not request.json or (not ('phone_number' in request.json)) or (not ('id' in request.json)):
abort(400) # Malformed Packet
user = User.query.filter_by(id=request.json["id"]).first()
if not user: #Check database for id to make sure it exists
abort(401)
# Waiting on Steve's commit
return "", 200
@app.route('/')
def landing_page():
return 'Nothing seems to be here'
@app.route('/update-server', methods=['GET', 'POST'])
def update():
call(["git pull"], shell=True)
return 'Success!'
|
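The new /msg_to route mirrors /phone: it rejects requests missing phone_number or id with 400, answers 401 when the id is unknown, and otherwise returns an empty 200 while the actual messaging is still pending. A hedged sketch of poking the malformed-request path with Flask's test client, which avoids needing a configured database; the 'server' import path is an assumption about the package layout:

import json

from server import app  # the record imports it as 'from __init__ import app'

client = app.test_client()

# Missing 'phone_number' trips the malformed-packet check before any DB lookup.
payload = {'id': 'some-guid'}
resp = client.post('/msg_to', data=json.dumps(payload),
                   content_type='application/json')
assert resp.status_code == 400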
550fedc513aab5feec3aaf43a49df5082a1e5dda
|
incuna_test_utils/testcases/urls.py
|
incuna_test_utils/testcases/urls.py
|
import warnings
from django.core.urlresolvers import resolve, reverse
from django.test import TestCase
class URLsMixinBase(object):
"""A TestCase Mixin with a check_url helper method for testing urls"""
def check_url(self, view, expected_url, url_name,
url_args=None, url_kwargs=None):
"""
Assert a view's url is correctly configured
Check the url_name reverses to give a correctly formated expected_url.
Check the expected_url resolves to the correct view.
"""
reversed_url = reverse(url_name, args=url_args, kwargs=url_kwargs)
self.assertEqual(reversed_url, expected_url)
self.assertViewNames(view, expected_url)
def assertViewNames(self, view, expected_url):
"""
Assert that the view method/class that the URL resolves to is the
correct one.
"""
raise NotImplementedError
class URLsMixinForViewMethod(URLsMixinBase):
"""For testing method-based views."""
def assertViewNames(self, view_method, expected_url):
resolved_view_method = resolve(expected_url).func
self.assertEqual(resolved_view_method.__name__, view_method.__name__)
class URLsMixinREST(URLsMixinBase):
"""For testing class-based views."""
def assertViewNames(self, view_class, expected_url):
resolved_view_class = resolve(expected_url).func.cls
self.assertEqual(resolved_view_class, view_class)
class URLsMixin(URLsMixinREST):
"""For backwards compatibility."""
def __init__(self, *args, **kwargs):
warnings.warn(
'URLsMixin is deprecated; use URLsMixinREST instead.',
DeprecationWarning)
super(URLsMixin, self).__init__(*args, **kwargs)
class URLsTestCase(URLsMixin, TestCase):
"""For backwards compatibility. Deprecated in v0.6."""
class URLsTestCaseREST(URLsMixinREST, TestCase):
"""Tests class-based REST Framework views."""
class URLsTestCaseViewMethod(URLsMixinForViewMethod, TestCase):
"""Tests (non-REST) views defined by view methods."""
|
import warnings
from django.core.urlresolvers import resolve, reverse
from django.test import TestCase
class URLTestMixin(object):
def assert_url_matches_view(self, view, expected_url, url_name,
url_args=None, url_kwargs=None):
"""
Assert a view's url is correctly configured
Check the url_name reverses to give a correctly formated expected_url.
Check the expected_url resolves to the expected view.
"""
reversed_url = reverse(url_name, args=url_args, kwargs=url_kwargs)
self.assertEqual(reversed_url, expected_url)
resolved_view = resolve(expected_url).func
if hasattr(view, 'cls'):
self.assertEqual(resolved_view.cls, view)
else:
self.assertEqual(resolved_view.__name__, view.__name__)
class URLTestCase(URLTestMixin, TestCase):
pass
|
Add simple URLTestMixin and URLTestCase classes
|
Add simple URLTestMixin and URLTestCase classes
* Remove old mixins and testcases
|
Python
|
bsd-2-clause
|
incuna/incuna-test-utils,incuna/incuna-test-utils
|
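The rewrite collapses the old mixin hierarchy into a single assert_url_matches_view helper that handles both REST framework class-based views (via the cls attribute) and plain view functions or Django CBVs (via __name__). A short sketch of how it reads in a project test; the profiles app, ProfileDetail view, and URL name are placeholders, not part of the record:

from incuna_test_utils.testcases.urls import URLTestCase

from profiles.views import ProfileDetail  # hypothetical app and view


class ProfileURLTest(URLTestCase):
    def test_detail_url(self):
        self.assert_url_matches_view(
            ProfileDetail,
            '/profiles/42/',
            'profile-detail',
            url_kwargs={'pk': 42},
        )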
import warnings
from django.core.urlresolvers import resolve, reverse
from django.test import TestCase
class URLsMixinBase(object):
"""A TestCase Mixin with a check_url helper method for testing urls"""
def check_url(self, view, expected_url, url_name,
url_args=None, url_kwargs=None):
"""
Assert a view's url is correctly configured
Check the url_name reverses to give a correctly formated expected_url.
Check the expected_url resolves to the correct view.
"""
reversed_url = reverse(url_name, args=url_args, kwargs=url_kwargs)
self.assertEqual(reversed_url, expected_url)
self.assertViewNames(view, expected_url)
def assertViewNames(self, view, expected_url):
"""
Assert that the view method/class that the URL resolves to is the
correct one.
"""
raise NotImplementedError
class URLsMixinForViewMethod(URLsMixinBase):
"""For testing method-based views."""
def assertViewNames(self, view_method, expected_url):
resolved_view_method = resolve(expected_url).func
self.assertEqual(resolved_view_method.__name__, view_method.__name__)
class URLsMixinREST(URLsMixinBase):
"""For testing class-based views."""
def assertViewNames(self, view_class, expected_url):
resolved_view_class = resolve(expected_url).func.cls
self.assertEqual(resolved_view_class, view_class)
class URLsMixin(URLsMixinREST):
"""For backwards compatibility."""
def __init__(self, *args, **kwargs):
warnings.warn(
'URLsMixin is deprecated; use URLsMixinREST instead.',
DeprecationWarning)
super(URLsMixin, self).__init__(*args, **kwargs)
class URLsTestCase(URLsMixin, TestCase):
"""For backwards compatibility. Deprecated in v0.6."""
class URLsTestCaseREST(URLsMixinREST, TestCase):
"""Tests class-based REST Framework views."""
class URLsTestCaseViewMethod(URLsMixinForViewMethod, TestCase):
"""Tests (non-REST) views defined by view methods."""
Add simple URLTestMixin and URLTestCase classes
* Remove old mixins and testcases
|
import warnings
from django.core.urlresolvers import resolve, reverse
from django.test import TestCase
class URLTestMixin(object):
def assert_url_matches_view(self, view, expected_url, url_name,
url_args=None, url_kwargs=None):
"""
Assert a view's url is correctly configured
Check the url_name reverses to give a correctly formated expected_url.
Check the expected_url resolves to the expected view.
"""
reversed_url = reverse(url_name, args=url_args, kwargs=url_kwargs)
self.assertEqual(reversed_url, expected_url)
resolved_view = resolve(expected_url).func
if hasattr(view, 'cls'):
self.assertEqual(resolved_view.cls, view)
else:
self.assertEqual(resolved_view.__name__, view.__name__)
class URLTestCase(URLTestMixin, TestCase):
pass
|
<commit_before>import warnings
from django.core.urlresolvers import resolve, reverse
from django.test import TestCase
class URLsMixinBase(object):
"""A TestCase Mixin with a check_url helper method for testing urls"""
def check_url(self, view, expected_url, url_name,
url_args=None, url_kwargs=None):
"""
Assert a view's url is correctly configured
Check the url_name reverses to give a correctly formated expected_url.
Check the expected_url resolves to the correct view.
"""
reversed_url = reverse(url_name, args=url_args, kwargs=url_kwargs)
self.assertEqual(reversed_url, expected_url)
self.assertViewNames(view, expected_url)
def assertViewNames(self, view, expected_url):
"""
Assert that the view method/class that the URL resolves to is the
correct one.
"""
raise NotImplementedError
class URLsMixinForViewMethod(URLsMixinBase):
"""For testing method-based views."""
def assertViewNames(self, view_method, expected_url):
resolved_view_method = resolve(expected_url).func
self.assertEqual(resolved_view_method.__name__, view_method.__name__)
class URLsMixinREST(URLsMixinBase):
"""For testing class-based views."""
def assertViewNames(self, view_class, expected_url):
resolved_view_class = resolve(expected_url).func.cls
self.assertEqual(resolved_view_class, view_class)
class URLsMixin(URLsMixinREST):
"""For backwards compatibility."""
def __init__(self, *args, **kwargs):
warnings.warn(
'URLsMixin is deprecated; use URLsMixinREST instead.',
DeprecationWarning)
super(URLsMixin, self).__init__(*args, **kwargs)
class URLsTestCase(URLsMixin, TestCase):
"""For backwards compatibility. Deprecated in v0.6."""
class URLsTestCaseREST(URLsMixinREST, TestCase):
"""Tests class-based REST Framework views."""
class URLsTestCaseViewMethod(URLsMixinForViewMethod, TestCase):
"""Tests (non-REST) views defined by view methods."""
<commit_msg>Add simple URLTestMixin and URLTestCase classes
* Remove old mixins and testcases<commit_after>
|
import warnings
from django.core.urlresolvers import resolve, reverse
from django.test import TestCase
class URLTestMixin(object):
def assert_url_matches_view(self, view, expected_url, url_name,
url_args=None, url_kwargs=None):
"""
Assert a view's url is correctly configured
Check the url_name reverses to give a correctly formated expected_url.
Check the expected_url resolves to the expected view.
"""
reversed_url = reverse(url_name, args=url_args, kwargs=url_kwargs)
self.assertEqual(reversed_url, expected_url)
resolved_view = resolve(expected_url).func
if hasattr(view, 'cls'):
self.assertEqual(resolved_view.cls, view)
else:
self.assertEqual(resolved_view.__name__, view.__name__)
class URLTestCase(URLTestMixin, TestCase):
pass
|
import warnings
from django.core.urlresolvers import resolve, reverse
from django.test import TestCase
class URLsMixinBase(object):
"""A TestCase Mixin with a check_url helper method for testing urls"""
def check_url(self, view, expected_url, url_name,
url_args=None, url_kwargs=None):
"""
Assert a view's url is correctly configured
Check the url_name reverses to give a correctly formated expected_url.
Check the expected_url resolves to the correct view.
"""
reversed_url = reverse(url_name, args=url_args, kwargs=url_kwargs)
self.assertEqual(reversed_url, expected_url)
self.assertViewNames(view, expected_url)
def assertViewNames(self, view, expected_url):
"""
Assert that the view method/class that the URL resolves to is the
correct one.
"""
raise NotImplementedError
class URLsMixinForViewMethod(URLsMixinBase):
"""For testing method-based views."""
def assertViewNames(self, view_method, expected_url):
resolved_view_method = resolve(expected_url).func
self.assertEqual(resolved_view_method.__name__, view_method.__name__)
class URLsMixinREST(URLsMixinBase):
"""For testing class-based views."""
def assertViewNames(self, view_class, expected_url):
resolved_view_class = resolve(expected_url).func.cls
self.assertEqual(resolved_view_class, view_class)
class URLsMixin(URLsMixinREST):
"""For backwards compatibility."""
def __init__(self, *args, **kwargs):
warnings.warn(
'URLsMixin is deprecated; use URLsMixinREST instead.',
DeprecationWarning)
super(URLsMixin, self).__init__(*args, **kwargs)
class URLsTestCase(URLsMixin, TestCase):
"""For backwards compatibility. Deprecated in v0.6."""
class URLsTestCaseREST(URLsMixinREST, TestCase):
"""Tests class-based REST Framework views."""
class URLsTestCaseViewMethod(URLsMixinForViewMethod, TestCase):
"""Tests (non-REST) views defined by view methods."""
Add simple URLTestMixin and URLTestCase classes
* Remove old mixins and testcases
import warnings
from django.core.urlresolvers import resolve, reverse
from django.test import TestCase
class URLTestMixin(object):
def assert_url_matches_view(self, view, expected_url, url_name,
url_args=None, url_kwargs=None):
"""
Assert a view's url is correctly configured
Check the url_name reverses to give a correctly formated expected_url.
Check the expected_url resolves to the expected view.
"""
reversed_url = reverse(url_name, args=url_args, kwargs=url_kwargs)
self.assertEqual(reversed_url, expected_url)
resolved_view = resolve(expected_url).func
if hasattr(view, 'cls'):
self.assertEqual(resolved_view.cls, view)
else:
self.assertEqual(resolved_view.__name__, view.__name__)
class URLTestCase(URLTestMixin, TestCase):
pass
|
<commit_before>import warnings
from django.core.urlresolvers import resolve, reverse
from django.test import TestCase
class URLsMixinBase(object):
"""A TestCase Mixin with a check_url helper method for testing urls"""
def check_url(self, view, expected_url, url_name,
url_args=None, url_kwargs=None):
"""
Assert a view's url is correctly configured
Check the url_name reverses to give a correctly formated expected_url.
Check the expected_url resolves to the correct view.
"""
reversed_url = reverse(url_name, args=url_args, kwargs=url_kwargs)
self.assertEqual(reversed_url, expected_url)
self.assertViewNames(view, expected_url)
def assertViewNames(self, view, expected_url):
"""
Assert that the view method/class that the URL resolves to is the
correct one.
"""
raise NotImplementedError
class URLsMixinForViewMethod(URLsMixinBase):
"""For testing method-based views."""
def assertViewNames(self, view_method, expected_url):
resolved_view_method = resolve(expected_url).func
self.assertEqual(resolved_view_method.__name__, view_method.__name__)
class URLsMixinREST(URLsMixinBase):
"""For testing class-based views."""
def assertViewNames(self, view_class, expected_url):
resolved_view_class = resolve(expected_url).func.cls
self.assertEqual(resolved_view_class, view_class)
class URLsMixin(URLsMixinREST):
"""For backwards compatibility."""
def __init__(self, *args, **kwargs):
warnings.warn(
'URLsMixin is deprecated; use URLsMixinREST instead.',
DeprecationWarning)
super(URLsMixin, self).__init__(*args, **kwargs)
class URLsTestCase(URLsMixin, TestCase):
"""For backwards compatibility. Deprecated in v0.6."""
class URLsTestCaseREST(URLsMixinREST, TestCase):
"""Tests class-based REST Framework views."""
class URLsTestCaseViewMethod(URLsMixinForViewMethod, TestCase):
"""Tests (non-REST) views defined by view methods."""
<commit_msg>Add simple URLTestMixin and URLTestCase classes
* Remove old mixins and testcases<commit_after>import warnings
from django.core.urlresolvers import resolve, reverse
from django.test import TestCase
class URLTestMixin(object):
def assert_url_matches_view(self, view, expected_url, url_name,
url_args=None, url_kwargs=None):
"""
Assert a view's url is correctly configured
Check the url_name reverses to give a correctly formated expected_url.
Check the expected_url resolves to the expected view.
"""
reversed_url = reverse(url_name, args=url_args, kwargs=url_kwargs)
self.assertEqual(reversed_url, expected_url)
resolved_view = resolve(expected_url).func
if hasattr(view, 'cls'):
self.assertEqual(resolved_view.cls, view)
else:
self.assertEqual(resolved_view.__name__, view.__name__)
class URLTestCase(URLTestMixin, TestCase):
pass
|
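A minimal usage sketch for the consolidated URLTestCase defined in the record above; the import paths and the `home` view and URL name are placeholders for illustration, not names taken from the commit.
# Illustrative only: "myapp"/"myproject" and the "home" view are placeholders.
from myapp.views import home                     # hypothetical function-based view
from myproject.test_utils import URLTestCase     # wherever this commit's class lives


class HomeURLTest(URLTestCase):
    def test_home_url(self):
        # One call covers both cases handled by the mixin: plain view functions
        # and DRF class-based views (which expose resolve(...).func.cls).
        self.assert_url_matches_view(
            view=home,
            expected_url='/',
            url_name='home',
        )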
e4ab52fc36b9d4e0805fb134d43bf63fb73a62d8
|
shcol/cli.py
|
shcol/cli.py
|
from __future__ import print_function
import argparse
import shcol
__all__ = ['main']
def main(cmd_args):
parser = argparse.ArgumentParser(
description='Generate columnized output for given string items.',
version='shcol {}'.format(shcol.__version__)
)
parser.add_argument('items', nargs='+', help='the items to columnize')
parser.add_argument(
'-s', '--spacing', metavar='N', type=int, default=2,
help='number of blanks between two columns (default: 2)'
)
parser.add_argument(
'-w', '--width', metavar='N', type=int, default=80,
help='maximal amount of characters per line (default: 80)'
)
parser.add_argument(
'-S', '--sort', help='sort the items',
action='store_true', default=False
)
print(shcol.columnize(args.items, args.spacing, args.width, args.sort))
|
from __future__ import print_function
import argparse
import shcol
import sys
__all__ = ['main']
def main(cmd_args):
parser = argparse.ArgumentParser(
description='Generate columnized output for given string items.',
version='shcol {}'.format(shcol.__version__)
)
parser.add_argument(
'items', nargs='*', metavar='item', help='an item to columnize'
)
parser.add_argument(
'-s', '--spacing', metavar='N', type=int, default=2,
help='number of blanks between two columns (default: 2)'
)
parser.add_argument(
'-w', '--width', metavar='N', type=int, default=80,
help='maximal amount of characters per line (default: 80)'
)
parser.add_argument(
'-S', '--sort', action='store_true', default=False,
help='sort the items'
)
args = parser.parse_args(cmd_args[1:])
items = args.items or [line.rstrip('\n') for line in sys.stdin]
print(shcol.columnize(items, args.spacing, args.width, args.sort))
|
Read from Stdin when no items are passed.
|
Read from Stdin when no items are passed.
|
Python
|
bsd-2-clause
|
seblin/shcol
|
from __future__ import print_function
import argparse
import shcol
__all__ = ['main']
def main(cmd_args):
parser = argparse.ArgumentParser(
description='Generate columnized output for given string items.',
version='shcol {}'.format(shcol.__version__)
)
parser.add_argument('items', nargs='+', help='the items to columnize')
parser.add_argument(
'-s', '--spacing', metavar='N', type=int, default=2,
help='number of blanks between two columns (default: 2)'
)
parser.add_argument(
'-w', '--width', metavar='N', type=int, default=80,
help='maximal amount of characters per line (default: 80)'
)
parser.add_argument(
'-S', '--sort', help='sort the items',
action='store_true', default=False
)
print(shcol.columnize(args.items, args.spacing, args.width, args.sort))
Read from Stdin when no items are passed.
|
from __future__ import print_function
import argparse
import shcol
import sys
__all__ = ['main']
def main(cmd_args):
parser = argparse.ArgumentParser(
description='Generate columnized output for given string items.',
version='shcol {}'.format(shcol.__version__)
)
parser.add_argument(
'items', nargs='*', metavar='item', help='an item to columnize'
)
parser.add_argument(
'-s', '--spacing', metavar='N', type=int, default=2,
help='number of blanks between two columns (default: 2)'
)
parser.add_argument(
'-w', '--width', metavar='N', type=int, default=80,
help='maximal amount of characters per line (default: 80)'
)
parser.add_argument(
'-S', '--sort', action='store_true', default=False,
help='sort the items'
)
args = parser.parse_args(cmd_args[1:])
items = args.items or [line.rstrip('\n') for line in sys.stdin]
print(shcol.columnize(items, args.spacing, args.width, args.sort))
|
<commit_before>from __future__ import print_function
import argparse
import shcol
__all__ = ['main']
def main(cmd_args):
parser = argparse.ArgumentParser(
description='Generate columnized output for given string items.',
version='shcol {}'.format(shcol.__version__)
)
parser.add_argument('items', nargs='+', help='the items to columnize')
parser.add_argument(
'-s', '--spacing', metavar='N', type=int, default=2,
help='number of blanks between two columns (default: 2)'
)
parser.add_argument(
'-w', '--width', metavar='N', type=int, default=80,
help='maximal amount of characters per line (default: 80)'
)
parser.add_argument(
'-S', '--sort', help='sort the items',
action='store_true', default=False
)
print(shcol.columnize(args.items, args.spacing, args.width, args.sort))
<commit_msg>Read from Stdin when no items are passed.<commit_after>
|
from __future__ import print_function
import argparse
import shcol
import sys
__all__ = ['main']
def main(cmd_args):
parser = argparse.ArgumentParser(
description='Generate columnized output for given string items.',
version='shcol {}'.format(shcol.__version__)
)
parser.add_argument(
'items', nargs='*', metavar='item', help='an item to columnize'
)
parser.add_argument(
'-s', '--spacing', metavar='N', type=int, default=2,
help='number of blanks between two columns (default: 2)'
)
parser.add_argument(
'-w', '--width', metavar='N', type=int, default=80,
help='maximal amount of characters per line (default: 80)'
)
parser.add_argument(
'-S', '--sort', action='store_true', default=False,
help='sort the items'
)
args = parser.parse_args(cmd_args[1:])
items = args.items or [line.rstrip('\n') for line in sys.stdin]
print(shcol.columnize(items, args.spacing, args.width, args.sort))
|
from __future__ import print_function
import argparse
import shcol
__all__ = ['main']
def main(cmd_args):
parser = argparse.ArgumentParser(
description='Generate columnized output for given string items.',
version='shcol {}'.format(shcol.__version__)
)
parser.add_argument('items', nargs='+', help='the items to columnize')
parser.add_argument(
'-s', '--spacing', metavar='N', type=int, default=2,
help='number of blanks between two columns (default: 2)'
)
parser.add_argument(
'-w', '--width', metavar='N', type=int, default=80,
help='maximal amount of characters per line (default: 80)'
)
parser.add_argument(
'-S', '--sort', help='sort the items',
action='store_true', default=False
)
print(shcol.columnize(args.items, args.spacing, args.width, args.sort))
Read from Stdin when no items are passed.
from __future__ import print_function
import argparse
import shcol
import sys
__all__ = ['main']
def main(cmd_args):
parser = argparse.ArgumentParser(
description='Generate columnized output for given string items.',
version='shcol {}'.format(shcol.__version__)
)
parser.add_argument(
'items', nargs='*', metavar='item', help='an item to columnize'
)
parser.add_argument(
'-s', '--spacing', metavar='N', type=int, default=2,
help='number of blanks between two columns (default: 2)'
)
parser.add_argument(
'-w', '--width', metavar='N', type=int, default=80,
help='maximal amount of characters per line (default: 80)'
)
parser.add_argument(
'-S', '--sort', action='store_true', default=False,
help='sort the items'
)
args = parser.parse_args(cmd_args[1:])
items = args.items or [line.rstrip('\n') for line in sys.stdin]
print(shcol.columnize(items, args.spacing, args.width, args.sort))
|
<commit_before>from __future__ import print_function
import argparse
import shcol
__all__ = ['main']
def main(cmd_args):
parser = argparse.ArgumentParser(
description='Generate columnized output for given string items.',
version='shcol {}'.format(shcol.__version__)
)
parser.add_argument('items', nargs='+', help='the items to columnize')
parser.add_argument(
'-s', '--spacing', metavar='N', type=int, default=2,
help='number of blanks between two columns (default: 2)'
)
parser.add_argument(
'-w', '--width', metavar='N', type=int, default=80,
help='maximal amount of characters per line (default: 80)'
)
parser.add_argument(
'-S', '--sort', help='sort the items',
action='store_true', default=False
)
print(shcol.columnize(args.items, args.spacing, args.width, args.sort))
<commit_msg>Read from Stdin when no items are passed.<commit_after>from __future__ import print_function
import argparse
import shcol
import sys
__all__ = ['main']
def main(cmd_args):
parser = argparse.ArgumentParser(
description='Generate columnized output for given string items.',
version='shcol {}'.format(shcol.__version__)
)
parser.add_argument(
'items', nargs='*', metavar='item', help='an item to columnize'
)
parser.add_argument(
'-s', '--spacing', metavar='N', type=int, default=2,
help='number of blanks between two columns (default: 2)'
)
parser.add_argument(
'-w', '--width', metavar='N', type=int, default=80,
help='maximal amount of characters per line (default: 80)'
)
parser.add_argument(
'-S', '--sort', action='store_true', default=False,
help='sort the items'
)
args = parser.parse_args(cmd_args[1:])
items = args.items or [line.rstrip('\n') for line in sys.stdin]
print(shcol.columnize(items, args.spacing, args.width, args.sort))
|
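A standalone sketch of the stdin fallback introduced in the record above; the StringIO swap merely simulates a shell pipe and is not part of shcol itself.
# Standalone illustration of the "no items -> read stdin" fallback (not shcol code).
import io
import sys

def items_or_stdin(items):
    # Same pattern as the commit: use stdin lines when no items were passed.
    return items or [line.rstrip('\n') for line in sys.stdin]

sys.stdin = io.StringIO('alpha\nbeta\ngamma\n')   # stands in for `printf ... | shcol`
print(items_or_stdin([]))          # ['alpha', 'beta', 'gamma']
print(items_or_stdin(['x', 'y']))  # ['x', 'y']; stdin is ignored when items exist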
7a20ee42aae2d2a6f5766ab4ec1ee4ef33fe14c8
|
madam_rest/__init__.py
|
madam_rest/__init__.py
|
from flask import Flask
from madam import Madam
app = Flask(__name__)
app.from_object('config')
asset_manager = Madam()
asset_storage = app.config['ASSET_STORAGE']
from madam_rest import views
|
import madam
from flask import Flask
app = Flask(__name__)
app.from_object('config')
asset_manager = madam.Madam()
asset_storage = madam.core.ShelveStorage(app.config['ASSET_STORAGE_PATH'])
from madam_rest import views
|
Create shelve asset storage by default.
|
Create shelve asset storage by default.
|
Python
|
agpl-3.0
|
eseifert/madam-rest
|
from flask import Flask
from madam import Madam
app = Flask(__name__)
app.from_object('config')
asset_manager = Madam()
asset_storage = app.config['ASSET_STORAGE']
from madam_rest import views
Create shelve asset storage by default.
|
import madam
from flask import Flask
app = Flask(__name__)
app.from_object('config')
asset_manager = madam.Madam()
asset_storage = madam.core.ShelveStorage(app.config['ASSET_STORAGE_PATH'])
from madam_rest import views
|
<commit_before>from flask import Flask
from madam import Madam
app = Flask(__name__)
app.from_object('config')
asset_manager = Madam()
asset_storage = app.config['ASSET_STORAGE']
from madam_rest import views
<commit_msg>Create shelve asset storage by default.<commit_after>
|
import madam
from flask import Flask
app = Flask(__name__)
app.from_object('config')
asset_manager = madam.Madam()
asset_storage = madam.core.ShelveStorage(app.config['ASSET_STORAGE_PATH'])
from madam_rest import views
|
from flask import Flask
from madam import Madam
app = Flask(__name__)
app.from_object('config')
asset_manager = Madam()
asset_storage = app.config['ASSET_STORAGE']
from madam_rest import views
Create shelve asset storage by default.
import madam
from flask import Flask
app = Flask(__name__)
app.from_object('config')
asset_manager = madam.Madam()
asset_storage = madam.core.ShelveStorage(app.config['ASSET_STORAGE_PATH'])
from madam_rest import views
|
<commit_before>from flask import Flask
from madam import Madam
app = Flask(__name__)
app.from_object('config')
asset_manager = Madam()
asset_storage = app.config['ASSET_STORAGE']
from madam_rest import views
<commit_msg>Create shelve asset storage by default.<commit_after>import madam
from flask import Flask
app = Flask(__name__)
app.from_object('config')
asset_manager = madam.Madam()
asset_storage = madam.core.ShelveStorage(app.config['ASSET_STORAGE_PATH'])
from madam_rest import views
|
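The change above reads an ASSET_STORAGE_PATH setting instead of the old ASSET_STORAGE object; a minimal example config.py might look like the sketch below, with a purely illustrative path.
# Example config.py for the updated madam_rest app (the path is a placeholder).
ASSET_STORAGE_PATH = '/var/lib/madam/assets.shelve'   # file backing madam's ShelveStorage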
5b931f92b0f8f65306ced9cf049e2d1089c43860
|
fantail/tests/test_staticsite.py
|
fantail/tests/test_staticsite.py
|
"""
Tests for staticsite.py - the static site generator
"""
import os.path
import pytest
from fantail.staticsite import StaticSite
def test_init(tmpdir, caplog):
# Verify path does not exist
path = str(tmpdir.join('test-site'))
assert not os.path.isdir(path)
# Create the site
site = StaticSite(path)
site.init_site()
# Verify directories have been created
assert path == site.path
assert os.path.isdir(path)
assert os.path.isdir(os.path.join(path, 'output', '.git'))
assert 'Welcome from' in caplog.text()
assert str(repr(site)) == '<StaticSite "' + path + '">'
def test_dir_properties(tmpdir):
path = str(tmpdir.join('test-site'))
site = StaticSite(path)
assert site.template_dir == os.path.join(path, 'templates')
assert site.pages_dir == os.path.join(path, 'pages')
assert site.output_dir == os.path.join(path, 'output')
def test_site_clean(tmpdir, caplog):
path = str(tmpdir.join('test-site'))
site = StaticSite(path)
# This should fail as init() was not called first
with pytest.raises(SystemExit):
site.clean_site()
assert 'Site at ' + path + ' does not exist. Please' in caplog.text()
site.init_site()
# This should succeed now
site.clean_site()
assert 'Removed output directory from' in caplog.text()
|
"""
Tests for staticsite.py - the static site generator
"""
import os.path
import pytest
from fantail.staticsite import StaticSite
def test_init(tmpdir, caplog):
# Verify path does not exist
path = str(tmpdir.join('test-site'))
assert not os.path.isdir(path)
# Create the site
site = StaticSite(path)
site.init_site()
# Verify directories have been created
assert path == site.path
assert os.path.isdir(path)
assert 'Welcome from' in caplog.text()
assert str(repr(site)) == '<StaticSite "' + path + '">'
def test_dir_properties(tmpdir):
path = str(tmpdir.join('test-site'))
site = StaticSite(path)
assert site.template_dir == os.path.join(path, 'templates')
assert site.pages_dir == os.path.join(path, 'pages')
assert site.output_dir == os.path.join(path, 'output')
def test_site_clean(tmpdir, caplog):
path = str(tmpdir.join('test-site'))
site = StaticSite(path)
# This should fail as init() was not called first
with pytest.raises(SystemExit):
site.clean_site()
assert 'Site at ' + path + ' does not exist. Please' in caplog.text()
site.init_site()
# This should succeed now
site.clean_site()
assert 'Removed output directory from' in caplog.text()
|
Remove assertion in test that should not have made it in
|
Remove assertion in test that should not have made it in
|
Python
|
bsd-2-clause
|
sjkingo/fantail,sjkingo/fantail,sjkingo/fantail
|
"""
Tests for staticsite.py - the static site generator
"""
import os.path
import pytest
from fantail.staticsite import StaticSite
def test_init(tmpdir, caplog):
# Verify path does not exist
path = str(tmpdir.join('test-site'))
assert not os.path.isdir(path)
# Create the site
site = StaticSite(path)
site.init_site()
# Verify directories have been created
assert path == site.path
assert os.path.isdir(path)
assert os.path.isdir(os.path.join(path, 'output', '.git'))
assert 'Welcome from' in caplog.text()
assert str(repr(site)) == '<StaticSite "' + path + '">'
def test_dir_properties(tmpdir):
path = str(tmpdir.join('test-site'))
site = StaticSite(path)
assert site.template_dir == os.path.join(path, 'templates')
assert site.pages_dir == os.path.join(path, 'pages')
assert site.output_dir == os.path.join(path, 'output')
def test_site_clean(tmpdir, caplog):
path = str(tmpdir.join('test-site'))
site = StaticSite(path)
# This should fail as init() was not called first
with pytest.raises(SystemExit):
site.clean_site()
assert 'Site at ' + path + ' does not exist. Please' in caplog.text()
site.init_site()
# This should succeed now
site.clean_site()
assert 'Removed output directory from' in caplog.text()
Remove assertion in test that should not have made it in
|
"""
Tests for staticsite.py - the static site generator
"""
import os.path
import pytest
from fantail.staticsite import StaticSite
def test_init(tmpdir, caplog):
# Verify path does not exist
path = str(tmpdir.join('test-site'))
assert not os.path.isdir(path)
# Create the site
site = StaticSite(path)
site.init_site()
# Verify directories have been created
assert path == site.path
assert os.path.isdir(path)
assert 'Welcome from' in caplog.text()
assert str(repr(site)) == '<StaticSite "' + path + '">'
def test_dir_properties(tmpdir):
path = str(tmpdir.join('test-site'))
site = StaticSite(path)
assert site.template_dir == os.path.join(path, 'templates')
assert site.pages_dir == os.path.join(path, 'pages')
assert site.output_dir == os.path.join(path, 'output')
def test_site_clean(tmpdir, caplog):
path = str(tmpdir.join('test-site'))
site = StaticSite(path)
# This should fail as init() was not called first
with pytest.raises(SystemExit):
site.clean_site()
assert 'Site at ' + path + ' does not exist. Please' in caplog.text()
site.init_site()
# This should succeed now
site.clean_site()
assert 'Removed output directory from' in caplog.text()
|
<commit_before>"""
Tests for staticsite.py - the static site generator
"""
import os.path
import pytest
from fantail.staticsite import StaticSite
def test_init(tmpdir, caplog):
# Verify path does not exist
path = str(tmpdir.join('test-site'))
assert not os.path.isdir(path)
# Create the site
site = StaticSite(path)
site.init_site()
# Verify directories have been created
assert path == site.path
assert os.path.isdir(path)
assert os.path.isdir(os.path.join(path, 'output', '.git'))
assert 'Welcome from' in caplog.text()
assert str(repr(site)) == '<StaticSite "' + path + '">'
def test_dir_properties(tmpdir):
path = str(tmpdir.join('test-site'))
site = StaticSite(path)
assert site.template_dir == os.path.join(path, 'templates')
assert site.pages_dir == os.path.join(path, 'pages')
assert site.output_dir == os.path.join(path, 'output')
def test_site_clean(tmpdir, caplog):
path = str(tmpdir.join('test-site'))
site = StaticSite(path)
# This should fail as init() was not called first
with pytest.raises(SystemExit):
site.clean_site()
assert 'Site at ' + path + ' does not exist. Please' in caplog.text()
site.init_site()
# This should succeed now
site.clean_site()
assert 'Removed output directory from' in caplog.text()
<commit_msg>Remove assertion in test that should not have made it in<commit_after>
|
"""
Tests for staticsite.py - the static site generator
"""
import os.path
import pytest
from fantail.staticsite import StaticSite
def test_init(tmpdir, caplog):
# Verify path does not exist
path = str(tmpdir.join('test-site'))
assert not os.path.isdir(path)
# Create the site
site = StaticSite(path)
site.init_site()
# Verify directories have been created
assert path == site.path
assert os.path.isdir(path)
assert 'Welcome from' in caplog.text()
assert str(repr(site)) == '<StaticSite "' + path + '">'
def test_dir_properties(tmpdir):
path = str(tmpdir.join('test-site'))
site = StaticSite(path)
assert site.template_dir == os.path.join(path, 'templates')
assert site.pages_dir == os.path.join(path, 'pages')
assert site.output_dir == os.path.join(path, 'output')
def test_site_clean(tmpdir, caplog):
path = str(tmpdir.join('test-site'))
site = StaticSite(path)
# This should fail as init() was not called first
with pytest.raises(SystemExit):
site.clean_site()
assert 'Site at ' + path + ' does not exist. Please' in caplog.text()
site.init_site()
# This should succeed now
site.clean_site()
assert 'Removed output directory from' in caplog.text()
|
"""
Tests for staticsite.py - the static site generator
"""
import os.path
import pytest
from fantail.staticsite import StaticSite
def test_init(tmpdir, caplog):
# Verify path does not exist
path = str(tmpdir.join('test-site'))
assert not os.path.isdir(path)
# Create the site
site = StaticSite(path)
site.init_site()
# Verify directories have been created
assert path == site.path
assert os.path.isdir(path)
assert os.path.isdir(os.path.join(path, 'output', '.git'))
assert 'Welcome from' in caplog.text()
assert str(repr(site)) == '<StaticSite "' + path + '">'
def test_dir_properties(tmpdir):
path = str(tmpdir.join('test-site'))
site = StaticSite(path)
assert site.template_dir == os.path.join(path, 'templates')
assert site.pages_dir == os.path.join(path, 'pages')
assert site.output_dir == os.path.join(path, 'output')
def test_site_clean(tmpdir, caplog):
path = str(tmpdir.join('test-site'))
site = StaticSite(path)
# This should fail as init() was not called first
with pytest.raises(SystemExit):
site.clean_site()
assert 'Site at ' + path + ' does not exist. Please' in caplog.text()
site.init_site()
# This should succeed now
site.clean_site()
assert 'Removed output directory from' in caplog.text()
Remove assertion in test that should not have made it in
"""
Tests for staticsite.py - the static site generator
"""
import os.path
import pytest
from fantail.staticsite import StaticSite
def test_init(tmpdir, caplog):
# Verify path does not exist
path = str(tmpdir.join('test-site'))
assert not os.path.isdir(path)
# Create the site
site = StaticSite(path)
site.init_site()
# Verify directories have been created
assert path == site.path
assert os.path.isdir(path)
assert 'Welcome from' in caplog.text()
assert str(repr(site)) == '<StaticSite "' + path + '">'
def test_dir_properties(tmpdir):
path = str(tmpdir.join('test-site'))
site = StaticSite(path)
assert site.template_dir == os.path.join(path, 'templates')
assert site.pages_dir == os.path.join(path, 'pages')
assert site.output_dir == os.path.join(path, 'output')
def test_site_clean(tmpdir, caplog):
path = str(tmpdir.join('test-site'))
site = StaticSite(path)
# This should fail as init() was not called first
with pytest.raises(SystemExit):
site.clean_site()
assert 'Site at ' + path + ' does not exist. Please' in caplog.text()
site.init_site()
# This should succeed now
site.clean_site()
assert 'Removed output directory from' in caplog.text()
|
<commit_before>"""
Tests for staticsite.py - the static site generator
"""
import os.path
import pytest
from fantail.staticsite import StaticSite
def test_init(tmpdir, caplog):
# Verify path does not exist
path = str(tmpdir.join('test-site'))
assert not os.path.isdir(path)
# Create the site
site = StaticSite(path)
site.init_site()
# Verify directories have been created
assert path == site.path
assert os.path.isdir(path)
assert os.path.isdir(os.path.join(path, 'output', '.git'))
assert 'Welcome from' in caplog.text()
assert str(repr(site)) == '<StaticSite "' + path + '">'
def test_dir_properties(tmpdir):
path = str(tmpdir.join('test-site'))
site = StaticSite(path)
assert site.template_dir == os.path.join(path, 'templates')
assert site.pages_dir == os.path.join(path, 'pages')
assert site.output_dir == os.path.join(path, 'output')
def test_site_clean(tmpdir, caplog):
path = str(tmpdir.join('test-site'))
site = StaticSite(path)
# This should fail as init() was not called first
with pytest.raises(SystemExit):
site.clean_site()
assert 'Site at ' + path + ' does not exist. Please' in caplog.text()
site.init_site()
# This should succeed now
site.clean_site()
assert 'Removed output directory from' in caplog.text()
<commit_msg>Remove assertion in test that should not have made it in<commit_after>"""
Tests for staticsite.py - the static site generator
"""
import os.path
import pytest
from fantail.staticsite import StaticSite
def test_init(tmpdir, caplog):
# Verify path does not exist
path = str(tmpdir.join('test-site'))
assert not os.path.isdir(path)
# Create the site
site = StaticSite(path)
site.init_site()
# Verify directories have been created
assert path == site.path
assert os.path.isdir(path)
assert 'Welcome from' in caplog.text()
assert str(repr(site)) == '<StaticSite "' + path + '">'
def test_dir_properties(tmpdir):
path = str(tmpdir.join('test-site'))
site = StaticSite(path)
assert site.template_dir == os.path.join(path, 'templates')
assert site.pages_dir == os.path.join(path, 'pages')
assert site.output_dir == os.path.join(path, 'output')
def test_site_clean(tmpdir, caplog):
path = str(tmpdir.join('test-site'))
site = StaticSite(path)
# This should fail as init() was not called first
with pytest.raises(SystemExit):
site.clean_site()
assert 'Site at ' + path + ' does not exist. Please' in caplog.text()
site.init_site()
# This should succeed now
site.clean_site()
assert 'Removed output directory from' in caplog.text()
|
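A generic, self-contained illustration of the pytest.raises plus caplog pattern these tests rely on; the function and messages below are invented for the example and are not fantail code.
# Generic sketch of the pattern used in test_site_clean (illustrative names only).
import logging
import pytest

def clean_site(path_exists=False):
    if not path_exists:
        logging.getLogger(__name__).error('Site does not exist. Please run init first.')
        raise SystemExit(1)

def test_clean_site_requires_init(caplog):
    with pytest.raises(SystemExit):
        clean_site(path_exists=False)
    assert 'does not exist' in caplog.text   # caplog.text is a property in current pytest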
d67257dfe124d74d40d1dbe8bf881df27a07bf2c
|
needlestack/connections.py
|
needlestack/connections.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
from threading import local
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from . import utils
class ConnectionManager(object):
def __init__(self):
self._connections = local()
def load_backend(self, alias="default"):
try:
conf = getattr(settings, "NEEDLESTACK_CONNECTIONS")
except AttributeError as e:
raise ImproperlyConfigured("needlestack not configured") from e
if alias not in conf:
raise ImproperlyConfigured("connection with alias {0} "
"does not exists".format(alias))
_conf = conf[alias]
cls = utils.load_class(_conf["engine"])
params = _conf["options"]
return (cls, params)
def get_connection(self, alias="default"):
if hasattr(self._connections, alias):
return getattr(self._connections, alias)
cls, params = self.load_backend(alias)
instance = cls(**params)
setattr(self._connections, alias, instance)
return instance
def __getattr__(self, alias):
return self.get_connection(alias)
manager = ConnectionManager()
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
from threading import local
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from . import utils
from . import base
class ConnectionManager(object):
def __init__(self):
self._connections = local()
def load_backend(self, alias="default"):
try:
conf = getattr(settings, "NEEDLESTACK_CONNECTIONS")
except AttributeError as e:
raise ImproperlyConfigured("needlestack not configured") from e
if alias not in conf:
raise ImproperlyConfigured("connection with alias {0} "
"does not exists".format(alias))
_conf = conf[alias]
cls = utils.load_class(_conf["engine"])
params = _conf["options"]
return (cls, params)
def get_connection(self, alias="default"):
if hasattr(self._connections, alias):
return getattr(self._connections, alias)
cls, params = self.load_backend(alias)
instance = cls(**params)
setattr(self._connections, alias, instance)
return instance
def get_all_indexes(self):
base._load_all_indexes()
return base._get_all_indexes()
def get_index_by_name(self, name):
base._load_all_indexes()
return base._get_index_by_name(name)
def __getattr__(self, alias):
return self.get_connection(alias)
manager = ConnectionManager()
|
Add helpers for obtain indexes to connection manager.
|
Add helpers for obtain indexes to connection manager.
|
Python
|
bsd-3-clause
|
niwinz/needlestack
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
from threading import local
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from . import utils
class ConnectionManager(object):
def __init__(self):
self._connections = local()
def load_backend(self, alias="default"):
try:
conf = getattr(settings, "NEEDLESTACK_CONNECTIONS")
except AttributeError as e:
raise ImproperlyConfigured("needlestack not configured") from e
if alias not in conf:
raise ImproperlyConfigured("connection with alias {0} "
"does not exists".format(alias))
_conf = conf[alias]
cls = utils.load_class(_conf["engine"])
params = _conf["options"]
return (cls, params)
def get_connection(self, alias="default"):
if hasattr(self._connections, alias):
return getattr(self._connections, alias)
cls, params = self.load_backend(alias)
instance = cls(**params)
setattr(self._connections, alias, instance)
return instance
def __getattr__(self, alias):
return self.get_connection(alias)
manager = ConnectionManager()
Add helpers for obtain indexes to connection manager.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
from threading import local
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from . import utils
from . import base
class ConnectionManager(object):
def __init__(self):
self._connections = local()
def load_backend(self, alias="default"):
try:
conf = getattr(settings, "NEEDLESTACK_CONNECTIONS")
except AttributeError as e:
raise ImproperlyConfigured("needlestack not configured") from e
if alias not in conf:
raise ImproperlyConfigured("connection with alias {0} "
"does not exists".format(alias))
_conf = conf[alias]
cls = utils.load_class(_conf["engine"])
params = _conf["options"]
return (cls, params)
def get_connection(self, alias="default"):
if hasattr(self._connections, alias):
return getattr(self._connections, alias)
cls, params = self.load_backend(alias)
instance = cls(**params)
setattr(self._connections, alias, instance)
return instance
def get_all_indexes(self):
base._load_all_indexes()
return base._get_all_indexes()
def get_index_by_name(self, name):
base._load_all_indexes()
return base._get_index_by_name(name)
def __getattr__(self, alias):
return self.get_connection(alias)
manager = ConnectionManager()
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
from threading import local
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from . import utils
class ConnectionManager(object):
def __init__(self):
self._connections = local()
def load_backend(self, alias="default"):
try:
conf = getattr(settings, "NEEDLESTACK_CONNECTIONS")
except AttributeError as e:
raise ImproperlyConfigured("needlestack not configured") from e
if alias not in conf:
raise ImproperlyConfigured("connection with alias {0} "
"does not exists".format(alias))
_conf = conf[alias]
cls = utils.load_class(_conf["engine"])
params = _conf["options"]
return (cls, params)
def get_connection(self, alias="default"):
if hasattr(self._connections, alias):
return getattr(self._connections, alias)
cls, params = self.load_backend(alias)
instance = cls(**params)
setattr(self._connections, alias, instance)
return instance
def __getattr__(self, alias):
return self.get_connection(alias)
manager = ConnectionManager()
<commit_msg>Add helpers for obtain indexes to connection manager.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
from threading import local
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from . import utils
from . import base
class ConnectionManager(object):
def __init__(self):
self._connections = local()
def load_backend(self, alias="default"):
try:
conf = getattr(settings, "NEEDLESTACK_CONNECTIONS")
except AttributeError as e:
raise ImproperlyConfigured("needlestack not configured") from e
if alias not in conf:
raise ImproperlyConfigured("connection with alias {0} "
"does not exists".format(alias))
_conf = conf[alias]
cls = utils.load_class(_conf["engine"])
params = _conf["options"]
return (cls, params)
def get_connection(self, alias="default"):
if hasattr(self._connections, alias):
return getattr(self._connections, alias)
cls, params = self.load_backend(alias)
instance = cls(**params)
setattr(self._connections, alias, instance)
return instance
def get_all_indexes(self):
base._load_all_indexes()
return base._get_all_indexes()
def get_index_by_name(self, name):
base._load_all_indexes()
return base._get_index_by_name(name)
def __getattr__(self, alias):
return self.get_connection(alias)
manager = ConnectionManager()
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
from threading import local
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from . import utils
class ConnectionManager(object):
def __init__(self):
self._connections = local()
def load_backend(self, alias="default"):
try:
conf = getattr(settings, "NEEDLESTACK_CONNECTIONS")
except AttributeError as e:
raise ImproperlyConfigured("needlestack not configured") from e
if alias not in conf:
raise ImproperlyConfigured("connection with alias {0} "
"does not exists".format(alias))
_conf = conf[alias]
cls = utils.load_class(_conf["engine"])
params = _conf["options"]
return (cls, params)
def get_connection(self, alias="default"):
if hasattr(self._connections, alias):
return getattr(self._connections, alias)
cls, params = self.load_backend(alias)
instance = cls(**params)
setattr(self._connections, alias, instance)
return instance
def __getattr__(self, alias):
return self.get_connection(alias)
manager = ConnectionManager()
Add helpers for obtain indexes to connection manager.
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
from threading import local
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from . import utils
from . import base
class ConnectionManager(object):
def __init__(self):
self._connections = local()
def load_backend(self, alias="default"):
try:
conf = getattr(settings, "NEEDLESTACK_CONNECTIONS")
except AttributeError as e:
raise ImproperlyConfigured("needlestack not configured") from e
if alias not in conf:
raise ImproperlyConfigured("connection with alias {0} "
"does not exists".format(alias))
_conf = conf[alias]
cls = utils.load_class(_conf["engine"])
params = _conf["options"]
return (cls, params)
def get_connection(self, alias="default"):
if hasattr(self._connections, alias):
return getattr(self._connections, alias)
cls, params = self.load_backend(alias)
instance = cls(**params)
setattr(self._connections, alias, instance)
return instance
def get_all_indexes(self):
base._load_all_indexes()
return base._get_all_indexes()
def get_index_by_name(self, name):
base._load_all_indexes()
return base._get_index_by_name(name)
def __getattr__(self, alias):
return self.get_connection(alias)
manager = ConnectionManager()
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
from threading import local
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from . import utils
class ConnectionManager(object):
def __init__(self):
self._connections = local()
def load_backend(self, alias="default"):
try:
conf = getattr(settings, "NEEDLESTACK_CONNECTIONS")
except AttributeError as e:
raise ImproperlyConfigured("needlestack not configured") from e
if alias not in conf:
raise ImproperlyConfigured("connection with alias {0} "
"does not exists".format(alias))
_conf = conf[alias]
cls = utils.load_class(_conf["engine"])
params = _conf["options"]
return (cls, params)
def get_connection(self, alias="default"):
if hasattr(self._connections, alias):
return getattr(self._connections, alias)
cls, params = self.load_backend(alias)
instance = cls(**params)
setattr(self._connections, alias, instance)
return instance
def __getattr__(self, alias):
return self.get_connection(alias)
manager = ConnectionManager()
<commit_msg>Add helpers for obtain indexes to connection manager.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals, absolute_import
from threading import local
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from . import utils
from . import base
class ConnectionManager(object):
def __init__(self):
self._connections = local()
def load_backend(self, alias="default"):
try:
conf = getattr(settings, "NEEDLESTACK_CONNECTIONS")
except AttributeError as e:
raise ImproperlyConfigured("needlestack not configured") from e
if alias not in conf:
raise ImproperlyConfigured("connection with alias {0} "
"does not exists".format(alias))
_conf = conf[alias]
cls = utils.load_class(_conf["engine"])
params = _conf["options"]
return (cls, params)
def get_connection(self, alias="default"):
if hasattr(self._connections, alias):
return getattr(self._connections, alias)
cls, params = self.load_backend(alias)
instance = cls(**params)
setattr(self._connections, alias, instance)
return instance
def get_all_indexes(self):
base._load_all_indexes()
return base._get_all_indexes()
def get_index_by_name(self, name):
base._load_all_indexes()
return base._get_index_by_name(name)
def __getattr__(self, alias):
return self.get_connection(alias)
manager = ConnectionManager()
|
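A brief sketch of calling the new helpers from application code, assuming a Django project with NEEDLESTACK_CONNECTIONS configured; the 'posts' index name is a placeholder.
# Illustrative use of the helpers added in this commit (index name is a placeholder).
from needlestack.connections import manager

for index in manager.get_all_indexes():          # every index class that was discovered
    print(index)

posts_index = manager.get_index_by_name('posts')  # single lookup by registered name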
a849544beed5b2ef717345c1de467382f95f804a
|
githubsetupircnotifications.py
|
githubsetupircnotifications.py
|
"""
github-setup-irc-notifications - Configure all repositories in an organization
with irc notifications
"""
import argparse
import getpass
import sys
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username')
parser.add_argument('--password')
parser.add_argument('--org')
parser.add_argument('--channel')
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
if github is None:
print('Failed to sign into github')
sys.exit(1)
org = github.organization(args.org)
if org is None:
print('Organization "{}" does not appear to exist'.format(args.org))
sys.exit(1)
conf = {'nickserv_password': '',
'no_colors': '0',
'password': '',
'branch_regexes': '',
'room': args.channel,
'ssl': '0',
'port': '',
'branches': '',
'server': 'chat.freenode.net',
'long_url': '0',
'notice': '0',
'message_without_join': '1',
'nick': 'github'
}
for r in org.iter_repos():
r.create_hook('irc', conf)
|
"""
github-setup-irc-notifications - Configure all repositories in an organization
with irc notifications
"""
import argparse
import getpass
import sys
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username')
parser.add_argument('--password')
parser.add_argument('--org')
parser.add_argument('--channel')
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
if github is None:
print('Failed to sign into github')
sys.exit(1)
org = github.organization(args.org)
if org is None:
print('Organization "{}" does not appear to exist'.format(args.org))
sys.exit(1)
conf = {'nickserv_password': '',
'no_colors': '0',
'password': '',
'branch_regexes': '',
'room': args.channel,
'ssl': '0',
'port': '',
'branches': '',
'server': 'chat.freenode.net',
'long_url': '0',
'notice': '0',
'message_without_join': '1',
'nick': 'github'
}
events = [
'push',
'delete',
'create',
'issues',
'pull_request'
]
for r in org.iter_repos():
r.create_hook('irc', conf, events=events)
|
Add more events to listen to
|
Add more events to listen to
|
Python
|
mit
|
kragniz/github-setup-irc-notifications
|
"""
github-setup-irc-notifications - Configure all repositories in an organization
with irc notifications
"""
import argparse
import getpass
import sys
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username')
parser.add_argument('--password')
parser.add_argument('--org')
parser.add_argument('--channel')
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
if github is None:
print('Failed to sign into github')
sys.exit(1)
org = github.organization(args.org)
if org is None:
print('Organization "{}" does not appear to exist'.format(args.org))
sys.exit(1)
conf = {'nickserv_password': '',
'no_colors': '0',
'password': '',
'branch_regexes': '',
'room': args.channel,
'ssl': '0',
'port': '',
'branches': '',
'server': 'chat.freenode.net',
'long_url': '0',
'notice': '0',
'message_without_join': '1',
'nick': 'github'
}
for r in org.iter_repos():
r.create_hook('irc', conf)
Add more events to listen to
|
"""
github-setup-irc-notifications - Configure all repositories in an organization
with irc notifications
"""
import argparse
import getpass
import sys
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username')
parser.add_argument('--password')
parser.add_argument('--org')
parser.add_argument('--channel')
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
if github is None:
print('Failed to sign into github')
sys.exit(1)
org = github.organization(args.org)
if org is None:
print('Organization "{}" does not appear to exist'.format(args.org))
sys.exit(1)
conf = {'nickserv_password': '',
'no_colors': '0',
'password': '',
'branch_regexes': '',
'room': args.channel,
'ssl': '0',
'port': '',
'branches': '',
'server': 'chat.freenode.net',
'long_url': '0',
'notice': '0',
'message_without_join': '1',
'nick': 'github'
}
events = [
'push',
'delete',
'create',
'issues',
'pull_request'
]
for r in org.iter_repos():
r.create_hook('irc', conf, events=events)
|
<commit_before>"""
github-setup-irc-notifications - Configure all repositories in an organization
with irc notifications
"""
import argparse
import getpass
import sys
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username')
parser.add_argument('--password')
parser.add_argument('--org')
parser.add_argument('--channel')
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
if github is None:
print('Failed to sign into github')
sys.exit(1)
org = github.organization(args.org)
if org is None:
print('Organization "{}" does not appear to exist'.format(args.org))
sys.exit(1)
conf = {'nickserv_password': '',
'no_colors': '0',
'password': '',
'branch_regexes': '',
'room': args.channel,
'ssl': '0',
'port': '',
'branches': '',
'server': 'chat.freenode.net',
'long_url': '0',
'notice': '0',
'message_without_join': '1',
'nick': 'github'
}
for r in org.iter_repos():
r.create_hook('irc', conf)
<commit_msg>Add more events to listen to<commit_after>
|
"""
github-setup-irc-notifications - Configure all repositories in an organization
with irc notifications
"""
import argparse
import getpass
import sys
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username')
parser.add_argument('--password')
parser.add_argument('--org')
parser.add_argument('--channel')
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
if github is None:
print('Failed to sign into github')
sys.exit(1)
org = github.organization(args.org)
if org is None:
print('Organization "{}" does not appear to exist'.format(args.org))
sys.exit(1)
conf = {'nickserv_password': '',
'no_colors': '0',
'password': '',
'branch_regexes': '',
'room': args.channel,
'ssl': '0',
'port': '',
'branches': '',
'server': 'chat.freenode.net',
'long_url': '0',
'notice': '0',
'message_without_join': '1',
'nick': 'github'
}
events = [
'push',
'delete',
'create',
'issues',
'pull_request'
]
for r in org.iter_repos():
r.create_hook('irc', conf, events=events)
|
"""
github-setup-irc-notifications - Configure all repositories in an organization
with irc notifications
"""
import argparse
import getpass
import sys
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username')
parser.add_argument('--password')
parser.add_argument('--org')
parser.add_argument('--channel')
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
if github is None:
print('Failed to sign into github')
sys.exit(1)
org = github.organization(args.org)
if org is None:
print('Organization "{}" does not appear to exist'.format(args.org))
sys.exit(1)
conf = {'nickserv_password': '',
'no_colors': '0',
'password': '',
'branch_regexes': '',
'room': args.channel,
'ssl': '0',
'port': '',
'branches': '',
'server': 'chat.freenode.net',
'long_url': '0',
'notice': '0',
'message_without_join': '1',
'nick': 'github'
}
for r in org.iter_repos():
r.create_hook('irc', conf)
Add more events to listen to
"""
github-setup-irc-notifications - Configure all repositories in an organization
with irc notifications
"""
import argparse
import getpass
import sys
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username')
parser.add_argument('--password')
parser.add_argument('--org')
parser.add_argument('--channel')
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
if github is None:
print('Failed to sign into github')
sys.exit(1)
org = github.organization(args.org)
if org is None:
print('Organization "{}" does not appear to exist'.format(args.org))
sys.exit(1)
conf = {'nickserv_password': '',
'no_colors': '0',
'password': '',
'branch_regexes': '',
'room': args.channel,
'ssl': '0',
'port': '',
'branches': '',
'server': 'chat.freenode.net',
'long_url': '0',
'notice': '0',
'message_without_join': '1',
'nick': 'github'
}
events = [
'push',
'delete',
'create',
'issues',
'pull_request'
]
for r in org.iter_repos():
r.create_hook('irc', conf, events=events)
|
<commit_before>"""
github-setup-irc-notifications - Configure all repositories in an organization
with irc notifications
"""
import argparse
import getpass
import sys
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username')
parser.add_argument('--password')
parser.add_argument('--org')
parser.add_argument('--channel')
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
if github is None:
print('Failed to sign into github')
sys.exit(1)
org = github.organization(args.org)
if org is None:
print('Organization "{}" does not appear to exist'.format(args.org))
sys.exit(1)
conf = {'nickserv_password': '',
'no_colors': '0',
'password': '',
'branch_regexes': '',
'room': args.channel,
'ssl': '0',
'port': '',
'branches': '',
'server': 'chat.freenode.net',
'long_url': '0',
'notice': '0',
'message_without_join': '1',
'nick': 'github'
}
for r in org.iter_repos():
r.create_hook('irc', conf)
<commit_msg>Add more events to listen to<commit_after>"""
github-setup-irc-notifications - Configure all repositories in an organization
with irc notifications
"""
import argparse
import getpass
import sys
import github3
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--username')
parser.add_argument('--password')
parser.add_argument('--org')
parser.add_argument('--channel')
args = parser.parse_args()
if args.password is None:
password = getpass.getpass(
'Password for github user "{}":'.format(args.username))
else:
password = args.password
github = github3.login(args.username, password=password)
if github is None:
print('Failed to sign into github')
sys.exit(1)
org = github.organization(args.org)
if org is None:
print('Organization "{}" does not appear to exist'.format(args.org))
sys.exit(1)
conf = {'nickserv_password': '',
'no_colors': '0',
'password': '',
'branch_regexes': '',
'room': args.channel,
'ssl': '0',
'port': '',
'branches': '',
'server': 'chat.freenode.net',
'long_url': '0',
'notice': '0',
'message_without_join': '1',
'nick': 'github'
}
events = [
'push',
'delete',
'create',
'issues',
'pull_request'
]
for r in org.iter_repos():
r.create_hook('irc', conf, events=events)
|
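A sketch of driving the updated script programmatically so the expanded events list takes effect; the username, password, organization, and channel values are placeholders, and the module is assumed to be importable under its file name.
# Placeholder credentials/org/channel; mirrors running the console script directly.
import sys
import githubsetupircnotifications

sys.argv = [
    'github-setup-irc-notifications',
    '--username', 'octocat',
    '--password', 'hunter2',      # omit this flag to be prompted via getpass instead
    '--org', 'example-org',
    '--channel', '#example-dev',
]
githubsetupircnotifications.main()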
777eeaf61c256f04031d87995b4bccd7a93f1182
|
lg_mirror/test/test_mirror_scene.py
|
lg_mirror/test/test_mirror_scene.py
|
#!/usr/bin/env python
import rospy
from interactivespaces_msgs.msg import GenericMessage
DIRECTOR_MESSAGE = """
{
"description": "bogus",
"duration": 0,
"name": "test whatever",
"resource_uri": "bogus",
"slug": "test message",
"windows": [
{
"activity": "mirror",
"activity_config": {
"viewport": "center"
},
"assets": [
],
"width": 450,
"height": 800,
"presentation_viewport": "left_one",
"x_coord": 0,
"y_coord": 0
},
{
"activity": "mirror",
"activity_config": {
"viewport": "center"
},
"assets": [
],
"width": 450,
"height": 800,
"presentation_viewport": "right_one",
"x_coord": 0,
"y_coord": 0
}
]
}
"""
if __name__ == '__main__':
scene_msg = GenericMessage()
scene_msg.type = 'json'
scene_msg.message = DIRECTOR_MESSAGE
rospy.init_node('director_messager')
scene_pub = rospy.Publisher('/director/scene', GenericMessage, queue_size=100)
rospy.sleep(2)
scene_pub.publish(scene_msg)
rospy.sleep(2)
|
#!/usr/bin/env python
import rospy
from interactivespaces_msgs.msg import GenericMessage
DIRECTOR_MESSAGE = """
{
"description": "bogus",
"duration": 0,
"name": "test whatever",
"resource_uri": "bogus",
"slug": "test message",
"windows": [
{
"activity": "mirror",
"activity_config": {
"viewport": "center"
},
"assets": [
],
"width": 450,
"height": 800,
"presentation_viewport": "left_one",
"x_coord": 0,
"y_coord": 0
}
]
}
"""
if __name__ == '__main__':
scene_msg = GenericMessage()
scene_msg.type = 'json'
scene_msg.message = DIRECTOR_MESSAGE
rospy.init_node('director_messager')
scene_pub = rospy.Publisher('/director/scene', GenericMessage, queue_size=100)
rospy.sleep(2)
scene_pub.publish(scene_msg)
rospy.sleep(2)
|
Update mirror test scene for single activity
|
Update mirror test scene for single activity
|
Python
|
apache-2.0
|
EndPointCorp/lg_ros_nodes,EndPointCorp/lg_ros_nodes,EndPointCorp/lg_ros_nodes,EndPointCorp/lg_ros_nodes,EndPointCorp/lg_ros_nodes,EndPointCorp/lg_ros_nodes
|
#!/usr/bin/env python
import rospy
from interactivespaces_msgs.msg import GenericMessage
DIRECTOR_MESSAGE = """
{
"description": "bogus",
"duration": 0,
"name": "test whatever",
"resource_uri": "bogus",
"slug": "test message",
"windows": [
{
"activity": "mirror",
"activity_config": {
"viewport": "center"
},
"assets": [
],
"width": 450,
"height": 800,
"presentation_viewport": "left_one",
"x_coord": 0,
"y_coord": 0
},
{
"activity": "mirror",
"activity_config": {
"viewport": "center"
},
"assets": [
],
"width": 450,
"height": 800,
"presentation_viewport": "right_one",
"x_coord": 0,
"y_coord": 0
}
]
}
"""
if __name__ == '__main__':
scene_msg = GenericMessage()
scene_msg.type = 'json'
scene_msg.message = DIRECTOR_MESSAGE
rospy.init_node('director_messager')
scene_pub = rospy.Publisher('/director/scene', GenericMessage, queue_size=100)
rospy.sleep(2)
scene_pub.publish(scene_msg)
rospy.sleep(2)
Update mirror test scene for single activity
|
#!/usr/bin/env python
import rospy
from interactivespaces_msgs.msg import GenericMessage
DIRECTOR_MESSAGE = """
{
"description": "bogus",
"duration": 0,
"name": "test whatever",
"resource_uri": "bogus",
"slug": "test message",
"windows": [
{
"activity": "mirror",
"activity_config": {
"viewport": "center"
},
"assets": [
],
"width": 450,
"height": 800,
"presentation_viewport": "left_one",
"x_coord": 0,
"y_coord": 0
}
]
}
"""
if __name__ == '__main__':
scene_msg = GenericMessage()
scene_msg.type = 'json'
scene_msg.message = DIRECTOR_MESSAGE
rospy.init_node('director_messager')
scene_pub = rospy.Publisher('/director/scene', GenericMessage, queue_size=100)
rospy.sleep(2)
scene_pub.publish(scene_msg)
rospy.sleep(2)
|
<commit_before>#!/usr/bin/env python
import rospy
from interactivespaces_msgs.msg import GenericMessage
DIRECTOR_MESSAGE = """
{
"description": "bogus",
"duration": 0,
"name": "test whatever",
"resource_uri": "bogus",
"slug": "test message",
"windows": [
{
"activity": "mirror",
"activity_config": {
"viewport": "center"
},
"assets": [
],
"width": 450,
"height": 800,
"presentation_viewport": "left_one",
"x_coord": 0,
"y_coord": 0
},
{
"activity": "mirror",
"activity_config": {
"viewport": "center"
},
"assets": [
],
"width": 450,
"height": 800,
"presentation_viewport": "right_one",
"x_coord": 0,
"y_coord": 0
}
]
}
"""
if __name__ == '__main__':
scene_msg = GenericMessage()
scene_msg.type = 'json'
scene_msg.message = DIRECTOR_MESSAGE
rospy.init_node('director_messager')
scene_pub = rospy.Publisher('/director/scene', GenericMessage, queue_size=100)
rospy.sleep(2)
scene_pub.publish(scene_msg)
rospy.sleep(2)
<commit_msg>Update mirror test scene for single activity<commit_after>
|
#!/usr/bin/env python
import rospy
from interactivespaces_msgs.msg import GenericMessage
DIRECTOR_MESSAGE = """
{
"description": "bogus",
"duration": 0,
"name": "test whatever",
"resource_uri": "bogus",
"slug": "test message",
"windows": [
{
"activity": "mirror",
"activity_config": {
"viewport": "center"
},
"assets": [
],
"width": 450,
"height": 800,
"presentation_viewport": "left_one",
"x_coord": 0,
"y_coord": 0
}
]
}
"""
if __name__ == '__main__':
scene_msg = GenericMessage()
scene_msg.type = 'json'
scene_msg.message = DIRECTOR_MESSAGE
rospy.init_node('director_messager')
scene_pub = rospy.Publisher('/director/scene', GenericMessage, queue_size=100)
rospy.sleep(2)
scene_pub.publish(scene_msg)
rospy.sleep(2)
|
#!/usr/bin/env python
import rospy
from interactivespaces_msgs.msg import GenericMessage
DIRECTOR_MESSAGE = """
{
"description": "bogus",
"duration": 0,
"name": "test whatever",
"resource_uri": "bogus",
"slug": "test message",
"windows": [
{
"activity": "mirror",
"activity_config": {
"viewport": "center"
},
"assets": [
],
"width": 450,
"height": 800,
"presentation_viewport": "left_one",
"x_coord": 0,
"y_coord": 0
},
{
"activity": "mirror",
"activity_config": {
"viewport": "center"
},
"assets": [
],
"width": 450,
"height": 800,
"presentation_viewport": "right_one",
"x_coord": 0,
"y_coord": 0
}
]
}
"""
if __name__ == '__main__':
scene_msg = GenericMessage()
scene_msg.type = 'json'
scene_msg.message = DIRECTOR_MESSAGE
rospy.init_node('director_messager')
scene_pub = rospy.Publisher('/director/scene', GenericMessage, queue_size=100)
rospy.sleep(2)
scene_pub.publish(scene_msg)
rospy.sleep(2)
Update mirror test scene for single activity#!/usr/bin/env python
import rospy
from interactivespaces_msgs.msg import GenericMessage
DIRECTOR_MESSAGE = """
{
"description": "bogus",
"duration": 0,
"name": "test whatever",
"resource_uri": "bogus",
"slug": "test message",
"windows": [
{
"activity": "mirror",
"activity_config": {
"viewport": "center"
},
"assets": [
],
"width": 450,
"height": 800,
"presentation_viewport": "left_one",
"x_coord": 0,
"y_coord": 0
}
]
}
"""
if __name__ == '__main__':
scene_msg = GenericMessage()
scene_msg.type = 'json'
scene_msg.message = DIRECTOR_MESSAGE
rospy.init_node('director_messager')
scene_pub = rospy.Publisher('/director/scene', GenericMessage, queue_size=100)
rospy.sleep(2)
scene_pub.publish(scene_msg)
rospy.sleep(2)
|
<commit_before>#!/usr/bin/env python
import rospy
from interactivespaces_msgs.msg import GenericMessage
DIRECTOR_MESSAGE = """
{
"description": "bogus",
"duration": 0,
"name": "test whatever",
"resource_uri": "bogus",
"slug": "test message",
"windows": [
{
"activity": "mirror",
"activity_config": {
"viewport": "center"
},
"assets": [
],
"width": 450,
"height": 800,
"presentation_viewport": "left_one",
"x_coord": 0,
"y_coord": 0
},
{
"activity": "mirror",
"activity_config": {
"viewport": "center"
},
"assets": [
],
"width": 450,
"height": 800,
"presentation_viewport": "right_one",
"x_coord": 0,
"y_coord": 0
}
]
}
"""
if __name__ == '__main__':
scene_msg = GenericMessage()
scene_msg.type = 'json'
scene_msg.message = DIRECTOR_MESSAGE
rospy.init_node('director_messager')
scene_pub = rospy.Publisher('/director/scene', GenericMessage, queue_size=100)
rospy.sleep(2)
scene_pub.publish(scene_msg)
rospy.sleep(2)
<commit_msg>Update mirror test scene for single activity<commit_after>#!/usr/bin/env python
import rospy
from interactivespaces_msgs.msg import GenericMessage
DIRECTOR_MESSAGE = """
{
"description": "bogus",
"duration": 0,
"name": "test whatever",
"resource_uri": "bogus",
"slug": "test message",
"windows": [
{
"activity": "mirror",
"activity_config": {
"viewport": "center"
},
"assets": [
],
"width": 450,
"height": 800,
"presentation_viewport": "left_one",
"x_coord": 0,
"y_coord": 0
}
]
}
"""
if __name__ == '__main__':
scene_msg = GenericMessage()
scene_msg.type = 'json'
scene_msg.message = DIRECTOR_MESSAGE
rospy.init_node('director_messager')
scene_pub = rospy.Publisher('/director/scene', GenericMessage, queue_size=100)
rospy.sleep(2)
scene_pub.publish(scene_msg)
rospy.sleep(2)
|
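Aside on the record above (illustrative, not part of the commit): with the second window removed, a quick structural check of the remaining scene JSON only needs the standard library. The sketch assumes DIRECTOR_MESSAGE from the test script is in scope.
import json
scene = json.loads(DIRECTOR_MESSAGE)  # the single-activity scene defined above
windows = scene["windows"]
assert len(windows) == 1                                  # one mirror window left
assert windows[0]["activity"] == "mirror"
assert windows[0]["presentation_viewport"] == "left_one"  # only the left viewport remains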
13daca3feedd8df8803904a60199a9dfa47dad8d
|
fuel_test/cobbler/test_single.py
|
fuel_test/cobbler/test_single.py
|
import unittest
from fuel_test.cobbler.cobbler_test_case import CobblerTestCase
from fuel_test.manifest import Manifest
from fuel_test.settings import OPENSTACK_SNAPSHOT
class SingleTestCase(CobblerTestCase):
def test_single(self):
Manifest().write_openstack_single_manifest(
remote=self.remote(),
ci=self.ci(),
)
self.validate(
self.nodes().controllers,
'puppet agent --test')
self.environment().snapshot(OPENSTACK_SNAPSHOT, force=True)
if __name__ == '__main__':
unittest.main()
|
import unittest
from fuel_test.cobbler.cobbler_test_case import CobblerTestCase
from fuel_test.manifest import Manifest
from fuel_test.settings import OPENSTACK_SNAPSHOT
class SingleTestCase(CobblerTestCase):
def test_single(self):
Manifest().write_openstack_single_manifest(
remote=self.remote(),
ci=self.ci(),
quantum=False,
)
self.validate(
self.nodes().controllers,
'puppet agent --test')
self.environment().snapshot(OPENSTACK_SNAPSHOT, force=True)
if __name__ == '__main__':
unittest.main()
|
Switch off quantum at single node in test
|
Switch off quantum at single node in test
|
Python
|
apache-2.0
|
huntxu/fuel-library,eayunstack/fuel-library,eayunstack/fuel-library,SmartInfrastructures/fuel-library-dev,SmartInfrastructures/fuel-library-dev,stackforge/fuel-library,huntxu/fuel-library,stackforge/fuel-library,zhaochao/fuel-library,xarses/fuel-library,SmartInfrastructures/fuel-library-dev,ddepaoli3/fuel-library-dev,zhaochao/fuel-library,xarses/fuel-library,slystopad/fuel-lib,stackforge/fuel-library,zhaochao/fuel-library,ddepaoli3/fuel-library-dev,Metaswitch/fuel-library,ddepaoli3/fuel-library-dev,slystopad/fuel-lib,stackforge/fuel-library,xarses/fuel-library,eayunstack/fuel-library,Metaswitch/fuel-library,Metaswitch/fuel-library,eayunstack/fuel-library,Metaswitch/fuel-library,zhaochao/fuel-library,SmartInfrastructures/fuel-library-dev,ddepaoli3/fuel-library-dev,huntxu/fuel-library,slystopad/fuel-lib,huntxu/fuel-library,slystopad/fuel-lib,zhaochao/fuel-library,SmartInfrastructures/fuel-library-dev,eayunstack/fuel-library,huntxu/fuel-library,ddepaoli3/fuel-library-dev,xarses/fuel-library
|
import unittest
from fuel_test.cobbler.cobbler_test_case import CobblerTestCase
from fuel_test.manifest import Manifest
from fuel_test.settings import OPENSTACK_SNAPSHOT
class SingleTestCase(CobblerTestCase):
def test_single(self):
Manifest().write_openstack_single_manifest(
remote=self.remote(),
ci=self.ci(),
)
self.validate(
self.nodes().controllers,
'puppet agent --test')
self.environment().snapshot(OPENSTACK_SNAPSHOT, force=True)
if __name__ == '__main__':
unittest.main()
Switch off quantum at single node in test
|
import unittest
from fuel_test.cobbler.cobbler_test_case import CobblerTestCase
from fuel_test.manifest import Manifest
from fuel_test.settings import OPENSTACK_SNAPSHOT
class SingleTestCase(CobblerTestCase):
def test_single(self):
Manifest().write_openstack_single_manifest(
remote=self.remote(),
ci=self.ci(),
quantum=False,
)
self.validate(
self.nodes().controllers,
'puppet agent --test')
self.environment().snapshot(OPENSTACK_SNAPSHOT, force=True)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from fuel_test.cobbler.cobbler_test_case import CobblerTestCase
from fuel_test.manifest import Manifest
from fuel_test.settings import OPENSTACK_SNAPSHOT
class SingleTestCase(CobblerTestCase):
def test_single(self):
Manifest().write_openstack_single_manifest(
remote=self.remote(),
ci=self.ci(),
)
self.validate(
self.nodes().controllers,
'puppet agent --test')
self.environment().snapshot(OPENSTACK_SNAPSHOT, force=True)
if __name__ == '__main__':
unittest.main()
<commit_msg>Switch off quantum at single node in test<commit_after>
|
import unittest
from fuel_test.cobbler.cobbler_test_case import CobblerTestCase
from fuel_test.manifest import Manifest
from fuel_test.settings import OPENSTACK_SNAPSHOT
class SingleTestCase(CobblerTestCase):
def test_single(self):
Manifest().write_openstack_single_manifest(
remote=self.remote(),
ci=self.ci(),
quantum=False,
)
self.validate(
self.nodes().controllers,
'puppet agent --test')
self.environment().snapshot(OPENSTACK_SNAPSHOT, force=True)
if __name__ == '__main__':
unittest.main()
|
import unittest
from fuel_test.cobbler.cobbler_test_case import CobblerTestCase
from fuel_test.manifest import Manifest
from fuel_test.settings import OPENSTACK_SNAPSHOT
class SingleTestCase(CobblerTestCase):
def test_single(self):
Manifest().write_openstack_single_manifest(
remote=self.remote(),
ci=self.ci(),
)
self.validate(
self.nodes().controllers,
'puppet agent --test')
self.environment().snapshot(OPENSTACK_SNAPSHOT, force=True)
if __name__ == '__main__':
unittest.main()
Switch off quantum at single node in testimport unittest
from fuel_test.cobbler.cobbler_test_case import CobblerTestCase
from fuel_test.manifest import Manifest
from fuel_test.settings import OPENSTACK_SNAPSHOT
class SingleTestCase(CobblerTestCase):
def test_single(self):
Manifest().write_openstack_single_manifest(
remote=self.remote(),
ci=self.ci(),
quantum=False,
)
self.validate(
self.nodes().controllers,
'puppet agent --test')
self.environment().snapshot(OPENSTACK_SNAPSHOT, force=True)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from fuel_test.cobbler.cobbler_test_case import CobblerTestCase
from fuel_test.manifest import Manifest
from fuel_test.settings import OPENSTACK_SNAPSHOT
class SingleTestCase(CobblerTestCase):
def test_single(self):
Manifest().write_openstack_single_manifest(
remote=self.remote(),
ci=self.ci(),
)
self.validate(
self.nodes().controllers,
'puppet agent --test')
self.environment().snapshot(OPENSTACK_SNAPSHOT, force=True)
if __name__ == '__main__':
unittest.main()
<commit_msg>Switch off quantum at single node in test<commit_after>import unittest
from fuel_test.cobbler.cobbler_test_case import CobblerTestCase
from fuel_test.manifest import Manifest
from fuel_test.settings import OPENSTACK_SNAPSHOT
class SingleTestCase(CobblerTestCase):
def test_single(self):
Manifest().write_openstack_single_manifest(
remote=self.remote(),
ci=self.ci(),
quantum=False,
)
self.validate(
self.nodes().controllers,
'puppet agent --test')
self.environment().snapshot(OPENSTACK_SNAPSHOT, force=True)
if __name__ == '__main__':
unittest.main()
|
4595138de6d84698404e101031c6d2242753e5f4
|
views.py
|
views.py
|
from django.http import HttpResponse
from .models import Notification
def create_notification(request):
topic = request.GET.get('topic', None)
resource_id = request.GET.get('id', None)
if topic is None:
return HttpResponse(
'<h1>400 Bad Request.</h1>Missing parameter topic',
status=400
)
if resource_id is None:
return HttpResponse(
'<h1>400 Bad Request.</h1>Missing parameter id',
status=400
)
if topic == 'merchant_order':
topic = Notification.TOPIC_ORDER
elif topic == 'payment':
topic = Notification.TOPIC_PAYMENT
else:
return HttpResponse('invalid topic', status=400)
notification, created = Notification.objects.get_or_create(
topic=topic,
resource_id=resource_id,
)
if not created:
notification.processed = False
notification.save()
return HttpResponse("<h1>200 OK</h1>", status=201)
|
from django.conf import settings
from django.http import HttpResponse
from .models import Notification
def create_notification(request):
topic = request.GET.get('topic', None)
resource_id = request.GET.get('id', None)
if topic is None:
return HttpResponse(
'<h1>400 Bad Request.</h1>Missing parameter topic',
status=400
)
if resource_id is None:
return HttpResponse(
'<h1>400 Bad Request.</h1>Missing parameter id',
status=400
)
if topic == 'merchant_order':
topic = Notification.TOPIC_ORDER
elif topic == 'payment':
topic = Notification.TOPIC_PAYMENT
else:
return HttpResponse('invalid topic', status=400)
notification, created = Notification.objects.get_or_create(
topic=topic,
resource_id=resource_id,
)
if not created:
notification.processed = False
notification.save()
if not settings.MERCADOPAGO_ASYNC:
notification.process()
# TODO: Else add to some queue?
return HttpResponse("<h1>200 OK</h1>", status=201)
|
Add async notification processing support
|
Add async notification processing support
|
Python
|
isc
|
asermax/django-mercadopago
|
from django.http import HttpResponse
from .models import Notification
def create_notification(request):
topic = request.GET.get('topic', None)
resource_id = request.GET.get('id', None)
if topic is None:
return HttpResponse(
'<h1>400 Bad Request.</h1>Missing parameter topic',
status=400
)
if resource_id is None:
return HttpResponse(
'<h1>400 Bad Request.</h1>Missing parameter id',
status=400
)
if topic == 'merchant_order':
topic = Notification.TOPIC_ORDER
elif topic == 'payment':
topic = Notification.TOPIC_PAYMENT
else:
return HttpResponse('invalid topic', status=400)
notification, created = Notification.objects.get_or_create(
topic=topic,
resource_id=resource_id,
)
if not created:
notification.processed = False
notification.save()
return HttpResponse("<h1>200 OK</h1>", status=201)
Add async notification processing support
|
from django.conf import settings
from django.http import HttpResponse
from .models import Notification
def create_notification(request):
topic = request.GET.get('topic', None)
resource_id = request.GET.get('id', None)
if topic is None:
return HttpResponse(
'<h1>400 Bad Request.</h1>Missing parameter topic',
status=400
)
if resource_id is None:
return HttpResponse(
'<h1>400 Bad Request.</h1>Missing parameter id',
status=400
)
if topic == 'merchant_order':
topic = Notification.TOPIC_ORDER
elif topic == 'payment':
topic = Notification.TOPIC_PAYMENT
else:
return HttpResponse('invalid topic', status=400)
notification, created = Notification.objects.get_or_create(
topic=topic,
resource_id=resource_id,
)
if not created:
notification.processed = False
notification.save()
if not settings.MERCADOPAGO_ASYNC:
notification.process()
# TODO: Else add to some queue?
return HttpResponse("<h1>200 OK</h1>", status=201)
|
<commit_before>from django.http import HttpResponse
from .models import Notification
def create_notification(request):
topic = request.GET.get('topic', None)
resource_id = request.GET.get('id', None)
if topic is None:
return HttpResponse(
'<h1>400 Bad Request.</h1>Missing parameter topic',
status=400
)
if resource_id is None:
return HttpResponse(
'<h1>400 Bad Request.</h1>Missing parameter id',
status=400
)
if topic == 'merchant_order':
topic = Notification.TOPIC_ORDER
elif topic == 'payment':
topic = Notification.TOPIC_PAYMENT
else:
return HttpResponse('invalid topic', status=400)
notification, created = Notification.objects.get_or_create(
topic=topic,
resource_id=resource_id,
)
if not created:
notification.processed = False
notification.save()
return HttpResponse("<h1>200 OK</h1>", status=201)
<commit_msg>Add async notification processing support<commit_after>
|
from django.conf import settings
from django.http import HttpResponse
from .models import Notification
def create_notification(request):
topic = request.GET.get('topic', None)
resource_id = request.GET.get('id', None)
if topic is None:
return HttpResponse(
'<h1>400 Bad Request.</h1>Missing parameter topic',
status=400
)
if resource_id is None:
return HttpResponse(
'<h1>400 Bad Request.</h1>Missing parameter id',
status=400
)
if topic == 'merchant_order':
topic = Notification.TOPIC_ORDER
elif topic == 'payment':
topic = Notification.TOPIC_PAYMENT
else:
return HttpResponse('invalid topic', status=400)
notification, created = Notification.objects.get_or_create(
topic=topic,
resource_id=resource_id,
)
if not created:
notification.processed = False
notification.save()
if not settings.MERCADOPAGO_ASYNC:
notification.process()
# TODO: Else add to some queue?
return HttpResponse("<h1>200 OK</h1>", status=201)
|
from django.http import HttpResponse
from .models import Notification
def create_notification(request):
topic = request.GET.get('topic', None)
resource_id = request.GET.get('id', None)
if topic is None:
return HttpResponse(
'<h1>400 Bad Request.</h1>Missing parameter topic',
status=400
)
if resource_id is None:
return HttpResponse(
'<h1>400 Bad Request.</h1>Missing parameter id',
status=400
)
if topic == 'merchant_order':
topic = Notification.TOPIC_ORDER
elif topic == 'payment':
topic = Notification.TOPIC_PAYMENT
else:
return HttpResponse('invalid topic', status=400)
notification, created = Notification.objects.get_or_create(
topic=topic,
resource_id=resource_id,
)
if not created:
notification.processed = False
notification.save()
return HttpResponse("<h1>200 OK</h1>", status=201)
Add async notification processing supportfrom django.conf import settings
from django.http import HttpResponse
from .models import Notification
def create_notification(request):
topic = request.GET.get('topic', None)
resource_id = request.GET.get('id', None)
if topic is None:
return HttpResponse(
'<h1>400 Bad Request.</h1>Missing parameter topic',
status=400
)
if resource_id is None:
return HttpResponse(
'<h1>400 Bad Request.</h1>Missing parameter id',
status=400
)
if topic == 'merchant_order':
topic = Notification.TOPIC_ORDER
elif topic == 'payment':
topic = Notification.TOPIC_PAYMENT
else:
return HttpResponse('invalid topic', status=400)
notification, created = Notification.objects.get_or_create(
topic=topic,
resource_id=resource_id,
)
if not created:
notification.processed = False
notification.save()
if not settings.MERCADOPAGO_ASYNC:
notification.process()
# TODO: Else add to some queue?
return HttpResponse("<h1>200 OK</h1>", status=201)
|
<commit_before>from django.http import HttpResponse
from .models import Notification
def create_notification(request):
topic = request.GET.get('topic', None)
resource_id = request.GET.get('id', None)
if topic is None:
return HttpResponse(
'<h1>400 Bad Request.</h1>Missing parameter topic',
status=400
)
if resource_id is None:
return HttpResponse(
'<h1>400 Bad Request.</h1>Missing parameter id',
status=400
)
if topic == 'merchant_order':
topic = Notification.TOPIC_ORDER
elif topic == 'payment':
topic = Notification.TOPIC_PAYMENT
else:
return HttpResponse('invalid topic', status=400)
notification, created = Notification.objects.get_or_create(
topic=topic,
resource_id=resource_id,
)
if not created:
notification.processed = False
notification.save()
return HttpResponse("<h1>200 OK</h1>", status=201)
<commit_msg>Add async notification processing support<commit_after>from django.conf import settings
from django.http import HttpResponse
from .models import Notification
def create_notification(request):
topic = request.GET.get('topic', None)
resource_id = request.GET.get('id', None)
if topic is None:
return HttpResponse(
'<h1>400 Bad Request.</h1>Missing parameter topic',
status=400
)
if resource_id is None:
return HttpResponse(
'<h1>400 Bad Request.</h1>Missing parameter id',
status=400
)
if topic == 'merchant_order':
topic = Notification.TOPIC_ORDER
elif topic == 'payment':
topic = Notification.TOPIC_PAYMENT
else:
return HttpResponse('invalid topic', status=400)
notification, created = Notification.objects.get_or_create(
topic=topic,
resource_id=resource_id,
)
if not created:
notification.processed = False
notification.save()
if not settings.MERCADOPAGO_ASYNC:
notification.process()
# TODO: Else add to some queue?
return HttpResponse("<h1>200 OK</h1>", status=201)
|
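Note on the record above: the new branch keys off a MERCADOPAGO_ASYNC flag in Django settings. The sketch below is only a hedged illustration of exercising the synchronous path — the view's import path is an assumption, the setting name comes from the diff, and a real test would mock Notification.process so no MercadoPago API call is made.
from django.test import RequestFactory, override_settings
from django_mercadopago.views import create_notification  # assumed module path

@override_settings(MERCADOPAGO_ASYNC=False)
def check_inline_processing():
    # Simulate MercadoPago's callback; with the flag off, the view calls
    # notification.process() before returning 201.
    request = RequestFactory().get("/notifications/", {"topic": "payment", "id": "123"})
    response = create_notification(request)
    assert response.status_code == 201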
ebe91245ad4688346d716aaae5e29a409055c6c6
|
python-package/src/minerva/storage/__init__.py
|
python-package/src/minerva/storage/__init__.py
|
"""
Provides access and a location for storage class logic like 'trend',
'attribute', etc..
"""
__docformat__ = "restructuredtext en"
__copyright__ = """
Copyright (C) 2008-2013 Hendrikx-ITC B.V.
Distributed under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option) any later
version. The full license is in the file COPYING, distributed as part of
this software.
"""
def load_plugins():
from minerva.storage import trend, attribute, geospatial, notification, delta
"""
Load and return a dictionary with plugins by their names.
"""
return {
'attribute': attribute.create,
'trend': trend.create,
'notification': notification.NotificationPlugin,
'geospatial': geospatial.create
}
def get_plugin(name):
"""
Return storage plugin with name `name`.
"""
return load_plugins().get(name)
|
"""
Provides access and a location for storage class logic like 'trend',
'attribute', etc..
"""
__docformat__ = "restructuredtext en"
__copyright__ = """
Copyright (C) 2008-2013 Hendrikx-ITC B.V.
Distributed under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option) any later
version. The full license is in the file COPYING, distributed as part of
this software.
"""
def load_plugins():
from minerva.storage import trend, attribute, geospatial, notification
"""
Load and return a dictionary with plugins by their names.
"""
return {
'attribute': attribute.create,
'trend': trend.create,
'notification': notification.NotificationPlugin,
'geospatial': geospatial.create
}
def get_plugin(name):
"""
Return storage plugin with name `name`.
"""
return load_plugins().get(name)
|
Remove import of delta module.
|
Remove import of delta module.
|
Python
|
agpl-3.0
|
hendrikx-itc/minerva,hendrikx-itc/minerva
|
"""
Provides access and a location for storage class logic like 'trend',
'attribute', etc..
"""
__docformat__ = "restructuredtext en"
__copyright__ = """
Copyright (C) 2008-2013 Hendrikx-ITC B.V.
Distributed under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option) any later
version. The full license is in the file COPYING, distributed as part of
this software.
"""
def load_plugins():
from minerva.storage import trend, attribute, geospatial, notification, delta
"""
Load and return a dictionary with plugins by their names.
"""
return {
'attribute': attribute.create,
'trend': trend.create,
'notification': notification.NotificationPlugin,
'geospatial': geospatial.create
}
def get_plugin(name):
"""
Return storage plugin with name `name`.
"""
return load_plugins().get(name)
Remove import of delta module.
|
"""
Provides access and a location for storage class logic like 'trend',
'attribute', etc..
"""
__docformat__ = "restructuredtext en"
__copyright__ = """
Copyright (C) 2008-2013 Hendrikx-ITC B.V.
Distributed under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option) any later
version. The full license is in the file COPYING, distributed as part of
this software.
"""
def load_plugins():
from minerva.storage import trend, attribute, geospatial, notification
"""
Load and return a dictionary with plugins by their names.
"""
return {
'attribute': attribute.create,
'trend': trend.create,
'notification': notification.NotificationPlugin,
'geospatial': geospatial.create
}
def get_plugin(name):
"""
Return storage plugin with name `name`.
"""
return load_plugins().get(name)
|
<commit_before>"""
Provides access and a location for storage class logic like 'trend',
'attribute', etc..
"""
__docformat__ = "restructuredtext en"
__copyright__ = """
Copyright (C) 2008-2013 Hendrikx-ITC B.V.
Distributed under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option) any later
version. The full license is in the file COPYING, distributed as part of
this software.
"""
def load_plugins():
from minerva.storage import trend, attribute, geospatial, notification, delta
"""
Load and return a dictionary with plugins by their names.
"""
return {
'attribute': attribute.create,
'trend': trend.create,
'notification': notification.NotificationPlugin,
'geospatial': geospatial.create
}
def get_plugin(name):
"""
Return storage plugin with name `name`.
"""
return load_plugins().get(name)
<commit_msg>Remove import of delta module.<commit_after>
|
"""
Provides access and a location for storage class logic like 'trend',
'attribute', etc..
"""
__docformat__ = "restructuredtext en"
__copyright__ = """
Copyright (C) 2008-2013 Hendrikx-ITC B.V.
Distributed under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option) any later
version. The full license is in the file COPYING, distributed as part of
this software.
"""
def load_plugins():
from minerva.storage import trend, attribute, geospatial, notification
"""
Load and return a dictionary with plugins by their names.
"""
return {
'attribute': attribute.create,
'trend': trend.create,
'notification': notification.NotificationPlugin,
'geospatial': geospatial.create
}
def get_plugin(name):
"""
Return storage plugin with name `name`.
"""
return load_plugins().get(name)
|
"""
Provides access and a location for storage class logic like 'trend',
'attribute', etc..
"""
__docformat__ = "restructuredtext en"
__copyright__ = """
Copyright (C) 2008-2013 Hendrikx-ITC B.V.
Distributed under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option) any later
version. The full license is in the file COPYING, distributed as part of
this software.
"""
def load_plugins():
from minerva.storage import trend, attribute, geospatial, notification, delta
"""
Load and return a dictionary with plugins by their names.
"""
return {
'attribute': attribute.create,
'trend': trend.create,
'notification': notification.NotificationPlugin,
'geospatial': geospatial.create
}
def get_plugin(name):
"""
Return storage plugin with name `name`.
"""
return load_plugins().get(name)
Remove import of delta module."""
Provides access and a location for storage class logic like 'trend',
'attribute', etc..
"""
__docformat__ = "restructuredtext en"
__copyright__ = """
Copyright (C) 2008-2013 Hendrikx-ITC B.V.
Distributed under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option) any later
version. The full license is in the file COPYING, distributed as part of
this software.
"""
def load_plugins():
from minerva.storage import trend, attribute, geospatial, notification
"""
Load and return a dictionary with plugins by their names.
"""
return {
'attribute': attribute.create,
'trend': trend.create,
'notification': notification.NotificationPlugin,
'geospatial': geospatial.create
}
def get_plugin(name):
"""
Return storage plugin with name `name`.
"""
return load_plugins().get(name)
|
<commit_before>"""
Provides access and a location for storage class logic like 'trend',
'attribute', etc..
"""
__docformat__ = "restructuredtext en"
__copyright__ = """
Copyright (C) 2008-2013 Hendrikx-ITC B.V.
Distributed under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option) any later
version. The full license is in the file COPYING, distributed as part of
this software.
"""
def load_plugins():
from minerva.storage import trend, attribute, geospatial, notification, delta
"""
Load and return a dictionary with plugins by their names.
"""
return {
'attribute': attribute.create,
'trend': trend.create,
'notification': notification.NotificationPlugin,
'geospatial': geospatial.create
}
def get_plugin(name):
"""
Return storage plugin with name `name`.
"""
return load_plugins().get(name)
<commit_msg>Remove import of delta module.<commit_after>"""
Provides access and a location for storage class logic like 'trend',
'attribute', etc..
"""
__docformat__ = "restructuredtext en"
__copyright__ = """
Copyright (C) 2008-2013 Hendrikx-ITC B.V.
Distributed under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option) any later
version. The full license is in the file COPYING, distributed as part of
this software.
"""
def load_plugins():
from minerva.storage import trend, attribute, geospatial, notification
"""
Load and return a dictionary with plugins by their names.
"""
return {
'attribute': attribute.create,
'trend': trend.create,
'notification': notification.NotificationPlugin,
'geospatial': geospatial.create
}
def get_plugin(name):
"""
Return storage plugin with name `name`.
"""
return load_plugins().get(name)
|
f76a766f7be4936d34dc14e65a0f1fd974055b20
|
fireplace/cards/tgt/paladin.py
|
fireplace/cards/tgt/paladin.py
|
from ..utils import *
##
# Minions
# Murloc Knight
class AT_076:
inspire = Summon(CONTROLLER, RandomMurloc())
# Eadric the Pure
class AT_081:
play = Buff(ALL_MINIONS, "AT_081e")
##
# Spells
# Seal of Champions
class AT_074:
play = Buff(TARGET, "AT_074e2")
##
# Secrets
# Competitive Spirit
class AT_073:
events = OWN_TURN_BEGIN.on(
Buff(FRIENDLY_MINIONS, "AT_073e"), Reveal(SELF)
)
|
from ..utils import *
##
# Minions
# Murloc Knight
class AT_076:
inspire = Summon(CONTROLLER, RandomMurloc())
# Eadric the Pure
class AT_081:
play = Buff(ENEMY_MINIONS, "AT_081e")
##
# Spells
# Seal of Champions
class AT_074:
play = Buff(TARGET, "AT_074e2")
##
# Secrets
# Competitive Spirit
class AT_073:
events = OWN_TURN_BEGIN.on(
Buff(FRIENDLY_MINIONS, "AT_073e"), Reveal(SELF)
)
|
Fix Eadric the Pure's target selection
|
Fix Eadric the Pure's target selection
|
Python
|
agpl-3.0
|
liujimj/fireplace,beheh/fireplace,NightKev/fireplace,smallnamespace/fireplace,jleclanche/fireplace,amw2104/fireplace,oftc-ftw/fireplace,Meerkov/fireplace,Ragowit/fireplace,amw2104/fireplace,oftc-ftw/fireplace,Ragowit/fireplace,liujimj/fireplace,Meerkov/fireplace,smallnamespace/fireplace
|
from ..utils import *
##
# Minions
# Murloc Knight
class AT_076:
inspire = Summon(CONTROLLER, RandomMurloc())
# Eadric the Pure
class AT_081:
play = Buff(ALL_MINIONS, "AT_081e")
##
# Spells
# Seal of Champions
class AT_074:
play = Buff(TARGET, "AT_074e2")
##
# Secrets
# Competitive Spirit
class AT_073:
events = OWN_TURN_BEGIN.on(
Buff(FRIENDLY_MINIONS, "AT_073e"), Reveal(SELF)
)
Fix Eadric the Pure's target selection
|
from ..utils import *
##
# Minions
# Murloc Knight
class AT_076:
inspire = Summon(CONTROLLER, RandomMurloc())
# Eadric the Pure
class AT_081:
play = Buff(ENEMY_MINIONS, "AT_081e")
##
# Spells
# Seal of Champions
class AT_074:
play = Buff(TARGET, "AT_074e2")
##
# Secrets
# Competitive Spirit
class AT_073:
events = OWN_TURN_BEGIN.on(
Buff(FRIENDLY_MINIONS, "AT_073e"), Reveal(SELF)
)
|
<commit_before>from ..utils import *
##
# Minions
# Murloc Knight
class AT_076:
inspire = Summon(CONTROLLER, RandomMurloc())
# Eadric the Pure
class AT_081:
play = Buff(ALL_MINIONS, "AT_081e")
##
# Spells
# Seal of Champions
class AT_074:
play = Buff(TARGET, "AT_074e2")
##
# Secrets
# Competitive Spirit
class AT_073:
events = OWN_TURN_BEGIN.on(
Buff(FRIENDLY_MINIONS, "AT_073e"), Reveal(SELF)
)
<commit_msg>Fix Eadric the Pure's target selection<commit_after>
|
from ..utils import *
##
# Minions
# Murloc Knight
class AT_076:
inspire = Summon(CONTROLLER, RandomMurloc())
# Eadric the Pure
class AT_081:
play = Buff(ENEMY_MINIONS, "AT_081e")
##
# Spells
# Seal of Champions
class AT_074:
play = Buff(TARGET, "AT_074e2")
##
# Secrets
# Competitive Spirit
class AT_073:
events = OWN_TURN_BEGIN.on(
Buff(FRIENDLY_MINIONS, "AT_073e"), Reveal(SELF)
)
|
from ..utils import *
##
# Minions
# Murloc Knight
class AT_076:
inspire = Summon(CONTROLLER, RandomMurloc())
# Eadric the Pure
class AT_081:
play = Buff(ALL_MINIONS, "AT_081e")
##
# Spells
# Seal of Champions
class AT_074:
play = Buff(TARGET, "AT_074e2")
##
# Secrets
# Competitive Spirit
class AT_073:
events = OWN_TURN_BEGIN.on(
Buff(FRIENDLY_MINIONS, "AT_073e"), Reveal(SELF)
)
Fix Eadric the Pure's target selectionfrom ..utils import *
##
# Minions
# Murloc Knight
class AT_076:
inspire = Summon(CONTROLLER, RandomMurloc())
# Eadric the Pure
class AT_081:
play = Buff(ENEMY_MINIONS, "AT_081e")
##
# Spells
# Seal of Champions
class AT_074:
play = Buff(TARGET, "AT_074e2")
##
# Secrets
# Competitive Spirit
class AT_073:
events = OWN_TURN_BEGIN.on(
Buff(FRIENDLY_MINIONS, "AT_073e"), Reveal(SELF)
)
|
<commit_before>from ..utils import *
##
# Minions
# Murloc Knight
class AT_076:
inspire = Summon(CONTROLLER, RandomMurloc())
# Eadric the Pure
class AT_081:
play = Buff(ALL_MINIONS, "AT_081e")
##
# Spells
# Seal of Champions
class AT_074:
play = Buff(TARGET, "AT_074e2")
##
# Secrets
# Competitive Spirit
class AT_073:
events = OWN_TURN_BEGIN.on(
Buff(FRIENDLY_MINIONS, "AT_073e"), Reveal(SELF)
)
<commit_msg>Fix Eadric the Pure's target selection<commit_after>from ..utils import *
##
# Minions
# Murloc Knight
class AT_076:
inspire = Summon(CONTROLLER, RandomMurloc())
# Eadric the Pure
class AT_081:
play = Buff(ENEMY_MINIONS, "AT_081e")
##
# Spells
# Seal of Champions
class AT_074:
play = Buff(TARGET, "AT_074e2")
##
# Secrets
# Competitive Spirit
class AT_073:
events = OWN_TURN_BEGIN.on(
Buff(FRIENDLY_MINIONS, "AT_073e"), Reveal(SELF)
)
|
f7cc714a0ea6f9d33ac06c2460f8abbd5991e4ab
|
pi_gpio/handlers.py
|
pi_gpio/handlers.py
|
from flask.ext.restful import fields
from meta import BasicResource
from config.pins import PinManager
MANAGER = PinManager()
class Pin(BasicResource):
def __init__(self):
super(Pin, self).__init__()
self.fields = {
"num": fields.Integer,
"mode": fields.String,
"initial": fields.String,
"resistor": fields.String
}
class PinList(Pin):
def get(self):
return self.response(MANAGER.pins, 200)
class PinDetail(Pin):
def get(self, pin_num):
output = MANAGER.read(pin_num)
if not output:
return {'message': 'Pin not found'}, 404
return self.response(output, 200)
def put(self, pin_num):
return {'pin': pin_num}
def patch(self, pin_num):
pass
|
from flask.ext.restful import fields
from meta import BasicResource
from config.pins import PinManager
MANAGER = PinManager()
class Pin(BasicResource):
def __init__(self):
super(Pin, self).__init__()
self.fields = {
"num": fields.Integer,
"mode": fields.String,
"initial": fields.String,
"resistor": fields.String
}
class PinList(Pin):
def get(self):
return self.response(MANAGER.pins, 200)
class PinDetail(Pin):
def __init__(self):
super(PinDetail, self).__init__()
self.fields['value'] = fields.Integer
def get(self, pin_num):
output = MANAGER.read(pin_num)
if not output:
return {'message': 'Pin not found'}, 404
return self.response(output, 200)
def put(self, pin_num):
return {'pin': pin_num}
def patch(self, pin_num):
pass
|
Add extra field to detail output
|
Add extra field to detail output
|
Python
|
mit
|
thijstriemstra/Pi-GPIO-Server,projectweekend/Pi-GPIO-Server,projectweekend/Pi-GPIO-Server,thijstriemstra/Pi-GPIO-Server,projectweekend/Pi-GPIO-Server,thijstriemstra/Pi-GPIO-Server,thijstriemstra/Pi-GPIO-Server,projectweekend/Pi-GPIO-Server
|
from flask.ext.restful import fields
from meta import BasicResource
from config.pins import PinManager
MANAGER = PinManager()
class Pin(BasicResource):
def __init__(self):
super(Pin, self).__init__()
self.fields = {
"num": fields.Integer,
"mode": fields.String,
"initial": fields.String,
"resistor": fields.String
}
class PinList(Pin):
def get(self):
return self.response(MANAGER.pins, 200)
class PinDetail(Pin):
def get(self, pin_num):
output = MANAGER.read(pin_num)
if not output:
return {'message': 'Pin not found'}, 404
return self.response(output, 200)
def put(self, pin_num):
return {'pin': pin_num}
def patch(self, pin_num):
pass
Add extra field to detail output
|
from flask.ext.restful import fields
from meta import BasicResource
from config.pins import PinManager
MANAGER = PinManager()
class Pin(BasicResource):
def __init__(self):
super(Pin, self).__init__()
self.fields = {
"num": fields.Integer,
"mode": fields.String,
"initial": fields.String,
"resistor": fields.String
}
class PinList(Pin):
def get(self):
return self.response(MANAGER.pins, 200)
class PinDetail(Pin):
def __init__(self):
super(PinDetail, self).__init__()
self.fields['value'] = fields.Integer
def get(self, pin_num):
output = MANAGER.read(pin_num)
if not output:
return {'message': 'Pin not found'}, 404
return self.response(output, 200)
def put(self, pin_num):
return {'pin': pin_num}
def patch(self, pin_num):
pass
|
<commit_before>from flask.ext.restful import fields
from meta import BasicResource
from config.pins import PinManager
MANAGER = PinManager()
class Pin(BasicResource):
def __init__(self):
super(Pin, self).__init__()
self.fields = {
"num": fields.Integer,
"mode": fields.String,
"initial": fields.String,
"resistor": fields.String
}
class PinList(Pin):
def get(self):
return self.response(MANAGER.pins, 200)
class PinDetail(Pin):
def get(self, pin_num):
output = MANAGER.read(pin_num)
if not output:
return {'message': 'Pin not found'}, 404
return self.response(output, 200)
def put(self, pin_num):
return {'pin': pin_num}
def patch(self, pin_num):
pass
<commit_msg>Add extra field to detail output<commit_after>
|
from flask.ext.restful import fields
from meta import BasicResource
from config.pins import PinManager
MANAGER = PinManager()
class Pin(BasicResource):
def __init__(self):
super(Pin, self).__init__()
self.fields = {
"num": fields.Integer,
"mode": fields.String,
"initial": fields.String,
"resistor": fields.String
}
class PinList(Pin):
def get(self):
return self.response(MANAGER.pins, 200)
class PinDetail(Pin):
def __init__(self):
super(PinDetail, self).__init__()
self.fields['value'] = fields.Integer
def get(self, pin_num):
output = MANAGER.read(pin_num)
if not output:
return {'message': 'Pin not found'}, 404
return self.response(output, 200)
def put(self, pin_num):
return {'pin': pin_num}
def patch(self, pin_num):
pass
|
from flask.ext.restful import fields
from meta import BasicResource
from config.pins import PinManager
MANAGER = PinManager()
class Pin(BasicResource):
def __init__(self):
super(Pin, self).__init__()
self.fields = {
"num": fields.Integer,
"mode": fields.String,
"initial": fields.String,
"resistor": fields.String
}
class PinList(Pin):
def get(self):
return self.response(MANAGER.pins, 200)
class PinDetail(Pin):
def get(self, pin_num):
output = MANAGER.read(pin_num)
if not output:
return {'message': 'Pin not found'}, 404
return self.response(output, 200)
def put(self, pin_num):
return {'pin': pin_num}
def patch(self, pin_num):
pass
Add extra field to detail outputfrom flask.ext.restful import fields
from meta import BasicResource
from config.pins import PinManager
MANAGER = PinManager()
class Pin(BasicResource):
def __init__(self):
super(Pin, self).__init__()
self.fields = {
"num": fields.Integer,
"mode": fields.String,
"initial": fields.String,
"resistor": fields.String
}
class PinList(Pin):
def get(self):
return self.response(MANAGER.pins, 200)
class PinDetail(Pin):
def __init__(self):
super(PinDetail, self).__init__()
self.fields['value'] = fields.Integer
def get(self, pin_num):
output = MANAGER.read(pin_num)
if not output:
return {'message': 'Pin not found'}, 404
return self.response(output, 200)
def put(self, pin_num):
return {'pin': pin_num}
def patch(self, pin_num):
pass
|
<commit_before>from flask.ext.restful import fields
from meta import BasicResource
from config.pins import PinManager
MANAGER = PinManager()
class Pin(BasicResource):
def __init__(self):
super(Pin, self).__init__()
self.fields = {
"num": fields.Integer,
"mode": fields.String,
"initial": fields.String,
"resistor": fields.String
}
class PinList(Pin):
def get(self):
return self.response(MANAGER.pins, 200)
class PinDetail(Pin):
def get(self, pin_num):
output = MANAGER.read(pin_num)
if not output:
return {'message': 'Pin not found'}, 404
return self.response(output, 200)
def put(self, pin_num):
return {'pin': pin_num}
def patch(self, pin_num):
pass
<commit_msg>Add extra field to detail output<commit_after>from flask.ext.restful import fields
from meta import BasicResource
from config.pins import PinManager
MANAGER = PinManager()
class Pin(BasicResource):
def __init__(self):
super(Pin, self).__init__()
self.fields = {
"num": fields.Integer,
"mode": fields.String,
"initial": fields.String,
"resistor": fields.String
}
class PinList(Pin):
def get(self):
return self.response(MANAGER.pins, 200)
class PinDetail(Pin):
def __init__(self):
super(PinDetail, self).__init__()
self.fields['value'] = fields.Integer
def get(self, pin_num):
output = MANAGER.read(pin_num)
if not output:
return {'message': 'Pin not found'}, 404
return self.response(output, 200)
def put(self, pin_num):
return {'pin': pin_num}
def patch(self, pin_num):
pass
|
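For the record above, the effect of registering the extra field is easiest to see with flask-restful's marshal helper. Purely illustrative: the raw pin dict is invented, and the modern flask_restful import path is used instead of the flask.ext.restful one in the snippet.
from flask_restful import fields, marshal

detail_fields = {
    "num": fields.Integer,
    "mode": fields.String,
    "initial": fields.String,
    "resistor": fields.String,
    "value": fields.Integer,  # the field PinDetail.__init__ now adds
}

# Hypothetical output of PinManager.read(); marshal() keeps only the declared
# fields and coerces their types, so 'value' shows up in the detail response
# while the list response is unchanged.
raw_pin = {"num": 18, "mode": "OUT", "initial": "LOW", "resistor": "PUD_UP", "value": 1}
print(marshal(raw_pin, detail_fields))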
903c0d6a3bda96a0b193cc6efd2f8e868d4d82e2
|
setuptools/tests/test_build_ext.py
|
setuptools/tests/test_build_ext.py
|
"""build_ext tests
"""
import unittest
from distutils.command.build_ext import build_ext as distutils_build_ext
from setuptools.command.build_ext import build_ext
from setuptools.dist import Distribution
class TestBuildExtTest(unittest.TestCase):
def test_get_ext_filename(self):
# setuptools needs to give back the same
# result than distutils, even if the fullname
# is not in ext_map
dist = Distribution()
cmd = build_ext(dist)
cmd.ext_map['foo/bar'] = ''
res = cmd.get_ext_filename('foo')
wanted = distutils_build_ext.get_ext_filename(cmd, 'foo')
assert res == wanted
|
"""build_ext tests
"""
import unittest
import distutils.command.build_ext as orig
from setuptools.command.build_ext import build_ext
from setuptools.dist import Distribution
class TestBuildExtTest(unittest.TestCase):
def test_get_ext_filename(self):
# setuptools needs to give back the same
# result than distutils, even if the fullname
# is not in ext_map
dist = Distribution()
cmd = build_ext(dist)
cmd.ext_map['foo/bar'] = ''
res = cmd.get_ext_filename('foo')
wanted = orig.build_ext.get_ext_filename(cmd, 'foo')
assert res == wanted
|
Use namespacing for easier reading
|
Use namespacing for easier reading
|
Python
|
mit
|
pypa/setuptools,pypa/setuptools,pypa/setuptools
|
"""build_ext tests
"""
import unittest
from distutils.command.build_ext import build_ext as distutils_build_ext
from setuptools.command.build_ext import build_ext
from setuptools.dist import Distribution
class TestBuildExtTest(unittest.TestCase):
def test_get_ext_filename(self):
# setuptools needs to give back the same
# result than distutils, even if the fullname
# is not in ext_map
dist = Distribution()
cmd = build_ext(dist)
cmd.ext_map['foo/bar'] = ''
res = cmd.get_ext_filename('foo')
wanted = distutils_build_ext.get_ext_filename(cmd, 'foo')
assert res == wanted
Use namespacing for easier reading
|
"""build_ext tests
"""
import unittest
import distutils.command.build_ext as orig
from setuptools.command.build_ext import build_ext
from setuptools.dist import Distribution
class TestBuildExtTest(unittest.TestCase):
def test_get_ext_filename(self):
# setuptools needs to give back the same
# result than distutils, even if the fullname
# is not in ext_map
dist = Distribution()
cmd = build_ext(dist)
cmd.ext_map['foo/bar'] = ''
res = cmd.get_ext_filename('foo')
wanted = orig.build_ext.get_ext_filename(cmd, 'foo')
assert res == wanted
|
<commit_before>"""build_ext tests
"""
import unittest
from distutils.command.build_ext import build_ext as distutils_build_ext
from setuptools.command.build_ext import build_ext
from setuptools.dist import Distribution
class TestBuildExtTest(unittest.TestCase):
def test_get_ext_filename(self):
# setuptools needs to give back the same
# result than distutils, even if the fullname
# is not in ext_map
dist = Distribution()
cmd = build_ext(dist)
cmd.ext_map['foo/bar'] = ''
res = cmd.get_ext_filename('foo')
wanted = distutils_build_ext.get_ext_filename(cmd, 'foo')
assert res == wanted
<commit_msg>Use namespacing for easier reading<commit_after>
|
"""build_ext tests
"""
import unittest
import distutils.command.build_ext as orig
from setuptools.command.build_ext import build_ext
from setuptools.dist import Distribution
class TestBuildExtTest(unittest.TestCase):
def test_get_ext_filename(self):
# setuptools needs to give back the same
# result than distutils, even if the fullname
# is not in ext_map
dist = Distribution()
cmd = build_ext(dist)
cmd.ext_map['foo/bar'] = ''
res = cmd.get_ext_filename('foo')
wanted = orig.build_ext.get_ext_filename(cmd, 'foo')
assert res == wanted
|
"""build_ext tests
"""
import unittest
from distutils.command.build_ext import build_ext as distutils_build_ext
from setuptools.command.build_ext import build_ext
from setuptools.dist import Distribution
class TestBuildExtTest(unittest.TestCase):
def test_get_ext_filename(self):
# setuptools needs to give back the same
# result than distutils, even if the fullname
# is not in ext_map
dist = Distribution()
cmd = build_ext(dist)
cmd.ext_map['foo/bar'] = ''
res = cmd.get_ext_filename('foo')
wanted = distutils_build_ext.get_ext_filename(cmd, 'foo')
assert res == wanted
Use namespacing for easier reading"""build_ext tests
"""
import unittest
import distutils.command.build_ext as orig
from setuptools.command.build_ext import build_ext
from setuptools.dist import Distribution
class TestBuildExtTest(unittest.TestCase):
def test_get_ext_filename(self):
# setuptools needs to give back the same
# result than distutils, even if the fullname
# is not in ext_map
dist = Distribution()
cmd = build_ext(dist)
cmd.ext_map['foo/bar'] = ''
res = cmd.get_ext_filename('foo')
wanted = orig.build_ext.get_ext_filename(cmd, 'foo')
assert res == wanted
|
<commit_before>"""build_ext tests
"""
import unittest
from distutils.command.build_ext import build_ext as distutils_build_ext
from setuptools.command.build_ext import build_ext
from setuptools.dist import Distribution
class TestBuildExtTest(unittest.TestCase):
def test_get_ext_filename(self):
# setuptools needs to give back the same
# result than distutils, even if the fullname
# is not in ext_map
dist = Distribution()
cmd = build_ext(dist)
cmd.ext_map['foo/bar'] = ''
res = cmd.get_ext_filename('foo')
wanted = distutils_build_ext.get_ext_filename(cmd, 'foo')
assert res == wanted
<commit_msg>Use namespacing for easier reading<commit_after>"""build_ext tests
"""
import unittest
import distutils.command.build_ext as orig
from setuptools.command.build_ext import build_ext
from setuptools.dist import Distribution
class TestBuildExtTest(unittest.TestCase):
def test_get_ext_filename(self):
# setuptools needs to give back the same
# result than distutils, even if the fullname
# is not in ext_map
dist = Distribution()
cmd = build_ext(dist)
cmd.ext_map['foo/bar'] = ''
res = cmd.get_ext_filename('foo')
wanted = orig.build_ext.get_ext_filename(cmd, 'foo')
assert res == wanted
|
1c254d8869482241de14255c25edd875ca369e46
|
fortuitus/frunner/factories.py
|
fortuitus/frunner/factories.py
|
import factory
from fortuitus.feditor.factories import TestProjectF
from fortuitus.frunner import models
class TestRunF(factory.Factory):
FACTORY_FOR = models.TestRun
project = factory.SubFactory(TestProjectF)
class TestCaseF(factory.Factory):
FACTORY_FOR = models.TestCase
testrun = factory.SubFactory(TestRunF)
name = factory.Sequence(lambda n: 'TestCase #%s' % n)
order = 1
login_type = models.models_base.LoginType.NONE
class TestCaseStepF(factory.Factory):
FACTORY_FOR = models.TestCaseStep
testcase = factory.SubFactory(TestCaseF)
order = 1
method = models.models_base.Method.GET
url = 'user_list.json'
class TestCaseAssertF(factory.Factory):
FACTORY_FOR = models.TestCaseAssert
step = factory.SubFactory(TestCaseStepF)
order = 1
lhs = ''
rhs = ''
operator = models.models_base.method_choices[0][0]
|
import factory
from fortuitus.feditor.factories import TestProjectF
from fortuitus.frunner import models
class TestRunF(factory.Factory):
FACTORY_FOR = models.TestRun
project = factory.SubFactory(TestProjectF)
base_url = 'http://api.example.com/'
class TestCaseF(factory.Factory):
FACTORY_FOR = models.TestCase
testrun = factory.SubFactory(TestRunF)
name = factory.Sequence(lambda n: 'TestCase #%s' % n)
order = 1
login_type = models.models_base.LoginType.NONE
class TestCaseStepF(factory.Factory):
FACTORY_FOR = models.TestCaseStep
testcase = factory.SubFactory(TestCaseF)
order = 1
method = models.models_base.Method.GET
url = 'user_list.json'
class TestCaseAssertF(factory.Factory):
FACTORY_FOR = models.TestCaseAssert
step = factory.SubFactory(TestCaseStepF)
order = 1
lhs = ''
rhs = ''
operator = models.models_base.method_choices[0][0]
|
Fix TestRun factory missing base_url
|
Fix TestRun factory missing base_url
|
Python
|
mit
|
elegion/djangodash2012,elegion/djangodash2012
|
import factory
from fortuitus.feditor.factories import TestProjectF
from fortuitus.frunner import models
class TestRunF(factory.Factory):
FACTORY_FOR = models.TestRun
project = factory.SubFactory(TestProjectF)
class TestCaseF(factory.Factory):
FACTORY_FOR = models.TestCase
testrun = factory.SubFactory(TestRunF)
name = factory.Sequence(lambda n: 'TestCase #%s' % n)
order = 1
login_type = models.models_base.LoginType.NONE
class TestCaseStepF(factory.Factory):
FACTORY_FOR = models.TestCaseStep
testcase = factory.SubFactory(TestCaseF)
order = 1
method = models.models_base.Method.GET
url = 'user_list.json'
class TestCaseAssertF(factory.Factory):
FACTORY_FOR = models.TestCaseAssert
step = factory.SubFactory(TestCaseStepF)
order = 1
lhs = ''
rhs = ''
operator = models.models_base.method_choices[0][0]
Fix TestRun factory missing base_url
|
import factory
from fortuitus.feditor.factories import TestProjectF
from fortuitus.frunner import models
class TestRunF(factory.Factory):
FACTORY_FOR = models.TestRun
project = factory.SubFactory(TestProjectF)
base_url = 'http://api.example.com/'
class TestCaseF(factory.Factory):
FACTORY_FOR = models.TestCase
testrun = factory.SubFactory(TestRunF)
name = factory.Sequence(lambda n: 'TestCase #%s' % n)
order = 1
login_type = models.models_base.LoginType.NONE
class TestCaseStepF(factory.Factory):
FACTORY_FOR = models.TestCaseStep
testcase = factory.SubFactory(TestCaseF)
order = 1
method = models.models_base.Method.GET
url = 'user_list.json'
class TestCaseAssertF(factory.Factory):
FACTORY_FOR = models.TestCaseAssert
step = factory.SubFactory(TestCaseStepF)
order = 1
lhs = ''
rhs = ''
operator = models.models_base.method_choices[0][0]
|
<commit_before>import factory
from fortuitus.feditor.factories import TestProjectF
from fortuitus.frunner import models
class TestRunF(factory.Factory):
FACTORY_FOR = models.TestRun
project = factory.SubFactory(TestProjectF)
class TestCaseF(factory.Factory):
FACTORY_FOR = models.TestCase
testrun = factory.SubFactory(TestRunF)
name = factory.Sequence(lambda n: 'TestCase #%s' % n)
order = 1
login_type = models.models_base.LoginType.NONE
class TestCaseStepF(factory.Factory):
FACTORY_FOR = models.TestCaseStep
testcase = factory.SubFactory(TestCaseF)
order = 1
method = models.models_base.Method.GET
url = 'user_list.json'
class TestCaseAssertF(factory.Factory):
FACTORY_FOR = models.TestCaseAssert
step = factory.SubFactory(TestCaseStepF)
order = 1
lhs = ''
rhs = ''
operator = models.models_base.method_choices[0][0]
<commit_msg>Fix TestRun factory missing base_url<commit_after>
|
import factory
from fortuitus.feditor.factories import TestProjectF
from fortuitus.frunner import models
class TestRunF(factory.Factory):
FACTORY_FOR = models.TestRun
project = factory.SubFactory(TestProjectF)
base_url = 'http://api.example.com/'
class TestCaseF(factory.Factory):
FACTORY_FOR = models.TestCase
testrun = factory.SubFactory(TestRunF)
name = factory.Sequence(lambda n: 'TestCase #%s' % n)
order = 1
login_type = models.models_base.LoginType.NONE
class TestCaseStepF(factory.Factory):
FACTORY_FOR = models.TestCaseStep
testcase = factory.SubFactory(TestCaseF)
order = 1
method = models.models_base.Method.GET
url = 'user_list.json'
class TestCaseAssertF(factory.Factory):
FACTORY_FOR = models.TestCaseAssert
step = factory.SubFactory(TestCaseStepF)
order = 1
lhs = ''
rhs = ''
operator = models.models_base.method_choices[0][0]
|
import factory
from fortuitus.feditor.factories import TestProjectF
from fortuitus.frunner import models
class TestRunF(factory.Factory):
FACTORY_FOR = models.TestRun
project = factory.SubFactory(TestProjectF)
class TestCaseF(factory.Factory):
FACTORY_FOR = models.TestCase
testrun = factory.SubFactory(TestRunF)
name = factory.Sequence(lambda n: 'TestCase #%s' % n)
order = 1
login_type = models.models_base.LoginType.NONE
class TestCaseStepF(factory.Factory):
FACTORY_FOR = models.TestCaseStep
testcase = factory.SubFactory(TestCaseF)
order = 1
method = models.models_base.Method.GET
url = 'user_list.json'
class TestCaseAssertF(factory.Factory):
FACTORY_FOR = models.TestCaseAssert
step = factory.SubFactory(TestCaseStepF)
order = 1
lhs = ''
rhs = ''
operator = models.models_base.method_choices[0][0]
Fix TestRun factory missing base_urlimport factory
from fortuitus.feditor.factories import TestProjectF
from fortuitus.frunner import models
class TestRunF(factory.Factory):
FACTORY_FOR = models.TestRun
project = factory.SubFactory(TestProjectF)
base_url = 'http://api.example.com/'
class TestCaseF(factory.Factory):
FACTORY_FOR = models.TestCase
testrun = factory.SubFactory(TestRunF)
name = factory.Sequence(lambda n: 'TestCase #%s' % n)
order = 1
login_type = models.models_base.LoginType.NONE
class TestCaseStepF(factory.Factory):
FACTORY_FOR = models.TestCaseStep
testcase = factory.SubFactory(TestCaseF)
order = 1
method = models.models_base.Method.GET
url = 'user_list.json'
class TestCaseAssertF(factory.Factory):
FACTORY_FOR = models.TestCaseAssert
step = factory.SubFactory(TestCaseStepF)
order = 1
lhs = ''
rhs = ''
operator = models.models_base.method_choices[0][0]
|
<commit_before>import factory
from fortuitus.feditor.factories import TestProjectF
from fortuitus.frunner import models
class TestRunF(factory.Factory):
FACTORY_FOR = models.TestRun
project = factory.SubFactory(TestProjectF)
class TestCaseF(factory.Factory):
FACTORY_FOR = models.TestCase
testrun = factory.SubFactory(TestRunF)
name = factory.Sequence(lambda n: 'TestCase #%s' % n)
order = 1
login_type = models.models_base.LoginType.NONE
class TestCaseStepF(factory.Factory):
FACTORY_FOR = models.TestCaseStep
testcase = factory.SubFactory(TestCaseF)
order = 1
method = models.models_base.Method.GET
url = 'user_list.json'
class TestCaseAssertF(factory.Factory):
FACTORY_FOR = models.TestCaseAssert
step = factory.SubFactory(TestCaseStepF)
order = 1
lhs = ''
rhs = ''
operator = models.models_base.method_choices[0][0]
<commit_msg>Fix TestRun factory missing base_url<commit_after>import factory
from fortuitus.feditor.factories import TestProjectF
from fortuitus.frunner import models
class TestRunF(factory.Factory):
FACTORY_FOR = models.TestRun
project = factory.SubFactory(TestProjectF)
base_url = 'http://api.example.com/'
class TestCaseF(factory.Factory):
FACTORY_FOR = models.TestCase
testrun = factory.SubFactory(TestRunF)
name = factory.Sequence(lambda n: 'TestCase #%s' % n)
order = 1
login_type = models.models_base.LoginType.NONE
class TestCaseStepF(factory.Factory):
FACTORY_FOR = models.TestCaseStep
testcase = factory.SubFactory(TestCaseF)
order = 1
method = models.models_base.Method.GET
url = 'user_list.json'
class TestCaseAssertF(factory.Factory):
FACTORY_FOR = models.TestCaseAssert
step = factory.SubFactory(TestCaseStepF)
order = 1
lhs = ''
rhs = ''
operator = models.models_base.method_choices[0][0]
|
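A minimal, self-contained sketch of the pattern behind the fix above, assuming a recent factory_boy with the Meta-style declaration (the record itself uses the older FACTORY_FOR form); the Run class and the URL are illustrative stand-ins, not fortuitus code. Without a class-level base_url default, the factory cannot satisfy a model that requires one.

import factory

class Run(object):
    # Stand-in for a model whose base_url must always be provided.
    def __init__(self, project, base_url):
        self.project = project
        self.base_url = base_url

class RunFactory(factory.Factory):
    class Meta:
        model = Run

    project = 'demo-project'
    base_url = 'http://api.example.com/'  # the kind of default the fix adds

run = RunFactory()
print(run.base_url)  # http://api.example.com/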
8ae66dc2f9b3dd58db0c41a4bf44229dff2dc652
|
falmer/content/models/__init__.py
|
falmer/content/models/__init__.py
|
from falmer.content.models.core import ClickThrough
from .staff import StaffPage, StaffMemberSnippet
from .section_content import SectionContentPage
from .selection_grid import SelectionGridPage
from .officer_overview import OfficerOverviewPage
from .homepage import HomePage
from .freshers import FreshersHomepage
from .generic import KBRootPage, KBCategoryPage, AnswerPage, ReferencePage, DetailedGuidePage, DetailedGuideSectionPage
from .basic import StubPage, BasicContentPage
from .outlets import OutletIndexPage, OutletPage
from .scheme import SchemeIndexPage, SchemePage
all_pages = (
StaffPage,
StaffMemberSnippet,
SectionContentPage,
SelectionGridPage,
OfficerOverviewPage,
HomePage,
KBRootPage,
KBCategoryPage,
AnswerPage,
ReferencePage,
DetailedGuidePage,
DetailedGuideSectionPage,
StubPage,
BasicContentPage,
OutletIndexPage,
OutletPage,
SchemeIndexPage,
SchemePage,
FreshersHomepage,
ClickThrough,
)
name_to_class_map = {cls.__name__: cls for cls in all_pages}
|
from falmer.content.models.core import ClickThrough
from .staff import StaffPage, StaffMemberSnippet
from .section_content import SectionContentPage
from .selection_grid import SelectionGridPage
from .officer_overview import OfficerOverviewPage, OfficersIndex
from .homepage import HomePage
from .freshers import FreshersHomepage
from .generic import KBRootPage, KBCategoryPage, AnswerPage, ReferencePage, DetailedGuidePage, DetailedGuideSectionPage
from .basic import StubPage, BasicContentPage
from .outlets import OutletIndexPage, OutletPage
from .scheme import SchemeIndexPage, SchemePage
all_pages = (
StaffPage,
StaffMemberSnippet,
SectionContentPage,
SelectionGridPage,
OfficerOverviewPage,
OfficersIndex,
HomePage,
KBRootPage,
KBCategoryPage,
AnswerPage,
ReferencePage,
DetailedGuidePage,
DetailedGuideSectionPage,
StubPage,
BasicContentPage,
OutletIndexPage,
OutletPage,
SchemeIndexPage,
SchemePage,
FreshersHomepage,
ClickThrough,
)
name_to_class_map = {cls.__name__: cls for cls in all_pages}
|
Add officers index to contentmap
|
Add officers index to contentmap
|
Python
|
mit
|
sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer
|
from falmer.content.models.core import ClickThrough
from .staff import StaffPage, StaffMemberSnippet
from .section_content import SectionContentPage
from .selection_grid import SelectionGridPage
from .officer_overview import OfficerOverviewPage
from .homepage import HomePage
from .freshers import FreshersHomepage
from .generic import KBRootPage, KBCategoryPage, AnswerPage, ReferencePage, DetailedGuidePage, DetailedGuideSectionPage
from .basic import StubPage, BasicContentPage
from .outlets import OutletIndexPage, OutletPage
from .scheme import SchemeIndexPage, SchemePage
all_pages = (
StaffPage,
StaffMemberSnippet,
SectionContentPage,
SelectionGridPage,
OfficerOverviewPage,
HomePage,
KBRootPage,
KBCategoryPage,
AnswerPage,
ReferencePage,
DetailedGuidePage,
DetailedGuideSectionPage,
StubPage,
BasicContentPage,
OutletIndexPage,
OutletPage,
SchemeIndexPage,
SchemePage,
FreshersHomepage,
ClickThrough,
)
name_to_class_map = {cls.__name__: cls for cls in all_pages}
Add officers index to contentmap
|
from falmer.content.models.core import ClickThrough
from .staff import StaffPage, StaffMemberSnippet
from .section_content import SectionContentPage
from .selection_grid import SelectionGridPage
from .officer_overview import OfficerOverviewPage, OfficersIndex
from .homepage import HomePage
from .freshers import FreshersHomepage
from .generic import KBRootPage, KBCategoryPage, AnswerPage, ReferencePage, DetailedGuidePage, DetailedGuideSectionPage
from .basic import StubPage, BasicContentPage
from .outlets import OutletIndexPage, OutletPage
from .scheme import SchemeIndexPage, SchemePage
all_pages = (
StaffPage,
StaffMemberSnippet,
SectionContentPage,
SelectionGridPage,
OfficerOverviewPage,
OfficersIndex,
HomePage,
KBRootPage,
KBCategoryPage,
AnswerPage,
ReferencePage,
DetailedGuidePage,
DetailedGuideSectionPage,
StubPage,
BasicContentPage,
OutletIndexPage,
OutletPage,
SchemeIndexPage,
SchemePage,
FreshersHomepage,
ClickThrough,
)
name_to_class_map = {cls.__name__: cls for cls in all_pages}
|
<commit_before>from falmer.content.models.core import ClickThrough
from .staff import StaffPage, StaffMemberSnippet
from .section_content import SectionContentPage
from .selection_grid import SelectionGridPage
from .officer_overview import OfficerOverviewPage
from .homepage import HomePage
from .freshers import FreshersHomepage
from .generic import KBRootPage, KBCategoryPage, AnswerPage, ReferencePage, DetailedGuidePage, DetailedGuideSectionPage
from .basic import StubPage, BasicContentPage
from .outlets import OutletIndexPage, OutletPage
from .scheme import SchemeIndexPage, SchemePage
all_pages = (
StaffPage,
StaffMemberSnippet,
SectionContentPage,
SelectionGridPage,
OfficerOverviewPage,
HomePage,
KBRootPage,
KBCategoryPage,
AnswerPage,
ReferencePage,
DetailedGuidePage,
DetailedGuideSectionPage,
StubPage,
BasicContentPage,
OutletIndexPage,
OutletPage,
SchemeIndexPage,
SchemePage,
FreshersHomepage,
ClickThrough,
)
name_to_class_map = {cls.__name__: cls for cls in all_pages}
<commit_msg>Add officers index to contentmap<commit_after>
|
from falmer.content.models.core import ClickThrough
from .staff import StaffPage, StaffMemberSnippet
from .section_content import SectionContentPage
from .selection_grid import SelectionGridPage
from .officer_overview import OfficerOverviewPage, OfficersIndex
from .homepage import HomePage
from .freshers import FreshersHomepage
from .generic import KBRootPage, KBCategoryPage, AnswerPage, ReferencePage, DetailedGuidePage, DetailedGuideSectionPage
from .basic import StubPage, BasicContentPage
from .outlets import OutletIndexPage, OutletPage
from .scheme import SchemeIndexPage, SchemePage
all_pages = (
StaffPage,
StaffMemberSnippet,
SectionContentPage,
SelectionGridPage,
OfficerOverviewPage,
OfficersIndex,
HomePage,
KBRootPage,
KBCategoryPage,
AnswerPage,
ReferencePage,
DetailedGuidePage,
DetailedGuideSectionPage,
StubPage,
BasicContentPage,
OutletIndexPage,
OutletPage,
SchemeIndexPage,
SchemePage,
FreshersHomepage,
ClickThrough,
)
name_to_class_map = {cls.__name__: cls for cls in all_pages}
|
from falmer.content.models.core import ClickThrough
from .staff import StaffPage, StaffMemberSnippet
from .section_content import SectionContentPage
from .selection_grid import SelectionGridPage
from .officer_overview import OfficerOverviewPage
from .homepage import HomePage
from .freshers import FreshersHomepage
from .generic import KBRootPage, KBCategoryPage, AnswerPage, ReferencePage, DetailedGuidePage, DetailedGuideSectionPage
from .basic import StubPage, BasicContentPage
from .outlets import OutletIndexPage, OutletPage
from .scheme import SchemeIndexPage, SchemePage
all_pages = (
StaffPage,
StaffMemberSnippet,
SectionContentPage,
SelectionGridPage,
OfficerOverviewPage,
HomePage,
KBRootPage,
KBCategoryPage,
AnswerPage,
ReferencePage,
DetailedGuidePage,
DetailedGuideSectionPage,
StubPage,
BasicContentPage,
OutletIndexPage,
OutletPage,
SchemeIndexPage,
SchemePage,
FreshersHomepage,
ClickThrough,
)
name_to_class_map = {cls.__name__: cls for cls in all_pages}
Add officers index to contentmap
from falmer.content.models.core import ClickThrough
from .staff import StaffPage, StaffMemberSnippet
from .section_content import SectionContentPage
from .selection_grid import SelectionGridPage
from .officer_overview import OfficerOverviewPage, OfficersIndex
from .homepage import HomePage
from .freshers import FreshersHomepage
from .generic import KBRootPage, KBCategoryPage, AnswerPage, ReferencePage, DetailedGuidePage, DetailedGuideSectionPage
from .basic import StubPage, BasicContentPage
from .outlets import OutletIndexPage, OutletPage
from .scheme import SchemeIndexPage, SchemePage
all_pages = (
StaffPage,
StaffMemberSnippet,
SectionContentPage,
SelectionGridPage,
OfficerOverviewPage,
OfficersIndex,
HomePage,
KBRootPage,
KBCategoryPage,
AnswerPage,
ReferencePage,
DetailedGuidePage,
DetailedGuideSectionPage,
StubPage,
BasicContentPage,
OutletIndexPage,
OutletPage,
SchemeIndexPage,
SchemePage,
FreshersHomepage,
ClickThrough,
)
name_to_class_map = {cls.__name__: cls for cls in all_pages}
|
<commit_before>from falmer.content.models.core import ClickThrough
from .staff import StaffPage, StaffMemberSnippet
from .section_content import SectionContentPage
from .selection_grid import SelectionGridPage
from .officer_overview import OfficerOverviewPage
from .homepage import HomePage
from .freshers import FreshersHomepage
from .generic import KBRootPage, KBCategoryPage, AnswerPage, ReferencePage, DetailedGuidePage, DetailedGuideSectionPage
from .basic import StubPage, BasicContentPage
from .outlets import OutletIndexPage, OutletPage
from .scheme import SchemeIndexPage, SchemePage
all_pages = (
StaffPage,
StaffMemberSnippet,
SectionContentPage,
SelectionGridPage,
OfficerOverviewPage,
HomePage,
KBRootPage,
KBCategoryPage,
AnswerPage,
ReferencePage,
DetailedGuidePage,
DetailedGuideSectionPage,
StubPage,
BasicContentPage,
OutletIndexPage,
OutletPage,
SchemeIndexPage,
SchemePage,
FreshersHomepage,
ClickThrough,
)
name_to_class_map = {cls.__name__: cls for cls in all_pages}
<commit_msg>Add officers index to contentmap<commit_after>from falmer.content.models.core import ClickThrough
from .staff import StaffPage, StaffMemberSnippet
from .section_content import SectionContentPage
from .selection_grid import SelectionGridPage
from .officer_overview import OfficerOverviewPage, OfficersIndex
from .homepage import HomePage
from .freshers import FreshersHomepage
from .generic import KBRootPage, KBCategoryPage, AnswerPage, ReferencePage, DetailedGuidePage, DetailedGuideSectionPage
from .basic import StubPage, BasicContentPage
from .outlets import OutletIndexPage, OutletPage
from .scheme import SchemeIndexPage, SchemePage
all_pages = (
StaffPage,
StaffMemberSnippet,
SectionContentPage,
SelectionGridPage,
OfficerOverviewPage,
OfficersIndex,
HomePage,
KBRootPage,
KBCategoryPage,
AnswerPage,
ReferencePage,
DetailedGuidePage,
DetailedGuideSectionPage,
StubPage,
BasicContentPage,
OutletIndexPage,
OutletPage,
SchemeIndexPage,
SchemePage,
FreshersHomepage,
ClickThrough,
)
name_to_class_map = {cls.__name__: cls for cls in all_pages}
|
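A short sketch of why the new page class has to be listed: name_to_class_map is built only from all_pages, so anything left out cannot be resolved by name. The two classes below are empty stand-ins rather than the real Wagtail page models.

class OfficerOverviewPage(object):
    pass

class OfficersIndex(object):
    pass

all_pages = (OfficerOverviewPage, OfficersIndex)
name_to_class_map = {cls.__name__: cls for cls in all_pages}

print(name_to_class_map['OfficersIndex'])    # resolves once the class is listed
print(name_to_class_map.get('MissingPage'))  # None: unlisted classes cannot be looked up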
8a3eb221f51850d8a97c6d72715e644f52346c9f
|
swish/client.py
|
swish/client.py
|
import json
import requests
from .environment import Environment
class SwishClient(object):
def __init__(self, environment, payee_alias, cert):
self.environment = Environment.parse_environment(environment)
self.payee_alias = payee_alias
self.cert = cert
def post(self, endpoint, json):
url = self.environment.base_url + endpoint
return requests.post(url=url, json=json, headers={'Content-Type': 'application/json'}, cert=self.cert)
def get(self, endpoint, id):
print("Not implemented yet!")
def payment_request(self, amount, currency, callback_url, payment_reference='', message=''):
data = {
'amount': amount,
'currency': currency,
'callback_url': callback_url,
'payment_reference': payment_reference,
'message': message
}
r = self.post('paymentrequests', json.dumps(data))
return r
def get_payment_request(payment_request_id):
print("Not implemented yet!")
def refund(self, amount, currency, callback_url, original_payment_reference, payer_payment_reference=''):
data = {
'amount': amount,
'currency': currency,
'callback_url': callback_url
}
r = self.post('refunds', json.dumps(data))
return r
def get_refund(refund_id):
print("Not implemented yet!")
|
import json
import requests
from .environment import Environment
class SwishClient(object):
def __init__(self, environment, payee_alias, cert):
self.environment = Environment.parse_environment(environment)
self.payee_alias = payee_alias
self.cert = cert
def post(self, endpoint, json):
url = self.environment.base_url + endpoint
return requests.post(url=url, json=json, headers={'Content-Type': 'application/json'}, cert=self.cert)
def get(self, endpoint, id):
print("Not implemented yet!")
def payment_request(self, amount, currency, callback_url, payee_payment_reference='', message=''):
data = {
'payeeAlias': self.payee_alias,
'amount': amount,
'currency': currency,
'callbackUrl': callback_url,
'payeePaymentReference': payee_payment_reference,
'message': message
}
r = self.post('paymentrequests', json.dumps(data))
return r
def get_payment_request(payment_request_id):
print("Not implemented yet!")
def refund(self, amount, currency, callback_url, original_payment_reference, payer_payment_reference=''):
data = {
'amount': amount,
'currency': currency,
'callback_url': callback_url
}
r = self.post('refunds', json.dumps(data))
return r
def get_refund(refund_id):
print("Not implemented yet!")
|
Correct data in payment request
|
Correct data in payment request
|
Python
|
mit
|
playing-se/swish-python
|
import json
import requests
from .environment import Environment
class SwishClient(object):
def __init__(self, environment, payee_alias, cert):
self.environment = Environment.parse_environment(environment)
self.payee_alias = payee_alias
self.cert = cert
def post(self, endpoint, json):
url = self.environment.base_url + endpoint
return requests.post(url=url, json=json, headers={'Content-Type': 'application/json'}, cert=self.cert)
def get(self, endpoint, id):
print("Not implemented yet!")
def payment_request(self, amount, currency, callback_url, payment_reference='', message=''):
data = {
'amount': amount,
'currency': currency,
'callback_url': callback_url,
'payment_reference': payment_reference,
'message': message
}
r = self.post('paymentrequests', json.dumps(data))
return r
def get_payment_request(payment_request_id):
print("Not implemented yet!")
def refund(self, amount, currency, callback_url, original_payment_reference, payer_payment_reference=''):
data = {
'amount': amount,
'currency': currency,
'callback_url': callback_url
}
r = self.post('refunds', json.dumps(data))
return r
def get_refund(refund_id):
print("Not implemented yet!")
Correct data in payment request
|
import json
import requests
from .environment import Environment
class SwishClient(object):
def __init__(self, environment, payee_alias, cert):
self.environment = Environment.parse_environment(environment)
self.payee_alias = payee_alias
self.cert = cert
def post(self, endpoint, json):
url = self.environment.base_url + endpoint
return requests.post(url=url, json=json, headers={'Content-Type': 'application/json'}, cert=self.cert)
def get(self, endpoint, id):
print("Not implemented yet!")
def payment_request(self, amount, currency, callback_url, payee_payment_reference='', message=''):
data = {
'payeeAlias': self.payee_alias,
'amount': amount,
'currency': currency,
'callbackUrl': callback_url,
'payeePaymentReference': payee_payment_reference,
'message': message
}
r = self.post('paymentrequests', json.dumps(data))
return r
def get_payment_request(payment_request_id):
print("Not implemented yet!")
def refund(self, amount, currency, callback_url, original_payment_reference, payer_payment_reference=''):
data = {
'amount': amount,
'currency': currency,
'callback_url': callback_url
}
r = self.post('refunds', json.dumps(data))
return r
def get_refund(refund_id):
print("Not implemented yet!")
|
<commit_before>import json
import requests
from .environment import Environment
class SwishClient(object):
def __init__(self, environment, payee_alias, cert):
self.environment = Environment.parse_environment(environment)
self.payee_alias = payee_alias
self.cert = cert
def post(self, endpoint, json):
url = self.environment.base_url + endpoint
return requests.post(url=url, json=json, headers={'Content-Type': 'application/json'}, cert=self.cert)
def get(self, endpoint, id):
print("Not implemented yet!")
def payment_request(self, amount, currency, callback_url, payment_reference='', message=''):
data = {
'amount': amount,
'currency': currency,
'callback_url': callback_url,
'payment_reference': payment_reference,
'message': message
}
r = self.post('paymentrequests', json.dumps(data))
return r
def get_payment_request(payment_request_id):
print("Not implemented yet!")
def refund(self, amount, currency, callback_url, original_payment_reference, payer_payment_reference=''):
data = {
'amount': amount,
'currency': currency,
'callback_url': callback_url
}
r = self.post('refunds', json.dumps(data))
return r
def get_refund(refund_id):
print("Not implemented yet!")
<commit_msg>Correct data in payment request<commit_after>
|
import json
import requests
from .environment import Environment
class SwishClient(object):
def __init__(self, environment, payee_alias, cert):
self.environment = Environment.parse_environment(environment)
self.payee_alias = payee_alias
self.cert = cert
def post(self, endpoint, json):
url = self.environment.base_url + endpoint
return requests.post(url=url, json=json, headers={'Content-Type': 'application/json'}, cert=self.cert)
def get(self, endpoint, id):
print("Not implemented yet!")
def payment_request(self, amount, currency, callback_url, payee_payment_reference='', message=''):
data = {
'payeeAlias': self.payee_alias,
'amount': amount,
'currency': currency,
'callbackUrl': callback_url,
'payeePaymentReference': payee_payment_reference,
'message': message
}
r = self.post('paymentrequests', json.dumps(data))
return r
def get_payment_request(payment_request_id):
print("Not implemented yet!")
def refund(self, amount, currency, callback_url, original_payment_reference, payer_payment_reference=''):
data = {
'amount': amount,
'currency': currency,
'callback_url': callback_url
}
r = self.post('refunds', json.dumps(data))
return r
def get_refund(refund_id):
print("Not implemented yet!")
|
import json
import requests
from .environment import Environment
class SwishClient(object):
def __init__(self, environment, payee_alias, cert):
self.environment = Environment.parse_environment(environment)
self.payee_alias = payee_alias
self.cert = cert
def post(self, endpoint, json):
url = self.environment.base_url + endpoint
return requests.post(url=url, json=json, headers={'Content-Type': 'application/json'}, cert=self.cert)
def get(self, endpoint, id):
print("Not implemented yet!")
def payment_request(self, amount, currency, callback_url, payment_reference='', message=''):
data = {
'amount': amount,
'currency': currency,
'callback_url': callback_url,
'payment_reference': payment_reference,
'message': message
}
r = self.post('paymentrequests', json.dumps(data))
return r
def get_payment_request(payment_request_id):
print("Not implemented yet!")
def refund(self, amount, currency, callback_url, original_payment_reference, payer_payment_reference=''):
data = {
'amount': amount,
'currency': currency,
'callback_url': callback_url
}
r = self.post('refunds', json.dumps(data))
return r
def get_refund(refund_id):
print("Not implemented yet!")
Correct data in payment request
import json
import requests
from .environment import Environment
class SwishClient(object):
def __init__(self, environment, payee_alias, cert):
self.environment = Environment.parse_environment(environment)
self.payee_alias = payee_alias
self.cert = cert
def post(self, endpoint, json):
url = self.environment.base_url + endpoint
return requests.post(url=url, json=json, headers={'Content-Type': 'application/json'}, cert=self.cert)
def get(self, endpoint, id):
print("Not implemented yet!")
def payment_request(self, amount, currency, callback_url, payee_payment_reference='', message=''):
data = {
'payeeAlias': self.payee_alias,
'amount': amount,
'currency': currency,
'callbackUrl': callback_url,
'payeePaymentReference': payee_payment_reference,
'message': message
}
r = self.post('paymentrequests', json.dumps(data))
return r
def get_payment_request(payment_request_id):
print("Not implemented yet!")
def refund(self, amount, currency, callback_url, original_payment_reference, payer_payment_reference=''):
data = {
'amount': amount,
'currency': currency,
'callback_url': callback_url
}
r = self.post('refunds', json.dumps(data))
return r
def get_refund(refund_id):
print("Not implemented yet!")
|
<commit_before>import json
import requests
from .environment import Environment
class SwishClient(object):
def __init__(self, environment, payee_alias, cert):
self.environment = Environment.parse_environment(environment)
self.payee_alias = payee_alias
self.cert = cert
def post(self, endpoint, json):
url = self.environment.base_url + endpoint
return requests.post(url=url, json=json, headers={'Content-Type': 'application/json'}, cert=self.cert)
def get(self, endpoint, id):
print("Not implemented yet!")
def payment_request(self, amount, currency, callback_url, payment_reference='', message=''):
data = {
'amount': amount,
'currency': currency,
'callback_url': callback_url,
'payment_reference': payment_reference,
'message': message
}
r = self.post('paymentrequests', json.dumps(data))
return r
def get_payment_request(payment_request_id):
print("Not implemented yet!")
def refund(self, amount, currency, callback_url, original_payment_reference, payer_payment_reference=''):
data = {
'amount': amount,
'currency': currency,
'callback_url': callback_url
}
r = self.post('refunds', json.dumps(data))
return r
def get_refund(refund_id):
print("Not implemented yet!")
<commit_msg>Correct data in payment request<commit_after>import json
import requests
from .environment import Environment
class SwishClient(object):
def __init__(self, environment, payee_alias, cert):
self.environment = Environment.parse_environment(environment)
self.payee_alias = payee_alias
self.cert = cert
def post(self, endpoint, json):
url = self.environment.base_url + endpoint
return requests.post(url=url, json=json, headers={'Content-Type': 'application/json'}, cert=self.cert)
def get(self, endpoint, id):
print("Not implemented yet!")
def payment_request(self, amount, currency, callback_url, payee_payment_reference='', message=''):
data = {
'payeeAlias': self.payee_alias,
'amount': amount,
'currency': currency,
'callbackUrl': callback_url,
'payeePaymentReference': payee_payment_reference,
'message': message
}
r = self.post('paymentrequests', json.dumps(data))
return r
def get_payment_request(payment_request_id):
print("Not implemented yet!")
def refund(self, amount, currency, callback_url, original_payment_reference, payer_payment_reference=''):
data = {
'amount': amount,
'currency': currency,
'callback_url': callback_url
}
r = self.post('refunds', json.dumps(data))
return r
def get_refund(refund_id):
print("Not implemented yet!")
|
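Illustrative only: the JSON body the corrected payment_request assembles, with Swish's camelCase keys and the payeeAlias taken from the client. All values below are placeholders, not real merchant data.

import json

payee_alias = '1231181189'  # placeholder merchant alias
data = {
    'payeeAlias': payee_alias,
    'amount': '100',
    'currency': 'SEK',
    'callbackUrl': 'https://example.com/swish-callback',
    'payeePaymentReference': 'order-42',
    'message': 'Test payment',
}
print(json.dumps(data, indent=2))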
5bf50c2f36e00004dac0bc9bd604ac99b77261df
|
rename_fotos/tests/test_rename_fotos.py
|
rename_fotos/tests/test_rename_fotos.py
|
import pytest
import ../__init__ as init
from selenium import webdriver
LOCAL_INSTANCE = "127.0.0.1:5000"
def test_is_running():
init.is_running()
# Firefox
driver = webdriver.Firefox()
driver.get(LOCAl_INSTANCE)
assert driver.body == "Flask is running"
|
import pytest
import rename_fotos as rfapp
LOCAL_INSTANCE = "127.0.0.1:5000"
@pytest.fixture
def client():
rfapp.app.config['TESTING'] = True
with rfapp.app.test_client() as client:
with rfapp.app.app_context():
rfapp.init_db()
yield client
def test_is_running(client):
response = client.get('/')
assert "FAIL" in response.data
|
Switch to flask built in tests
|
Switch to flask built in tests
|
Python
|
mit
|
daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various,daveinnyc/various
|
import pytest
import ../__init__ as init
from selenium import webdriver
LOCAL_INSTANCE = "127.0.0.1:5000"
def test_is_running():
init.is_running()
# Firefox
driver = webdriver.Firefox()
driver.get(LOCAl_INSTANCE)
assert driver.body == "Flask is running"
Switch to flask built in tests
|
import pytest
import rename_fotos as rfapp
LOCAL_INSTANCE = "127.0.0.1:5000"
@pytest.fixture
def client():
rfapp.app.config['TESTING'] = True
with rfapp.app.test_client() as client:
with rfapp.app.app_context():
rfapp.init_db()
yield client
def test_is_running(client):
response = client.get('/')
assert "FAIL" in response.data
|
<commit_before>import pytest
import ../__init__ as init
from selenium import webdriver
LOCAL_INSTANCE = "127.0.0.1:5000"
def test_is_running():
init.is_running()
# Firefox
driver = webdriver.Firefox()
driver.get(LOCAl_INSTANCE)
assert driver.body == "Flask is running"
<commit_msg>Switch to flask built in tests<commit_after>
|
import pytest
import rename_fotos as rfapp
LOCAL_INSTANCE = "127.0.0.1:5000"
@pytest.fixture
def client():
rfapp.app.config['TESTING'] = True
with rfapp.app.test_client() as client:
with rfapp.app.app_context():
rfapp.init_db()
yield client
def test_is_running(client):
response = client.get('/')
assert "FAIL" in response.data
|
import pytest
import ../__init__ as init
from selenium import webdriver
LOCAL_INSTANCE = "127.0.0.1:5000"
def test_is_running():
init.is_running()
# Firefox
driver = webdriver.Firefox()
driver.get(LOCAl_INSTANCE)
assert driver.body == "Flask is running"
Switch to flask built in tests
import pytest
import rename_fotos as rfapp
LOCAL_INSTANCE = "127.0.0.1:5000"
@pytest.fixture
def client():
rfapp.app.config['TESTING'] = True
with rfapp.app.test_client() as client:
with rfapp.app.app_context():
rfapp.init_db()
yield client
def test_is_running(client):
response = client.get('/')
assert "FAIL" in response.data
|
<commit_before>import pytest
import ../__init__ as init
from selenium import webdriver
LOCAL_INSTANCE = "127.0.0.1:5000"
def test_is_running():
init.is_running()
# Firefox
driver = webdriver.Firefox()
driver.get(LOCAl_INSTANCE)
assert driver.body == "Flask is running"
<commit_msg>Switch to flask built in tests<commit_after>import pytest
import rename_fotos as rfapp
LOCAL_INSTANCE = "127.0.0.1:5000"
@pytest.fixture
def client():
rfapp.app.config['TESTING'] = True
with rfapp.app.test_client() as client:
with rfapp.app.app_context():
rfapp.init_db()
yield client
def test_is_running(client):
response = client.get('/')
assert "FAIL" in response.data
|
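A self-contained sketch of the Flask test-client pattern the new test adopts, with a throwaway app standing in for rename_fotos; the route text and the assertion are illustrative only. Note that response.data is bytes under Python 3, so the comparison uses a bytes literal.

import pytest
from flask import Flask

app = Flask(__name__)

@app.route('/')
def index():
    return 'Flask is running'

@pytest.fixture
def client():
    app.config['TESTING'] = True
    with app.test_client() as client:
        yield client

def test_index(client):
    response = client.get('/')
    assert b'Flask is running' in response.data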
797786d53d525aabd9495ac68a8f319680e09f89
|
src/syntax/infix_coordination.py
|
src/syntax/infix_coordination.py
|
__author__ = 's7a'
# All imports
from nltk.tree import Tree
# The infix coordination class
class InfixCoordination:
# Constructor for the infix coordination
def __init__(self):
self.has_infix_coordination = False
self.slice_point = -1
self.subtree_list = []
# Break the tree
def break_tree(self, tree):
self.has_infix_coordination = False
self.slice_point = -1
self.subtree_list = []
self.parse_tree(tree)
print "Infix Coordination: " + str(self.has_infix_coordination)
print self.slice_point
print self.subtree_list
result_string = ' '.join(self.subtree_list[:self.slice_point-1]) +\
'. ' + ' '.join(self.subtree_list[self.slice_point:])
print result_string
return result_string
# Parse the tree
def parse_tree(self, tree):
if type(tree) == Tree:
sentence_root = tree[0]
if type(sentence_root) == Tree:
if sentence_root.label() == "S":
print "Valid Tree"
counter = 0
for node in sentence_root:
counter += 1
self.subtree_list.append(' '.join(node.leaves()))
if type(node) == Tree:
if node.label() == "CC":
self.has_infix_coordination |= True
self.slice_point = counter
|
__author__ = 's7a'
# All imports
from nltk.tree import Tree
# The infix coordination class
class InfixCoordination:
# Constructor for the infix coordination
def __init__(self):
self.has_infix_coordination = False
self.slice_point = -1
self.subtree_list = []
# Break the tree
def break_tree(self, tree):
self.has_infix_coordination = False
self.slice_point = -1
self.subtree_list = []
self.parse_tree(tree)
print "Infix Coordination: " + str(self.has_infix_coordination)
print self.slice_point
print self.subtree_list
if self.has_infix_coordination:
result_string = ' '.join(self.subtree_list[:self.slice_point-1]) +\
'. ' + ' '.join(self.subtree_list[self.slice_point:])
else:
result_string = ' '.join(tree.leaves())
print result_string
return result_string
# Parse the tree
def parse_tree(self, tree):
if type(tree) == Tree:
sentence_root = tree[0]
if type(sentence_root) == Tree:
if sentence_root.label() == "S":
print "Valid Tree"
counter = 0
for node in sentence_root:
counter += 1
self.subtree_list.append(' '.join(node.leaves()))
if type(node) == Tree:
if node.label() == "CC":
self.has_infix_coordination |= True
self.slice_point = counter
|
Break infix coordination only if there is one
|
Break infix coordination only if there is one
|
Python
|
mit
|
Somsubhra/Simplify,Somsubhra/Simplify,Somsubhra/Simplify
|
__author__ = 's7a'
# All imports
from nltk.tree import Tree
# The infix coordination class
class InfixCoordination:
# Constructor for the infix coordination
def __init__(self):
self.has_infix_coordination = False
self.slice_point = -1
self.subtree_list = []
# Break the tree
def break_tree(self, tree):
self.has_infix_coordination = False
self.slice_point = -1
self.subtree_list = []
self.parse_tree(tree)
print "Infix Coordination: " + str(self.has_infix_coordination)
print self.slice_point
print self.subtree_list
result_string = ' '.join(self.subtree_list[:self.slice_point-1]) +\
'. ' + ' '.join(self.subtree_list[self.slice_point:])
print result_string
return result_string
# Parse the tree
def parse_tree(self, tree):
if type(tree) == Tree:
sentence_root = tree[0]
if type(sentence_root) == Tree:
if sentence_root.label() == "S":
print "Valid Tree"
counter = 0
for node in sentence_root:
counter += 1
self.subtree_list.append(' '.join(node.leaves()))
if type(node) == Tree:
if node.label() == "CC":
self.has_infix_coordination |= True
self.slice_point = counter
Break infix coordination only if there is one
|
__author__ = 's7a'
# All imports
from nltk.tree import Tree
# The infix coordination class
class InfixCoordination:
# Constructor for the infix coordination
def __init__(self):
self.has_infix_coordination = False
self.slice_point = -1
self.subtree_list = []
# Break the tree
def break_tree(self, tree):
self.has_infix_coordination = False
self.slice_point = -1
self.subtree_list = []
self.parse_tree(tree)
print "Infix Coordination: " + str(self.has_infix_coordination)
print self.slice_point
print self.subtree_list
if self.has_infix_coordination:
result_string = ' '.join(self.subtree_list[:self.slice_point-1]) +\
'. ' + ' '.join(self.subtree_list[self.slice_point:])
else:
result_string = ' '.join(tree.leaves())
print result_string
return result_string
# Parse the tree
def parse_tree(self, tree):
if type(tree) == Tree:
sentence_root = tree[0]
if type(sentence_root) == Tree:
if sentence_root.label() == "S":
print "Valid Tree"
counter = 0
for node in sentence_root:
counter += 1
self.subtree_list.append(' '.join(node.leaves()))
if type(node) == Tree:
if node.label() == "CC":
self.has_infix_coordination |= True
self.slice_point = counter
|
<commit_before>__author__ = 's7a'
# All imports
from nltk.tree import Tree
# The infix coordination class
class InfixCoordination:
# Constructor for the infix coordination
def __init__(self):
self.has_infix_coordination = False
self.slice_point = -1
self.subtree_list = []
# Break the tree
def break_tree(self, tree):
self.has_infix_coordination = False
self.slice_point = -1
self.subtree_list = []
self.parse_tree(tree)
print "Infix Coordination: " + str(self.has_infix_coordination)
print self.slice_point
print self.subtree_list
result_string = ' '.join(self.subtree_list[:self.slice_point-1]) +\
'. ' + ' '.join(self.subtree_list[self.slice_point:])
print result_string
return result_string
# Parse the tree
def parse_tree(self, tree):
if type(tree) == Tree:
sentence_root = tree[0]
if type(sentence_root) == Tree:
if sentence_root.label() == "S":
print "Valid Tree"
counter = 0
for node in sentence_root:
counter += 1
self.subtree_list.append(' '.join(node.leaves()))
if type(node) == Tree:
if node.label() == "CC":
self.has_infix_coordination |= True
self.slice_point = counter
<commit_msg>Break infix coordination only if there is one<commit_after>
|
__author__ = 's7a'
# All imports
from nltk.tree import Tree
# The infix coordination class
class InfixCoordination:
# Constructor for the infix coordination
def __init__(self):
self.has_infix_coordination = False
self.slice_point = -1
self.subtree_list = []
# Break the tree
def break_tree(self, tree):
self.has_infix_coordination = False
self.slice_point = -1
self.subtree_list = []
self.parse_tree(tree)
print "Infix Coordination: " + str(self.has_infix_coordination)
print self.slice_point
print self.subtree_list
if self.has_infix_coordination:
result_string = ' '.join(self.subtree_list[:self.slice_point-1]) +\
'. ' + ' '.join(self.subtree_list[self.slice_point:])
else:
result_string = ' '.join(tree.leaves())
print result_string
return result_string
# Parse the tree
def parse_tree(self, tree):
if type(tree) == Tree:
sentence_root = tree[0]
if type(sentence_root) == Tree:
if sentence_root.label() == "S":
print "Valid Tree"
counter = 0
for node in sentence_root:
counter += 1
self.subtree_list.append(' '.join(node.leaves()))
if type(node) == Tree:
if node.label() == "CC":
self.has_infix_coordination |= True
self.slice_point = counter
|
__author__ = 's7a'
# All imports
from nltk.tree import Tree
# The infix coordination class
class InfixCoordination:
# Constructor for the infix coordination
def __init__(self):
self.has_infix_coordination = False
self.slice_point = -1
self.subtree_list = []
# Break the tree
def break_tree(self, tree):
self.has_infix_coordination = False
self.slice_point = -1
self.subtree_list = []
self.parse_tree(tree)
print "Infix Coordination: " + str(self.has_infix_coordination)
print self.slice_point
print self.subtree_list
result_string = ' '.join(self.subtree_list[:self.slice_point-1]) +\
'. ' + ' '.join(self.subtree_list[self.slice_point:])
print result_string
return result_string
# Parse the tree
def parse_tree(self, tree):
if type(tree) == Tree:
sentence_root = tree[0]
if type(sentence_root) == Tree:
if sentence_root.label() == "S":
print "Valid Tree"
counter = 0
for node in sentence_root:
counter += 1
self.subtree_list.append(' '.join(node.leaves()))
if type(node) == Tree:
if node.label() == "CC":
self.has_infix_coordination |= True
self.slice_point = counter
Break infix coordination only if there is one
__author__ = 's7a'
# All imports
from nltk.tree import Tree
# The infix coordination class
class InfixCoordination:
# Constructor for the infix coordination
def __init__(self):
self.has_infix_coordination = False
self.slice_point = -1
self.subtree_list = []
# Break the tree
def break_tree(self, tree):
self.has_infix_coordination = False
self.slice_point = -1
self.subtree_list = []
self.parse_tree(tree)
print "Infix Coordination: " + str(self.has_infix_coordination)
print self.slice_point
print self.subtree_list
if self.has_infix_coordination:
result_string = ' '.join(self.subtree_list[:self.slice_point-1]) +\
'. ' + ' '.join(self.subtree_list[self.slice_point:])
else:
result_string = ' '.join(tree.leaves())
print result_string
return result_string
# Parse the tree
def parse_tree(self, tree):
if type(tree) == Tree:
sentence_root = tree[0]
if type(sentence_root) == Tree:
if sentence_root.label() == "S":
print "Valid Tree"
counter = 0
for node in sentence_root:
counter += 1
self.subtree_list.append(' '.join(node.leaves()))
if type(node) == Tree:
if node.label() == "CC":
self.has_infix_coordination |= True
self.slice_point = counter
|
<commit_before>__author__ = 's7a'
# All imports
from nltk.tree import Tree
# The infix coordination class
class InfixCoordination:
# Constructor for the infix coordination
def __init__(self):
self.has_infix_coordination = False
self.slice_point = -1
self.subtree_list = []
# Break the tree
def break_tree(self, tree):
self.has_infix_coordination = False
self.slice_point = -1
self.subtree_list = []
self.parse_tree(tree)
print "Infix Coordination: " + str(self.has_infix_coordination)
print self.slice_point
print self.subtree_list
result_string = ' '.join(self.subtree_list[:self.slice_point-1]) +\
'. ' + ' '.join(self.subtree_list[self.slice_point:])
print result_string
return result_string
# Parse the tree
def parse_tree(self, tree):
if type(tree) == Tree:
sentence_root = tree[0]
if type(sentence_root) == Tree:
if sentence_root.label() == "S":
print "Valid Tree"
counter = 0
for node in sentence_root:
counter += 1
self.subtree_list.append(' '.join(node.leaves()))
if type(node) == Tree:
if node.label() == "CC":
self.has_infix_coordination |= True
self.slice_point = counter
<commit_msg>Break infix coordination only if there is one<commit_after>__author__ = 's7a'
# All imports
from nltk.tree import Tree
# The infix coordination class
class InfixCoordination:
# Constructor for the infix coordination
def __init__(self):
self.has_infix_coordination = False
self.slice_point = -1
self.subtree_list = []
# Break the tree
def break_tree(self, tree):
self.has_infix_coordination = False
self.slice_point = -1
self.subtree_list = []
self.parse_tree(tree)
print "Infix Coordination: " + str(self.has_infix_coordination)
print self.slice_point
print self.subtree_list
if self.has_infix_coordination:
result_string = ' '.join(self.subtree_list[:self.slice_point-1]) +\
'. ' + ' '.join(self.subtree_list[self.slice_point:])
else:
result_string = ' '.join(tree.leaves())
print result_string
return result_string
# Parse the tree
def parse_tree(self, tree):
if type(tree) == Tree:
sentence_root = tree[0]
if type(sentence_root) == Tree:
if sentence_root.label() == "S":
print "Valid Tree"
counter = 0
for node in sentence_root:
counter += 1
self.subtree_list.append(' '.join(node.leaves()))
if type(node) == Tree:
if node.label() == "CC":
self.has_infix_coordination |= True
self.slice_point = counter
|
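A simplified, NLTK-free sketch of the guarded behaviour introduced above: split the sentence at a coordinating conjunction only when one is present, and hand the sentence back unchanged otherwise. Plain token lists stand in for the parse-tree leaves.

def break_on_coordination(tokens, conjunctions=('and', 'but', 'or')):
    # Mirror of the slice_point logic: drop the conjunction and split there.
    for i, token in enumerate(tokens):
        if token in conjunctions:
            return ' '.join(tokens[:i]) + '. ' + ' '.join(tokens[i + 1:])
    # No coordination found: return the original sentence unchanged.
    return ' '.join(tokens)

print(break_on_coordination(['John', 'sang', 'and', 'Mary', 'danced']))  # John sang. Mary danced
print(break_on_coordination(['John', 'sang']))                           # John sang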
6cabf9c03cd40ae748d03f1a2fd3f4f3db6c47a5
|
protocols/models.py
|
protocols/models.py
|
from datetime import datetime
from django.db import models
class Topic(models.Model):
name = models.CharField(max_length=100)
description = models.TextField(blank=True, null=True)
attachment = models.ManyToManyField('attachments.Attachment')
voted_for = models.PositiveIntegerField()
voted_against = models.PositiveIntegerField()
voted_abstain = models.PositiveIntegerField()
statement = models.TextField()
def __unicode__(self):
return self.name
class Institution(models.Model):
name = models.CharField(max_length=64)
def __unicode__(self):
return self.name
class Protocol(models.Model):
conducted_at = models.DateField(default=datetime.now)
institution = models.ForeignKey(Institution)
number = models.CharField(max_length=20, unique=True)
scheduled_time = models.TimeField()
absent = models.ManyToManyField('members.User', related_name='meetings_absent')
attendents = models.ManyToManyField('members.User', related_name='meetings_attend')
start_time = models.TimeField()
additional = models.TextField(blank=True, null=True)
quorum = models.PositiveIntegerField()
majority = models.PositiveIntegerField()
current_majority = models.PositiveIntegerField()
topics = models.ManyToManyField(Topic)
information = models.TextField(blank=True, null=True)
def __unicode__(self):
return self.number
|
from datetime import datetime
from django.db import models
class Topic(models.Model):
name = models.CharField(max_length=100)
description = models.TextField(blank=True, null=True)
attachment = models.ManyToManyField('attachments.Attachment')
voted_for = models.PositiveIntegerField(blank=True, null=True)
voted_against = models.PositiveIntegerField(blank=True, null=True)
voted_abstain = models.PositiveIntegerField(blank=True, null=True)
statement = models.TextField(blank=True, null=True)
def __unicode__(self):
return self.name
class Institution(models.Model):
name = models.CharField(max_length=64)
def __unicode__(self):
return self.name
class Protocol(models.Model):
conducted_at = models.DateField(default=datetime.now)
institution = models.ForeignKey(Institution)
number = models.CharField(max_length=20, unique=True)
scheduled_time = models.TimeField()
absent = models.ManyToManyField('members.User', related_name='meetings_absent')
attendents = models.ManyToManyField('members.User', related_name='meetings_attend')
start_time = models.TimeField()
additional = models.TextField(blank=True, null=True)
quorum = models.PositiveIntegerField()
majority = models.PositiveIntegerField()
current_majority = models.PositiveIntegerField()
topics = models.ManyToManyField(Topic)
information = models.TextField(blank=True, null=True)
def __unicode__(self):
return self.number
|
Add option for blank voting
|
Add option for blank voting
|
Python
|
mit
|
Hackfmi/Diaphanum,Hackfmi/Diaphanum
|
from datetime import datetime
from django.db import models
class Topic(models.Model):
name = models.CharField(max_length=100)
description = models.TextField(blank=True, null=True)
attachment = models.ManyToManyField('attachments.Attachment')
voted_for = models.PositiveIntegerField()
voted_against = models.PositiveIntegerField()
voted_abstain = models.PositiveIntegerField()
statement = models.TextField()
def __unicode__(self):
return self.name
class Institution(models.Model):
name = models.CharField(max_length=64)
def __unicode__(self):
return self.name
class Protocol(models.Model):
conducted_at = models.DateField(default=datetime.now)
institution = models.ForeignKey(Institution)
number = models.CharField(max_length=20, unique=True)
scheduled_time = models.TimeField()
absent = models.ManyToManyField('members.User', related_name='meetings_absent')
attendents = models.ManyToManyField('members.User', related_name='meetings_attend')
start_time = models.TimeField()
additional = models.TextField(blank=True, null=True)
quorum = models.PositiveIntegerField()
majority = models.PositiveIntegerField()
current_majority = models.PositiveIntegerField()
topics = models.ManyToManyField(Topic)
information = models.TextField(blank=True, null=True)
def __unicode__(self):
return self.number
Add option for blank voting
|
from datetime import datetime
from django.db import models
class Topic(models.Model):
name = models.CharField(max_length=100)
description = models.TextField(blank=True, null=True)
attachment = models.ManyToManyField('attachments.Attachment')
voted_for = models.PositiveIntegerField(blank=True, null=True)
voted_against = models.PositiveIntegerField(blank=True, null=True)
voted_abstain = models.PositiveIntegerField(blank=True, null=True)
statement = models.TextField(blank=True, null=True)
def __unicode__(self):
return self.name
class Institution(models.Model):
name = models.CharField(max_length=64)
def __unicode__(self):
return self.name
class Protocol(models.Model):
conducted_at = models.DateField(default=datetime.now)
institution = models.ForeignKey(Institution)
number = models.CharField(max_length=20, unique=True)
scheduled_time = models.TimeField()
absent = models.ManyToManyField('members.User', related_name='meetings_absent')
attendents = models.ManyToManyField('members.User', related_name='meetings_attend')
start_time = models.TimeField()
additional = models.TextField(blank=True, null=True)
quorum = models.PositiveIntegerField()
majority = models.PositiveIntegerField()
current_majority = models.PositiveIntegerField()
topics = models.ManyToManyField(Topic)
information = models.TextField(blank=True, null=True)
def __unicode__(self):
return self.number
|
<commit_before>from datetime import datetime
from django.db import models
class Topic(models.Model):
name = models.CharField(max_length=100)
description = models.TextField(blank=True, null=True)
attachment = models.ManyToManyField('attachments.Attachment')
voted_for = models.PositiveIntegerField()
voted_against = models.PositiveIntegerField()
voted_abstain = models.PositiveIntegerField()
statement = models.TextField()
def __unicode__(self):
return self.name
class Institution(models.Model):
name = models.CharField(max_length=64)
def __unicode__(self):
return self.name
class Protocol(models.Model):
conducted_at = models.DateField(default=datetime.now)
institution = models.ForeignKey(Institution)
number = models.CharField(max_length=20, unique=True)
scheduled_time = models.TimeField()
absent = models.ManyToManyField('members.User', related_name='meetings_absent')
attendents = models.ManyToManyField('members.User', related_name='meetings_attend')
start_time = models.TimeField()
additional = models.TextField(blank=True, null=True)
quorum = models.PositiveIntegerField()
majority = models.PositiveIntegerField()
current_majority = models.PositiveIntegerField()
topics = models.ManyToManyField(Topic)
information = models.TextField(blank=True, null=True)
def __unicode__(self):
return self.number
<commit_msg>Add option for blank voting<commit_after>
|
from datetime import datetime
from django.db import models
class Topic(models.Model):
name = models.CharField(max_length=100)
description = models.TextField(blank=True, null=True)
attachment = models.ManyToManyField('attachments.Attachment')
voted_for = models.PositiveIntegerField(blank=True, null=True)
voted_against = models.PositiveIntegerField(blank=True, null=True)
voted_abstain = models.PositiveIntegerField(blank=True, null=True)
statement = models.TextField(blank=True, null=True)
def __unicode__(self):
return self.name
class Institution(models.Model):
name = models.CharField(max_length=64)
def __unicode__(self):
return self.name
class Protocol(models.Model):
conducted_at = models.DateField(default=datetime.now)
institution = models.ForeignKey(Institution)
number = models.CharField(max_length=20, unique=True)
scheduled_time = models.TimeField()
absent = models.ManyToManyField('members.User', related_name='meetings_absent')
attendents = models.ManyToManyField('members.User', related_name='meetings_attend')
start_time = models.TimeField()
additional = models.TextField(blank=True, null=True)
quorum = models.PositiveIntegerField()
majority = models.PositiveIntegerField()
current_majority = models.PositiveIntegerField()
topics = models.ManyToManyField(Topic)
information = models.TextField(blank=True, null=True)
def __unicode__(self):
return self.number
|
from datetime import datetime
from django.db import models
class Topic(models.Model):
name = models.CharField(max_length=100)
description = models.TextField(blank=True, null=True)
attachment = models.ManyToManyField('attachments.Attachment')
voted_for = models.PositiveIntegerField()
voted_against = models.PositiveIntegerField()
voted_abstain = models.PositiveIntegerField()
statement = models.TextField()
def __unicode__(self):
return self.name
class Institution(models.Model):
name = models.CharField(max_length=64)
def __unicode__(self):
return self.name
class Protocol(models.Model):
conducted_at = models.DateField(default=datetime.now)
institution = models.ForeignKey(Institution)
number = models.CharField(max_length=20, unique=True)
scheduled_time = models.TimeField()
absent = models.ManyToManyField('members.User', related_name='meetings_absent')
attendents = models.ManyToManyField('members.User', related_name='meetings_attend')
start_time = models.TimeField()
additional = models.TextField(blank=True, null=True)
quorum = models.PositiveIntegerField()
majority = models.PositiveIntegerField()
current_majority = models.PositiveIntegerField()
topics = models.ManyToManyField(Topic)
information = models.TextField(blank=True, null=True)
def __unicode__(self):
return self.number
Add option for blank voting
from datetime import datetime
from django.db import models
class Topic(models.Model):
name = models.CharField(max_length=100)
description = models.TextField(blank=True, null=True)
attachment = models.ManyToManyField('attachments.Attachment')
voted_for = models.PositiveIntegerField(blank=True, null=True)
voted_against = models.PositiveIntegerField(blank=True, null=True)
voted_abstain = models.PositiveIntegerField(blank=True, null=True)
statement = models.TextField(blank=True, null=True)
def __unicode__(self):
return self.name
class Institution(models.Model):
name = models.CharField(max_length=64)
def __unicode__(self):
return self.name
class Protocol(models.Model):
conducted_at = models.DateField(default=datetime.now)
institution = models.ForeignKey(Institution)
number = models.CharField(max_length=20, unique=True)
scheduled_time = models.TimeField()
absent = models.ManyToManyField('members.User', related_name='meetings_absent')
attendents = models.ManyToManyField('members.User', related_name='meetings_attend')
start_time = models.TimeField()
additional = models.TextField(blank=True, null=True)
quorum = models.PositiveIntegerField()
majority = models.PositiveIntegerField()
current_majority = models.PositiveIntegerField()
topics = models.ManyToManyField(Topic)
information = models.TextField(blank=True, null=True)
def __unicode__(self):
return self.number
|
<commit_before>from datetime import datetime
from django.db import models
class Topic(models.Model):
name = models.CharField(max_length=100)
description = models.TextField(blank=True, null=True)
attachment = models.ManyToManyField('attachments.Attachment')
voted_for = models.PositiveIntegerField()
voted_against = models.PositiveIntegerField()
voted_abstain = models.PositiveIntegerField()
statement = models.TextField()
def __unicode__(self):
return self.name
class Institution(models.Model):
name = models.CharField(max_length=64)
def __unicode__(self):
return self.name
class Protocol(models.Model):
conducted_at = models.DateField(default=datetime.now)
institution = models.ForeignKey(Institution)
number = models.CharField(max_length=20, unique=True)
scheduled_time = models.TimeField()
absent = models.ManyToManyField('members.User', related_name='meetings_absent')
attendents = models.ManyToManyField('members.User', related_name='meetings_attend')
start_time = models.TimeField()
additional = models.TextField(blank=True, null=True)
quorum = models.PositiveIntegerField()
majority = models.PositiveIntegerField()
current_majority = models.PositiveIntegerField()
topics = models.ManyToManyField(Topic)
information = models.TextField(blank=True, null=True)
def __unicode__(self):
return self.number
<commit_msg>Add option for blank voting<commit_after>from datetime import datetime
from django.db import models
class Topic(models.Model):
name = models.CharField(max_length=100)
description = models.TextField(blank=True, null=True)
attachment = models.ManyToManyField('attachments.Attachment')
voted_for = models.PositiveIntegerField(blank=True, null=True)
voted_against = models.PositiveIntegerField(blank=True, null=True)
voted_abstain = models.PositiveIntegerField(blank=True, null=True)
statement = models.TextField(blank=True, null=True)
def __unicode__(self):
return self.name
class Institution(models.Model):
name = models.CharField(max_length=64)
def __unicode__(self):
return self.name
class Protocol(models.Model):
conducted_at = models.DateField(default=datetime.now)
institution = models.ForeignKey(Institution)
number = models.CharField(max_length=20, unique=True)
scheduled_time = models.TimeField()
absent = models.ManyToManyField('members.User', related_name='meetings_absent')
attendents = models.ManyToManyField('members.User', related_name='meetings_attend')
start_time = models.TimeField()
additional = models.TextField(blank=True, null=True)
quorum = models.PositiveIntegerField()
majority = models.PositiveIntegerField()
current_majority = models.PositiveIntegerField()
topics = models.ManyToManyField(Topic)
information = models.TextField(blank=True, null=True)
def __unicode__(self):
return self.number
|
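A rough non-Django sketch of what the blank=True/null=True change means for callers: a Topic can now be stored before any votes or statement exist. The dataclass below only mirrors that behaviour and is not part of the protocols app.

from dataclasses import dataclass
from typing import Optional

@dataclass
class TopicDraft:
    name: str
    voted_for: Optional[int] = None      # counts may be absent until a vote happens
    voted_against: Optional[int] = None
    voted_abstain: Optional[int] = None
    statement: Optional[str] = None

draft = TopicDraft(name='Budget approval')
print(draft.voted_for is None)  # True: the topic is valid with no recorded votes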
fc7ba9019b42f056713b81bfee70f9e780b4aab5
|
models/rasmachine/twitter_client.py
|
models/rasmachine/twitter_client.py
|
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import tweepy
def get_oauth(auth_file):
try:
fh = open(auth_file, 'rt')
except IOError:
print('Could not get Twitter credentials.')
return None
lines = [l.strip() for l in fh.readlines()]
oauth = tweepy.OAuthHandler(lines[0], lines[1])
oauth.set_access_token(lines[2], lines[3])
fh.close()
return oauth
def update_status(msg, auth_file='twitter_cred.txt'):
twitter_auth = get_oauth(auth_file)
if twitter_auth is None:
return
twitter_api = tweepy.API(twitter_auth)
twitter_api.update_status(msg)
|
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import tweepy
def get_oauth_file(auth_file):
try:
fh = open(auth_file, 'rt')
except IOError:
print('Could not get Twitter credentials.')
return None
lines = [l.strip() for l in fh.readlines()]
oauth = tweepy.OAuthHandler(lines[0], lines[1])
oauth.set_access_token(lines[2], lines[3])
fh.close()
return oauth
def get_oauth_dict(auth_dict):
oauth = tweepy.OAuthHandler(auth_dict.get('consumer_token'),
auth_dict.get('consumer_secred'))
oauth.set_access_token(auth_dict.get('access_token'),
auth_dict.get('access_secret'))
return oauth
def update_status(msg, twitter_cred):
twitter_auth = get_oauth_dict(twitter_cred)
if twitter_auth is None:
return
twitter_api = tweepy.API(twitter_auth)
twitter_api.update_status(msg)
|
Implement dict credentials in Twitter client
|
Implement dict credentials in Twitter client
|
Python
|
bsd-2-clause
|
pvtodorov/indra,johnbachman/belpy,jmuhlich/indra,jmuhlich/indra,bgyori/indra,johnbachman/belpy,pvtodorov/indra,sorgerlab/belpy,johnbachman/belpy,pvtodorov/indra,johnbachman/indra,bgyori/indra,pvtodorov/indra,johnbachman/indra,sorgerlab/belpy,johnbachman/indra,sorgerlab/indra,bgyori/indra,sorgerlab/belpy,sorgerlab/indra,jmuhlich/indra,sorgerlab/indra
|
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import tweepy
def get_oauth(auth_file):
try:
fh = open(auth_file, 'rt')
except IOError:
print('Could not get Twitter credentials.')
return None
lines = [l.strip() for l in fh.readlines()]
oauth = tweepy.OAuthHandler(lines[0], lines[1])
oauth.set_access_token(lines[2], lines[3])
fh.close()
return oauth
def update_status(msg, auth_file='twitter_cred.txt'):
twitter_auth = get_oauth(auth_file)
if twitter_auth is None:
return
twitter_api = tweepy.API(twitter_auth)
twitter_api.update_status(msg)
Implement dict credentials in Twitter client
|
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import tweepy
def get_oauth_file(auth_file):
try:
fh = open(auth_file, 'rt')
except IOError:
print('Could not get Twitter credentials.')
return None
lines = [l.strip() for l in fh.readlines()]
oauth = tweepy.OAuthHandler(lines[0], lines[1])
oauth.set_access_token(lines[2], lines[3])
fh.close()
return oauth
def get_oauth_dict(auth_dict):
oauth = tweepy.OAuthHandler(auth_dict.get('consumer_token'),
auth_dict.get('consumer_secred'))
oauth.set_access_token(auth_dict.get('access_token'),
auth_dict.get('access_secret'))
return oauth
def update_status(msg, twitter_cred):
twitter_auth = get_oauth_dict(twitter_cred)
if twitter_auth is None:
return
twitter_api = tweepy.API(twitter_auth)
twitter_api.update_status(msg)
|
<commit_before>from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import tweepy
def get_oauth(auth_file):
try:
fh = open(auth_file, 'rt')
except IOError:
print('Could not get Twitter credentials.')
return None
lines = [l.strip() for l in fh.readlines()]
oauth = tweepy.OAuthHandler(lines[0], lines[1])
oauth.set_access_token(lines[2], lines[3])
fh.close()
return oauth
def update_status(msg, auth_file='twitter_cred.txt'):
twitter_auth = get_oauth(auth_file)
if twitter_auth is None:
return
twitter_api = tweepy.API(twitter_auth)
twitter_api.update_status(msg)
<commit_msg>Implement dict credentials in Twitter client<commit_after>
|
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import tweepy
def get_oauth_file(auth_file):
try:
fh = open(auth_file, 'rt')
except IOError:
print('Could not get Twitter credentials.')
return None
lines = [l.strip() for l in fh.readlines()]
oauth = tweepy.OAuthHandler(lines[0], lines[1])
oauth.set_access_token(lines[2], lines[3])
fh.close()
return oauth
def get_oauth_dict(auth_dict):
oauth = tweepy.OAuthHandler(auth_dict.get('consumer_token'),
auth_dict.get('consumer_secred'))
oauth.set_access_token(auth_dict.get('access_token'),
auth_dict.get('access_secret'))
return oauth
def update_status(msg, twitter_cred):
twitter_auth = get_oauth_dict(twitter_cred)
if twitter_auth is None:
return
twitter_api = tweepy.API(twitter_auth)
twitter_api.update_status(msg)
|
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import tweepy
def get_oauth(auth_file):
try:
fh = open(auth_file, 'rt')
except IOError:
print('Could not get Twitter credentials.')
return None
lines = [l.strip() for l in fh.readlines()]
oauth = tweepy.OAuthHandler(lines[0], lines[1])
oauth.set_access_token(lines[2], lines[3])
fh.close()
return oauth
def update_status(msg, auth_file='twitter_cred.txt'):
twitter_auth = get_oauth(auth_file)
if twitter_auth is None:
return
twitter_api = tweepy.API(twitter_auth)
twitter_api.update_status(msg)
Implement dict credentials in Twitter client
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import tweepy
def get_oauth_file(auth_file):
try:
fh = open(auth_file, 'rt')
except IOError:
print('Could not get Twitter credentials.')
return None
lines = [l.strip() for l in fh.readlines()]
oauth = tweepy.OAuthHandler(lines[0], lines[1])
oauth.set_access_token(lines[2], lines[3])
fh.close()
return oauth
def get_oauth_dict(auth_dict):
oauth = tweepy.OAuthHandler(auth_dict.get('consumer_token'),
auth_dict.get('consumer_secred'))
oauth.set_access_token(auth_dict.get('access_token'),
auth_dict.get('access_secret'))
return oauth
def update_status(msg, twitter_cred):
twitter_auth = get_oauth_dict(twitter_cred)
if twitter_auth is None:
return
twitter_api = tweepy.API(twitter_auth)
twitter_api.update_status(msg)
|
<commit_before>from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import tweepy
def get_oauth(auth_file):
try:
fh = open(auth_file, 'rt')
except IOError:
print('Could not get Twitter credentials.')
return None
lines = [l.strip() for l in fh.readlines()]
oauth = tweepy.OAuthHandler(lines[0], lines[1])
oauth.set_access_token(lines[2], lines[3])
fh.close()
return oauth
def update_status(msg, auth_file='twitter_cred.txt'):
twitter_auth = get_oauth(auth_file)
if twitter_auth is None:
return
twitter_api = tweepy.API(twitter_auth)
twitter_api.update_status(msg)
<commit_msg>Implement dict credentials in Twitter client<commit_after>from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
import tweepy
def get_oauth_file(auth_file):
try:
fh = open(auth_file, 'rt')
except IOError:
print('Could not get Twitter credentials.')
return None
lines = [l.strip() for l in fh.readlines()]
oauth = tweepy.OAuthHandler(lines[0], lines[1])
oauth.set_access_token(lines[2], lines[3])
fh.close()
return oauth
def get_oauth_dict(auth_dict):
oauth = tweepy.OAuthHandler(auth_dict.get('consumer_token'),
auth_dict.get('consumer_secred'))
oauth.set_access_token(auth_dict.get('access_token'),
auth_dict.get('access_secret'))
return oauth
def update_status(msg, twitter_cred):
twitter_auth = get_oauth_dict(twitter_cred)
if twitter_auth is None:
return
twitter_api = tweepy.API(twitter_auth)
twitter_api.update_status(msg)
|
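A minimal usage sketch for the dict-credential interface in the record above; the helper name, file name and file layout are assumptions, while the key names mirror the auth_dict.get(...) calls in get_oauth_dict (including 'consumer_secred', spelled exactly as the record reads it).

def load_twitter_cred(path='twitter_cred.txt'):
    # Assumed helper, not part of the record: read one value per line and key the
    # values the way get_oauth_dict() looks them up.
    with open(path, 'rt') as fh:
        keys = ['consumer_token', 'consumer_secred', 'access_token', 'access_secret']
        return dict(zip(keys, (line.strip() for line in fh)))

# update_status('model updated', load_twitter_cred()) would then post via tweepy.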
fe9512de5e41a6892826e70543637b893f3bd6f5
|
temba/msgs/migrations/0087_populate_broadcast_send_all.py
|
temba/msgs/migrations/0087_populate_broadcast_send_all.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-06 17:33
from __future__ import unicode_literals
from django.db import migrations
from temba.utils import chunk_list
def do_populate_send_all(Broadcast):
broadcast_ids = Broadcast.objects.all().values_list('id', flat=True)
for chunk in chunk_list(broadcast_ids, 1000):
Broadcast.objects.filter(pk__in=chunk).update(send_all=False)
def apply_as_migration(apps, schema_editor):
Broadcast = apps.get_model('msgs', 'Broadcast')
do_populate_send_all(Broadcast)
def apply_manual():
from temba.msgs.models import Broadcast
do_populate_send_all(Broadcast)
class Migration(migrations.Migration):
dependencies = [
('msgs', '0086_broadcast_send_all'),
]
operations = [
migrations.RunPython(apply_as_migration)
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-06 17:33
from __future__ import unicode_literals
from django.db import migrations
from temba.utils import chunk_list
def do_populate_send_all(Broadcast):
broadcast_ids = Broadcast.objects.all().values_list('id', flat=True)
broadcast_count = len(broadcast_ids)
print('Starting to update %d broadcasts send all field...' % broadcast_count)
updated = 0
for chunk in chunk_list(broadcast_ids, 1000):
Broadcast.objects.filter(pk__in=chunk).update(send_all=False)
print("Updated %d of %d broadcasts" % (updated + len(chunk), broadcast_count))
def apply_as_migration(apps, schema_editor):
Broadcast = apps.get_model('msgs', 'Broadcast')
do_populate_send_all(Broadcast)
def apply_manual():
from temba.msgs.models import Broadcast
do_populate_send_all(Broadcast)
class Migration(migrations.Migration):
dependencies = [
('msgs', '0086_broadcast_send_all'),
]
operations = [
migrations.RunPython(apply_as_migration)
]
|
Print progress of data migration
|
Print progress of data migration
|
Python
|
agpl-3.0
|
pulilab/rapidpro,pulilab/rapidpro,pulilab/rapidpro,pulilab/rapidpro,pulilab/rapidpro
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-06 17:33
from __future__ import unicode_literals
from django.db import migrations
from temba.utils import chunk_list
def do_populate_send_all(Broadcast):
broadcast_ids = Broadcast.objects.all().values_list('id', flat=True)
for chunk in chunk_list(broadcast_ids, 1000):
Broadcast.objects.filter(pk__in=chunk).update(send_all=False)
def apply_as_migration(apps, schema_editor):
Broadcast = apps.get_model('msgs', 'Broadcast')
do_populate_send_all(Broadcast)
def apply_manual():
from temba.msgs.models import Broadcast
do_populate_send_all(Broadcast)
class Migration(migrations.Migration):
dependencies = [
('msgs', '0086_broadcast_send_all'),
]
operations = [
migrations.RunPython(apply_as_migration)
]
Print progress of data migration
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-06 17:33
from __future__ import unicode_literals
from django.db import migrations
from temba.utils import chunk_list
def do_populate_send_all(Broadcast):
broadcast_ids = Broadcast.objects.all().values_list('id', flat=True)
broadcast_count = len(broadcast_ids)
print('Starting to update %d broadcasts send all field...' % broadcast_count)
updated = 0
for chunk in chunk_list(broadcast_ids, 1000):
Broadcast.objects.filter(pk__in=chunk).update(send_all=False)
print("Updated %d of %d broadcasts" % (updated + len(chunk), broadcast_count))
def apply_as_migration(apps, schema_editor):
Broadcast = apps.get_model('msgs', 'Broadcast')
do_populate_send_all(Broadcast)
def apply_manual():
from temba.msgs.models import Broadcast
do_populate_send_all(Broadcast)
class Migration(migrations.Migration):
dependencies = [
('msgs', '0086_broadcast_send_all'),
]
operations = [
migrations.RunPython(apply_as_migration)
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-06 17:33
from __future__ import unicode_literals
from django.db import migrations
from temba.utils import chunk_list
def do_populate_send_all(Broadcast):
broadcast_ids = Broadcast.objects.all().values_list('id', flat=True)
for chunk in chunk_list(broadcast_ids, 1000):
Broadcast.objects.filter(pk__in=chunk).update(send_all=False)
def apply_as_migration(apps, schema_editor):
Broadcast = apps.get_model('msgs', 'Broadcast')
do_populate_send_all(Broadcast)
def apply_manual():
from temba.msgs.models import Broadcast
do_populate_send_all(Broadcast)
class Migration(migrations.Migration):
dependencies = [
('msgs', '0086_broadcast_send_all'),
]
operations = [
migrations.RunPython(apply_as_migration)
]
<commit_msg>Print progress of data migration<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-06 17:33
from __future__ import unicode_literals
from django.db import migrations
from temba.utils import chunk_list
def do_populate_send_all(Broadcast):
broadcast_ids = Broadcast.objects.all().values_list('id', flat=True)
broadcast_count = len(broadcast_ids)
print('Starting to update %d broadcasts send all field...' % broadcast_count)
updated = 0
for chunk in chunk_list(broadcast_ids, 1000):
Broadcast.objects.filter(pk__in=chunk).update(send_all=False)
print("Updated %d of %d broadcasts" % (updated + len(chunk), broadcast_count))
def apply_as_migration(apps, schema_editor):
Broadcast = apps.get_model('msgs', 'Broadcast')
do_populate_send_all(Broadcast)
def apply_manual():
from temba.msgs.models import Broadcast
do_populate_send_all(Broadcast)
class Migration(migrations.Migration):
dependencies = [
('msgs', '0086_broadcast_send_all'),
]
operations = [
migrations.RunPython(apply_as_migration)
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-06 17:33
from __future__ import unicode_literals
from django.db import migrations
from temba.utils import chunk_list
def do_populate_send_all(Broadcast):
broadcast_ids = Broadcast.objects.all().values_list('id', flat=True)
for chunk in chunk_list(broadcast_ids, 1000):
Broadcast.objects.filter(pk__in=chunk).update(send_all=False)
def apply_as_migration(apps, schema_editor):
Broadcast = apps.get_model('msgs', 'Broadcast')
do_populate_send_all(Broadcast)
def apply_manual():
from temba.msgs.models import Broadcast
do_populate_send_all(Broadcast)
class Migration(migrations.Migration):
dependencies = [
('msgs', '0086_broadcast_send_all'),
]
operations = [
migrations.RunPython(apply_as_migration)
]
Print progress of data migration
# -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-06 17:33
from __future__ import unicode_literals
from django.db import migrations
from temba.utils import chunk_list
def do_populate_send_all(Broadcast):
broadcast_ids = Broadcast.objects.all().values_list('id', flat=True)
broadcast_count = len(broadcast_ids)
print('Starting to update %d broadcasts send all field...' % broadcast_count)
updated = 0
for chunk in chunk_list(broadcast_ids, 1000):
Broadcast.objects.filter(pk__in=chunk).update(send_all=False)
print("Updated %d of %d broadcasts" % (updated + len(chunk), broadcast_count))
def apply_as_migration(apps, schema_editor):
Broadcast = apps.get_model('msgs', 'Broadcast')
do_populate_send_all(Broadcast)
def apply_manual():
from temba.msgs.models import Broadcast
do_populate_send_all(Broadcast)
class Migration(migrations.Migration):
dependencies = [
('msgs', '0086_broadcast_send_all'),
]
operations = [
migrations.RunPython(apply_as_migration)
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-06 17:33
from __future__ import unicode_literals
from django.db import migrations
from temba.utils import chunk_list
def do_populate_send_all(Broadcast):
broadcast_ids = Broadcast.objects.all().values_list('id', flat=True)
for chunk in chunk_list(broadcast_ids, 1000):
Broadcast.objects.filter(pk__in=chunk).update(send_all=False)
def apply_as_migration(apps, schema_editor):
Broadcast = apps.get_model('msgs', 'Broadcast')
do_populate_send_all(Broadcast)
def apply_manual():
from temba.msgs.models import Broadcast
do_populate_send_all(Broadcast)
class Migration(migrations.Migration):
dependencies = [
('msgs', '0086_broadcast_send_all'),
]
operations = [
migrations.RunPython(apply_as_migration)
]
<commit_msg>Print progress of data migration<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-04-06 17:33
from __future__ import unicode_literals
from django.db import migrations
from temba.utils import chunk_list
def do_populate_send_all(Broadcast):
broadcast_ids = Broadcast.objects.all().values_list('id', flat=True)
broadcast_count = len(broadcast_ids)
print('Starting to update %d broadcasts send all field...' % broadcast_count)
updated = 0
for chunk in chunk_list(broadcast_ids, 1000):
Broadcast.objects.filter(pk__in=chunk).update(send_all=False)
print("Updated %d of %d broadcasts" % (updated + len(chunk), broadcast_count))
def apply_as_migration(apps, schema_editor):
Broadcast = apps.get_model('msgs', 'Broadcast')
do_populate_send_all(Broadcast)
def apply_manual():
from temba.msgs.models import Broadcast
do_populate_send_all(Broadcast)
class Migration(migrations.Migration):
dependencies = [
('msgs', '0086_broadcast_send_all'),
]
operations = [
migrations.RunPython(apply_as_migration)
]
|
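A self-contained sketch of the chunked-update-with-progress pattern used in the migration above; chunk_list below is an assumed stand-in for temba.utils.chunk_list, and the ORM update is left as a comment.

def chunk_list(items, size):
    # Assumed stand-in: yield successive slices of at most `size` items.
    items = list(items)
    for start in range(0, len(items), size):
        yield items[start:start + size]

ids = list(range(2500))
total = len(ids)
updated = 0
for chunk in chunk_list(ids, 1000):
    # Broadcast.objects.filter(pk__in=chunk).update(send_all=False) would run here.
    updated += len(chunk)
    print('Updated %d of %d broadcasts' % (updated, total))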
3dfb310fa4df74c89b46fabb8195eb62b53dc5be
|
optimisers.py
|
optimisers.py
|
import numpy as np
class Optimiser:
def __init__(self, network):
self.nn = network
self.step_sign = -1.0 # minimise by default
def step(self):
self.nn.forward()
self.nn.reset_gradients()
self.nn.backward()
self.update_params()
def update_params(self):
pass
def minimise(self):
self.step_sign = -1.0
return self
def maximise(self):
self.step_sign = 1.0
return self
class GradientDescentOptimiser(Optimiser):
def __init__(self, network, step_size):
Optimiser.__init__(self, network)
self.step_size = abs(step_size)
def update_params(self):
for param in self.nn.get_params():
param.value += (self.step_sign * self.step_size) * param.grad
|
import numpy as np
class Optimiser:
def __init__(self, network):
self.nn = network
self.step_sign = -1.0 # minimise by default
def step(self):
self.nn.forward()
self.nn.reset_gradients()
self.nn.backward()
self.update_params()
def update_params(self):
pass
def minimise(self):
self.step_sign = -1.0
return self
def maximise(self):
self.step_sign = 1.0
return self
class GradientDescentOptimiser(Optimiser):
def __init__(self, network, step_size):
Optimiser.__init__(self, network)
self.step_size = abs(step_size)
def update_params(self):
for param in self.nn.get_params():
param.value += (self.step_sign * self.step_size) * param.grad
class GradientDescentMomentumOptimiser(Optimiser):
def __init__(self, network, step_size, momentum = 0.9):
Optimiser.__init__(self, network)
self.step_size = abs(step_size)
self.momentum = momentum
# initialise variables for momentum
self.last_param_updates = []
for param in self.nn.get_params():
self.last_param_updates.append(np.zeros_like(param.value))
def update_params(self):
for param, last_update in zip(self.nn.get_params(), self.last_param_updates):
update = self.momentum * last_update + self.step_size * param.grad
param.value += self.step_sign * update
last_update[:] = update
|
Add GradientDescent with Momentum Optimiser.
|
Add GradientDescent with Momentum Optimiser.
|
Python
|
mit
|
Hornobster/Numpy-Neural-Net
|
import numpy as np
class Optimiser:
def __init__(self, network):
self.nn = network
self.step_sign = -1.0 # minimise by default
def step(self):
self.nn.forward()
self.nn.reset_gradients()
self.nn.backward()
self.update_params()
def update_params(self):
pass
def minimise(self):
self.step_sign = -1.0
return self
def maximise(self):
self.step_sign = 1.0
return self
class GradientDescentOptimiser(Optimiser):
def __init__(self, network, step_size):
Optimiser.__init__(self, network)
self.step_size = abs(step_size)
def update_params(self):
for param in self.nn.get_params():
param.value += (self.step_sign * self.step_size) * param.grad
Add GradientDescent with Momentum Optimiser.
|
import numpy as np
class Optimiser:
def __init__(self, network):
self.nn = network
self.step_sign = -1.0 # minimise by default
def step(self):
self.nn.forward()
self.nn.reset_gradients()
self.nn.backward()
self.update_params()
def update_params(self):
pass
def minimise(self):
self.step_sign = -1.0
return self
def maximise(self):
self.step_sign = 1.0
return self
class GradientDescentOptimiser(Optimiser):
def __init__(self, network, step_size):
Optimiser.__init__(self, network)
self.step_size = abs(step_size)
def update_params(self):
for param in self.nn.get_params():
param.value += (self.step_sign * self.step_size) * param.grad
class GradientDescentMomentumOptimiser(Optimiser):
def __init__(self, network, step_size, momentum = 0.9):
Optimiser.__init__(self, network)
self.step_size = abs(step_size)
self.momentum = momentum
# initialise variables for momentum
self.last_param_updates = []
for param in self.nn.get_params():
self.last_param_updates.append(np.zeros_like(param.value))
def update_params(self):
for param, last_update in zip(self.nn.get_params(), self.last_param_updates):
update = self.momentum * last_update + self.step_size * param.grad
param.value += self.step_sign * update
last_update[:] = update
|
<commit_before>import numpy as np
class Optimiser:
def __init__(self, network):
self.nn = network
self.step_sign = -1.0 # minimise by default
def step(self):
self.nn.forward()
self.nn.reset_gradients()
self.nn.backward()
self.update_params()
def update_params(self):
pass
def minimise(self):
self.step_sign = -1.0
return self
def maximise(self):
self.step_sign = 1.0
return self
class GradientDescentOptimiser(Optimiser):
def __init__(self, network, step_size):
Optimiser.__init__(self, network)
self.step_size = abs(step_size)
def update_params(self):
for param in self.nn.get_params():
param.value += (self.step_sign * self.step_size) * param.grad
<commit_msg>Add GradientDescent with Momentum Optimiser.<commit_after>
|
import numpy as np
class Optimiser:
def __init__(self, network):
self.nn = network
self.step_sign = -1.0 # minimise by default
def step(self):
self.nn.forward()
self.nn.reset_gradients()
self.nn.backward()
self.update_params()
def update_params(self):
pass
def minimise(self):
self.step_sign = -1.0
return self
def maximise(self):
self.step_sign = 1.0
return self
class GradientDescentOptimiser(Optimiser):
def __init__(self, network, step_size):
Optimiser.__init__(self, network)
self.step_size = abs(step_size)
def update_params(self):
for param in self.nn.get_params():
param.value += (self.step_sign * self.step_size) * param.grad
class GradientDescentMomentumOptimiser(Optimiser):
def __init__(self, network, step_size, momentum = 0.9):
Optimiser.__init__(self, network)
self.step_size = abs(step_size)
self.momentum = momentum
# initialise variables for momentum
self.last_param_updates = []
for param in self.nn.get_params():
self.last_param_updates.append(np.zeros_like(param.value))
def update_params(self):
for param, last_update in zip(self.nn.get_params(), self.last_param_updates):
update = self.momentum * last_update + self.step_size * param.grad
param.value += self.step_sign * update
last_update[:] = update
|
import numpy as np
class Optimiser:
def __init__(self, network):
self.nn = network
self.step_sign = -1.0 # minimise by default
def step(self):
self.nn.forward()
self.nn.reset_gradients()
self.nn.backward()
self.update_params()
def update_params(self):
pass
def minimise(self):
self.step_sign = -1.0
return self
def maximise(self):
self.step_sign = 1.0
return self
class GradientDescentOptimiser(Optimiser):
def __init__(self, network, step_size):
Optimiser.__init__(self, network)
self.step_size = abs(step_size)
def update_params(self):
for param in self.nn.get_params():
param.value += (self.step_sign * self.step_size) * param.grad
Add GradientDescent with Momentum Optimiser.
import numpy as np
class Optimiser:
def __init__(self, network):
self.nn = network
self.step_sign = -1.0 # minimise by default
def step(self):
self.nn.forward()
self.nn.reset_gradients()
self.nn.backward()
self.update_params()
def update_params(self):
pass
def minimise(self):
self.step_sign = -1.0
return self
def maximise(self):
self.step_sign = 1.0
return self
class GradientDescentOptimiser(Optimiser):
def __init__(self, network, step_size):
Optimiser.__init__(self, network)
self.step_size = abs(step_size)
def update_params(self):
for param in self.nn.get_params():
param.value += (self.step_sign * self.step_size) * param.grad
class GradientDescentMomentumOptimiser(Optimiser):
def __init__(self, network, step_size, momentum = 0.9):
Optimiser.__init__(self, network)
self.step_size = abs(step_size)
self.momentum = momentum
# initialise variables for momentum
self.last_param_updates = []
for param in self.nn.get_params():
self.last_param_updates.append(np.zeros_like(param.value))
def update_params(self):
for param, last_update in zip(self.nn.get_params(), self.last_param_updates):
update = self.momentum * last_update + self.step_size * param.grad
param.value += self.step_sign * update
last_update[:] = update
|
<commit_before>import numpy as np
class Optimiser:
def __init__(self, network):
self.nn = network
self.step_sign = -1.0 # minimise by default
def step(self):
self.nn.forward()
self.nn.reset_gradients()
self.nn.backward()
self.update_params()
def update_params(self):
pass
def minimise(self):
self.step_sign = -1.0
return self
def maximise(self):
self.step_sign = 1.0
return self
class GradientDescentOptimiser(Optimiser):
def __init__(self, network, step_size):
Optimiser.__init__(self, network)
self.step_size = abs(step_size)
def update_params(self):
for param in self.nn.get_params():
param.value += (self.step_sign * self.step_size) * param.grad
<commit_msg>Add GradientDescent with Momentum Optimiser.<commit_after>import numpy as np
class Optimiser:
def __init__(self, network):
self.nn = network
self.step_sign = -1.0 # minimise by default
def step(self):
self.nn.forward()
self.nn.reset_gradients()
self.nn.backward()
self.update_params()
def update_params(self):
pass
def minimise(self):
self.step_sign = -1.0
return self
def maximise(self):
self.step_sign = 1.0
return self
class GradientDescentOptimiser(Optimiser):
def __init__(self, network, step_size):
Optimiser.__init__(self, network)
self.step_size = abs(step_size)
def update_params(self):
for param in self.nn.get_params():
param.value += (self.step_sign * self.step_size) * param.grad
class GradientDescentMomentumOptimiser(Optimiser):
def __init__(self, network, step_size, momentum = 0.9):
Optimiser.__init__(self, network)
self.step_size = abs(step_size)
self.momentum = momentum
# initialise variables for momentum
self.last_param_updates = []
for param in self.nn.get_params():
self.last_param_updates.append(np.zeros_like(param.value))
def update_params(self):
for param, last_update in zip(self.nn.get_params(), self.last_param_updates):
update = self.momentum * last_update + self.step_size * param.grad
param.value += self.step_sign * update
last_update[:] = update
|
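A runnable sketch (not from the record) that exercises GradientDescentMomentumOptimiser on a one-parameter quadratic, assuming the optimiser classes above are in scope; Param and QuadraticNet are throwaway stubs that exist only to satisfy the forward/reset_gradients/backward/get_params interface the optimisers expect.

import numpy as np

class Param:
    def __init__(self, value):
        self.value = np.array(value, dtype=float)
        self.grad = np.zeros_like(self.value)

class QuadraticNet:
    # loss = (x - 3)^2, minimised at x = 3
    def __init__(self):
        self.x = Param([0.0])
    def forward(self):
        self.loss = float((self.x.value[0] - 3.0) ** 2)
    def reset_gradients(self):
        self.x.grad[:] = 0.0
    def backward(self):
        self.x.grad[:] = 2.0 * (self.x.value - 3.0)
    def get_params(self):
        return [self.x]

net = QuadraticNet()
opt = GradientDescentMomentumOptimiser(net, step_size=0.1, momentum=0.9).minimise()
for _ in range(200):
    opt.step()
print(net.x.value)  # converges towards [3.]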
34015dbc34b2f4e44b104070bae8c3d1956d7e12
|
is_valid/wrapper_predicates.py
|
is_valid/wrapper_predicates.py
|
import json
def is_transformed(transform, predicate, *args, exceptions=[
Exception
], msg='data can\'t be transformed', **kwargs):
def is_valid(data, explain=False):
try:
data = transform(data, *args, **kwargs)
except Exception as e:
if not any(isinstance(e, exc) for exc in exceptions):
raise e
return (False, msg) if explain else False
return predicate(data, explain=explain)
return is_valid
def is_json(predicate, *args, **kwargs):
return is_transformed(json.loads, predicate, *args, exceptions=[
json.JSONDecodeError
], msg='data is not valid json', **kwargs)
|
import json
def is_transformed(transform, predicate, *args, exceptions=[
Exception
], msg='data can\'t be transformed', **kwargs):
def is_valid(data, explain=False, include=False):
try:
data = transform(data, *args, **kwargs)
except Exception as e:
if not any(isinstance(e, exc) for exc in exceptions):
raise e
return (
(False, msg, None) if explain else (False, None)
) if include else (
(False, msg) if explain else False
)
return ((
predicate(data, explain=True) + (data,)
) if explain else (
predicate(data), data
)) if include else predicate(data, explain=explain)
return is_valid
def is_json(predicate, *args, **kwargs):
return is_transformed(json.loads, predicate, *args, exceptions=[
json.JSONDecodeError
], msg='data is not valid json', **kwargs)
|
Add include keyword arg to is_tranformed
|
Add include keyword arg to is_tranformed
|
Python
|
mit
|
Daanvdk/is_valid
|
import json
def is_transformed(transform, predicate, *args, exceptions=[
Exception
], msg='data can\'t be transformed', **kwargs):
def is_valid(data, explain=False):
try:
data = transform(data, *args, **kwargs)
except Exception as e:
if not any(isinstance(e, exc) for exc in exceptions):
raise e
return (False, msg) if explain else False
return predicate(data, explain=explain)
return is_valid
def is_json(predicate, *args, **kwargs):
return is_transformed(json.loads, predicate, *args, exceptions=[
json.JSONDecodeError
], msg='data is not valid json', **kwargs)
Add include keyword arg to is_tranformed
|
import json
def is_transformed(transform, predicate, *args, exceptions=[
Exception
], msg='data can\'t be transformed', **kwargs):
def is_valid(data, explain=False, include=False):
try:
data = transform(data, *args, **kwargs)
except Exception as e:
if not any(isinstance(e, exc) for exc in exceptions):
raise e
return (
(False, msg, None) if explain else (False, None)
) if include else (
(False, msg) if explain else False
)
return ((
predicate(data, explain=True) + (data,)
) if explain else (
predicate(data), data
)) if include else predicate(data, explain=explain)
return is_valid
def is_json(predicate, *args, **kwargs):
return is_transformed(json.loads, predicate, *args, exceptions=[
json.JSONDecodeError
], msg='data is not valid json', **kwargs)
|
<commit_before>import json
def is_transformed(transform, predicate, *args, exceptions=[
Exception
], msg='data can\'t be transformed', **kwargs):
def is_valid(data, explain=False):
try:
data = transform(data, *args, **kwargs)
except Exception as e:
if not any(isinstance(e, exc) for exc in exceptions):
raise e
return (False, msg) if explain else False
return predicate(data, explain=explain)
return is_valid
def is_json(predicate, *args, **kwargs):
return is_transformed(json.loads, predicate, *args, exceptions=[
json.JSONDecodeError
], msg='data is not valid json', **kwargs)
<commit_msg>Add include keyword arg to is_tranformed<commit_after>
|
import json
def is_transformed(transform, predicate, *args, exceptions=[
Exception
], msg='data can\'t be transformed', **kwargs):
def is_valid(data, explain=False, include=False):
try:
data = transform(data, *args, **kwargs)
except Exception as e:
if not any(isinstance(e, exc) for exc in exceptions):
raise e
return (
(False, msg, None) if explain else (False, None)
) if include else (
(False, msg) if explain else False
)
return ((
predicate(data, explain=True) + (data,)
) if explain else (
predicate(data), data
)) if include else predicate(data, explain=explain)
return is_valid
def is_json(predicate, *args, **kwargs):
return is_transformed(json.loads, predicate, *args, exceptions=[
json.JSONDecodeError
], msg='data is not valid json', **kwargs)
|
import json
def is_transformed(transform, predicate, *args, exceptions=[
Exception
], msg='data can\'t be transformed', **kwargs):
def is_valid(data, explain=False):
try:
data = transform(data, *args, **kwargs)
except Exception as e:
if not any(isinstance(e, exc) for exc in exceptions):
raise e
return (False, msg) if explain else False
return predicate(data, explain=explain)
return is_valid
def is_json(predicate, *args, **kwargs):
return is_transformed(json.loads, predicate, *args, exceptions=[
json.JSONDecodeError
], msg='data is not valid json', **kwargs)
Add include keyword arg to is_tranformed
import json
def is_transformed(transform, predicate, *args, exceptions=[
Exception
], msg='data can\'t be transformed', **kwargs):
def is_valid(data, explain=False, include=False):
try:
data = transform(data, *args, **kwargs)
except Exception as e:
if not any(isinstance(e, exc) for exc in exceptions):
raise e
return (
(False, msg, None) if explain else (False, None)
) if include else (
(False, msg) if explain else False
)
return ((
predicate(data, explain=True) + (data,)
) if explain else (
predicate(data), data
)) if include else predicate(data, explain=explain)
return is_valid
def is_json(predicate, *args, **kwargs):
return is_transformed(json.loads, predicate, *args, exceptions=[
json.JSONDecodeError
], msg='data is not valid json', **kwargs)
|
<commit_before>import json
def is_transformed(transform, predicate, *args, exceptions=[
Exception
], msg='data can\'t be transformed', **kwargs):
def is_valid(data, explain=False):
try:
data = transform(data, *args, **kwargs)
except Exception as e:
if not any(isinstance(e, exc) for exc in exceptions):
raise e
return (False, msg) if explain else False
return predicate(data, explain=explain)
return is_valid
def is_json(predicate, *args, **kwargs):
return is_transformed(json.loads, predicate, *args, exceptions=[
json.JSONDecodeError
], msg='data is not valid json', **kwargs)
<commit_msg>Add include keyword arg to is_tranformed<commit_after>import json
def is_transformed(transform, predicate, *args, exceptions=[
Exception
], msg='data can\'t be transformed', **kwargs):
def is_valid(data, explain=False, include=False):
try:
data = transform(data, *args, **kwargs)
except Exception as e:
if not any(isinstance(e, exc) for exc in exceptions):
raise e
return (
(False, msg, None) if explain else (False, None)
) if include else (
(False, msg) if explain else False
)
return ((
predicate(data, explain=True) + (data,)
) if explain else (
predicate(data), data
)) if include else predicate(data, explain=explain)
return is_valid
def is_json(predicate, *args, **kwargs):
return is_transformed(json.loads, predicate, *args, exceptions=[
json.JSONDecodeError
], msg='data is not valid json', **kwargs)
|
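A usage sketch for the new include flag, assuming the functions above are importable as is_valid.wrapper_predicates; is_dict is a throwaway predicate written only for this illustration.

from is_valid.wrapper_predicates import is_json

def is_dict(data, explain=False):
    ok = isinstance(data, dict)
    return (ok, 'data is a dict' if ok else 'data is not a dict') if explain else ok

check = is_json(is_dict)
print(check('{"a": 1}'))                              # True
print(check('{"a": 1}', explain=True, include=True))  # (True, 'data is a dict', {'a': 1})
print(check('not json', explain=True, include=True))  # (False, 'data is not valid json', None)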
f8d94b93427ff92ae6eed58a81058cce4e661cd2
|
solum/tests/common/test_service.py
|
solum/tests/common/test_service.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
test_service
----------------------------------
Tests for `solum.common.service` module.
"""
import testtools
from solum.common import service
from solum import objects
from solum.objects import plan as abstract
from solum.tests import base
class TestService(base.BaseTestCase):
def test_prepare_invokes_object_load(self):
objects.registry.clear()
with testtools.ExpectedException(KeyError):
objects.registry.Plan()
service.prepare_service([])
self.assertTrue(issubclass(objects.registry.Plan, abstract.Plan))
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
test_service
----------------------------------
Tests for `solum.common.service` module.
"""
import testtools
from solum.common import service
from solum import objects
from solum.objects import component as abstract
from solum.tests import base
class TestService(base.BaseTestCase):
def test_prepare_invokes_object_load(self):
objects.registry.clear()
with testtools.ExpectedException(KeyError):
objects.registry.Component()
service.prepare_service([])
self.assertTrue(issubclass(objects.registry.Component,
abstract.Component))
|
Test service with Component instead of Plan db object
|
Test service with Component instead of Plan db object
Since plan db objects are getting removed in add-plan-in-swift,
we need to test service with another object.
Change-Id: I85537ef17f8c125d3de85ab3625ea91e9487376f
|
Python
|
apache-2.0
|
gilbertpilz/solum,ed-/solum,devdattakulkarni/test-solum,stackforge/solum,gilbertpilz/solum,openstack/solum,ed-/solum,devdattakulkarni/test-solum,ed-/solum,ed-/solum,gilbertpilz/solum,gilbertpilz/solum,openstack/solum,stackforge/solum
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
test_service
----------------------------------
Tests for `solum.common.service` module.
"""
import testtools
from solum.common import service
from solum import objects
from solum.objects import plan as abstract
from solum.tests import base
class TestService(base.BaseTestCase):
def test_prepare_invokes_object_load(self):
objects.registry.clear()
with testtools.ExpectedException(KeyError):
objects.registry.Plan()
service.prepare_service([])
self.assertTrue(issubclass(objects.registry.Plan, abstract.Plan))
Test service with Component instead of Plan db object
Since plan db objects are getting removed in add-plan-in-swift,
we need to test service with another object.
Change-Id: I85537ef17f8c125d3de85ab3625ea91e9487376f
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
test_service
----------------------------------
Tests for `solum.common.service` module.
"""
import testtools
from solum.common import service
from solum import objects
from solum.objects import component as abstract
from solum.tests import base
class TestService(base.BaseTestCase):
def test_prepare_invokes_object_load(self):
objects.registry.clear()
with testtools.ExpectedException(KeyError):
objects.registry.Component()
service.prepare_service([])
self.assertTrue(issubclass(objects.registry.Component,
abstract.Component))
|
<commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
test_service
----------------------------------
Tests for `solum.common.service` module.
"""
import testtools
from solum.common import service
from solum import objects
from solum.objects import plan as abstract
from solum.tests import base
class TestService(base.BaseTestCase):
def test_prepare_invokes_object_load(self):
objects.registry.clear()
with testtools.ExpectedException(KeyError):
objects.registry.Plan()
service.prepare_service([])
self.assertTrue(issubclass(objects.registry.Plan, abstract.Plan))
<commit_msg>Test service with Component instead of Plan db object
Since plan db objects are getting removed in add-plan-in-swift,
we need to test service with another object.
Change-Id: I85537ef17f8c125d3de85ab3625ea91e9487376f<commit_after>
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
test_service
----------------------------------
Tests for `solum.common.service` module.
"""
import testtools
from solum.common import service
from solum import objects
from solum.objects import component as abstract
from solum.tests import base
class TestService(base.BaseTestCase):
def test_prepare_invokes_object_load(self):
objects.registry.clear()
with testtools.ExpectedException(KeyError):
objects.registry.Component()
service.prepare_service([])
self.assertTrue(issubclass(objects.registry.Component,
abstract.Component))
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
test_service
----------------------------------
Tests for `solum.common.service` module.
"""
import testtools
from solum.common import service
from solum import objects
from solum.objects import plan as abstract
from solum.tests import base
class TestService(base.BaseTestCase):
def test_prepare_invokes_object_load(self):
objects.registry.clear()
with testtools.ExpectedException(KeyError):
objects.registry.Plan()
service.prepare_service([])
self.assertTrue(issubclass(objects.registry.Plan, abstract.Plan))
Test service with Component instead of Plan db object
Since plan db objects are getting removed in add-plan-in-swift,
we need to test service with another object.
Change-Id: I85537ef17f8c125d3de85ab3625ea91e9487376f
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
test_service
----------------------------------
Tests for `solum.common.service` module.
"""
import testtools
from solum.common import service
from solum import objects
from solum.objects import component as abstract
from solum.tests import base
class TestService(base.BaseTestCase):
def test_prepare_invokes_object_load(self):
objects.registry.clear()
with testtools.ExpectedException(KeyError):
objects.registry.Component()
service.prepare_service([])
self.assertTrue(issubclass(objects.registry.Component,
abstract.Component))
|
<commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
test_service
----------------------------------
Tests for `solum.common.service` module.
"""
import testtools
from solum.common import service
from solum import objects
from solum.objects import plan as abstract
from solum.tests import base
class TestService(base.BaseTestCase):
def test_prepare_invokes_object_load(self):
objects.registry.clear()
with testtools.ExpectedException(KeyError):
objects.registry.Plan()
service.prepare_service([])
self.assertTrue(issubclass(objects.registry.Plan, abstract.Plan))
<commit_msg>Test service with Component instead of Plan db object
Since plan db objects are getting removed in add-plan-in-swift,
we need to test service with another object.
Change-Id: I85537ef17f8c125d3de85ab3625ea91e9487376f<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
test_service
----------------------------------
Tests for `solum.common.service` module.
"""
import testtools
from solum.common import service
from solum import objects
from solum.objects import component as abstract
from solum.tests import base
class TestService(base.BaseTestCase):
def test_prepare_invokes_object_load(self):
objects.registry.clear()
with testtools.ExpectedException(KeyError):
objects.registry.Component()
service.prepare_service([])
self.assertTrue(issubclass(objects.registry.Component,
abstract.Component))
|
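A minimal sketch, not solum's actual registry, of the behaviour this test relies on: attribute access raises KeyError until a concrete class has been registered, which is why the test expects KeyError before prepare_service([]) runs.

class Registry:
    def __init__(self):
        self._impls = {}
    def clear(self):
        self._impls.clear()
    def add(self, name, cls):
        self._impls[name] = cls
    def __getattr__(self, name):
        # Raises KeyError for anything not registered yet.
        return self._impls[name]

registry = Registry()
try:
    registry.Component()
except KeyError:
    print('Component not registered yet')

class Component:
    pass

registry.add('Component', Component)
print(registry.Component())  # now resolves to the registered class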
1c4b3fe1204bfa40c1d7b6444ab645826e4c1d1f
|
Filter.py
|
Filter.py
|
# tcviz 1.2
#
# Licensed under the terms of the MIT/X11 license.
# Copyright (c) 2009-2013 Vita Smid <http://ze.phyr.us>
import textwrap
from Id import Id
class Filter:
COLOR = '#999999'
def __init__(self, spec=None):
self.__parent = None
self.__target = None
self.__params = []
if spec is not None:
self.parseSpec(spec)
def parseSpec(self, spec):
spec = spec.split(' ')[2:]
self.__parent = Id(spec.pop(0))
while spec:
item = spec.pop(0)
if item == 'classid' or item == 'flowid':
self.__target = Id(spec.pop(0))
else:
self.__params.append(item)
def getEdgeSpec(self):
if self.__target is None:
return ''
label = '<br/>'.join(textwrap.wrap(' '.join(self.__params), 20))
fmt = '"%s" -> "%s" [ arrowhead = "vee", color = "%s", label = <<font point-size="10" color="%s">%s</font>>, style = "dotted" ];'
return fmt % (self.__parent, self.__target, self.COLOR, self.COLOR, label)
|
# tcviz 1.2
#
# Licensed under the terms of the MIT/X11 license.
# Copyright (c) 2009-2013 Vita Smid <http://ze.phyr.us>
import textwrap
from Id import Id
class Filter:
COLOR = '#999999'
def __init__(self, spec=None):
self.__parent = None
self.__target = None
self.__params = []
if spec is not None:
self.parseSpec(spec)
def parseSpec(self, spec):
spec = spec.split(' ')[2:]
self.__parent = Id(spec.pop(0))
while spec:
item = spec.pop(0)
if item == 'classid' or item == 'flowid':
idval = spec.pop(0)
if idval != "???":
self.__target = Id(idval)
else:
self.__params.append(item)
def getEdgeSpec(self):
if self.__target is None:
return ''
label = '<br/>'.join(textwrap.wrap(' '.join(self.__params), 20))
fmt = '"%s" -> "%s" [ arrowhead = "vee", color = "%s", label = <<font point-size="10" color="%s">%s</font>>, style = "dotted" ];'
return fmt % (self.__parent, self.__target, self.COLOR, self.COLOR, label)
|
Fix ValueError when flowid is ???
|
Fix ValueError when flowid is ???
A filter like the following:
tc filter add dev srvif parent 1: protocol ip u32 match ip dst 1.2.3.4/32 action drop
is reported in "tc show filter" as:
filter parent 1: protocol ip [...] flowid ??? [..]
|
Python
|
mit
|
ze-phyr-us/tcviz
|
# tcviz 1.2
#
# Licensed under the terms of the MIT/X11 license.
# Copyright (c) 2009-2013 Vita Smid <http://ze.phyr.us>
import textwrap
from Id import Id
class Filter:
COLOR = '#999999'
def __init__(self, spec=None):
self.__parent = None
self.__target = None
self.__params = []
if spec is not None:
self.parseSpec(spec)
def parseSpec(self, spec):
spec = spec.split(' ')[2:]
self.__parent = Id(spec.pop(0))
while spec:
item = spec.pop(0)
if item == 'classid' or item == 'flowid':
self.__target = Id(spec.pop(0))
else:
self.__params.append(item)
def getEdgeSpec(self):
if self.__target is None:
return ''
label = '<br/>'.join(textwrap.wrap(' '.join(self.__params), 20))
fmt = '"%s" -> "%s" [ arrowhead = "vee", color = "%s", label = <<font point-size="10" color="%s">%s</font>>, style = "dotted" ];'
return fmt % (self.__parent, self.__target, self.COLOR, self.COLOR, label)
Fix ValueError when flowid is ???
A filter like the following:
tc filter add dev srvif parent 1: protocol ip u32 match ip dst 1.2.3.4/32 action drop
is reported in "tc show filter" as:
filter parent 1: protocol ip [...] flowid ??? [..]
|
# tcviz 1.2
#
# Licensed under the terms of the MIT/X11 license.
# Copyright (c) 2009-2013 Vita Smid <http://ze.phyr.us>
import textwrap
from Id import Id
class Filter:
COLOR = '#999999'
def __init__(self, spec=None):
self.__parent = None
self.__target = None
self.__params = []
if spec is not None:
self.parseSpec(spec)
def parseSpec(self, spec):
spec = spec.split(' ')[2:]
self.__parent = Id(spec.pop(0))
while spec:
item = spec.pop(0)
if item == 'classid' or item == 'flowid':
idval = spec.pop(0)
if idval != "???":
self.__target = Id(idval)
else:
self.__params.append(item)
def getEdgeSpec(self):
if self.__target is None:
return ''
label = '<br/>'.join(textwrap.wrap(' '.join(self.__params), 20))
fmt = '"%s" -> "%s" [ arrowhead = "vee", color = "%s", label = <<font point-size="10" color="%s">%s</font>>, style = "dotted" ];'
return fmt % (self.__parent, self.__target, self.COLOR, self.COLOR, label)
|
<commit_before># tcviz 1.2
#
# Licensed under the terms of the MIT/X11 license.
# Copyright (c) 2009-2013 Vita Smid <http://ze.phyr.us>
import textwrap
from Id import Id
class Filter:
COLOR = '#999999'
def __init__(self, spec=None):
self.__parent = None
self.__target = None
self.__params = []
if spec is not None:
self.parseSpec(spec)
def parseSpec(self, spec):
spec = spec.split(' ')[2:]
self.__parent = Id(spec.pop(0))
while spec:
item = spec.pop(0)
if item == 'classid' or item == 'flowid':
self.__target = Id(spec.pop(0))
else:
self.__params.append(item)
def getEdgeSpec(self):
if self.__target is None:
return ''
label = '<br/>'.join(textwrap.wrap(' '.join(self.__params), 20))
fmt = '"%s" -> "%s" [ arrowhead = "vee", color = "%s", label = <<font point-size="10" color="%s">%s</font>>, style = "dotted" ];'
return fmt % (self.__parent, self.__target, self.COLOR, self.COLOR, label)
<commit_msg>Fix ValueError when flowid is ???
A filter like the following:
tc filter add dev srvif parent 1: protocol ip u32 match ip dst 1.2.3.4/32 action drop
is reported in "tc show filter" as:
filter parent 1: protocol ip [...] flowid ??? [..]<commit_after>
|
# tcviz 1.2
#
# Licensed under the terms of the MIT/X11 license.
# Copyright (c) 2009-2013 Vita Smid <http://ze.phyr.us>
import textwrap
from Id import Id
class Filter:
COLOR = '#999999'
def __init__(self, spec=None):
self.__parent = None
self.__target = None
self.__params = []
if spec is not None:
self.parseSpec(spec)
def parseSpec(self, spec):
spec = spec.split(' ')[2:]
self.__parent = Id(spec.pop(0))
while spec:
item = spec.pop(0)
if item == 'classid' or item == 'flowid':
idval = spec.pop(0)
if idval != "???":
self.__target = Id(idval)
else:
self.__params.append(item)
def getEdgeSpec(self):
if self.__target is None:
return ''
label = '<br/>'.join(textwrap.wrap(' '.join(self.__params), 20))
fmt = '"%s" -> "%s" [ arrowhead = "vee", color = "%s", label = <<font point-size="10" color="%s">%s</font>>, style = "dotted" ];'
return fmt % (self.__parent, self.__target, self.COLOR, self.COLOR, label)
|
# tcviz 1.2
#
# Licensed under the terms of the MIT/X11 license.
# Copyright (c) 2009-2013 Vita Smid <http://ze.phyr.us>
import textwrap
from Id import Id
class Filter:
COLOR = '#999999'
def __init__(self, spec=None):
self.__parent = None
self.__target = None
self.__params = []
if spec is not None:
self.parseSpec(spec)
def parseSpec(self, spec):
spec = spec.split(' ')[2:]
self.__parent = Id(spec.pop(0))
while spec:
item = spec.pop(0)
if item == 'classid' or item == 'flowid':
self.__target = Id(spec.pop(0))
else:
self.__params.append(item)
def getEdgeSpec(self):
if self.__target is None:
return ''
label = '<br/>'.join(textwrap.wrap(' '.join(self.__params), 20))
fmt = '"%s" -> "%s" [ arrowhead = "vee", color = "%s", label = <<font point-size="10" color="%s">%s</font>>, style = "dotted" ];'
return fmt % (self.__parent, self.__target, self.COLOR, self.COLOR, label)
Fix ValueError when flowid is ???
A filter like the following:
tc filter add dev srvif parent 1: protocol ip u32 match ip dst 1.2.3.4/32 action drop
is reported in "tc show filter" as:
filter parent 1: protocol ip [...] flowid ??? [..]
# tcviz 1.2
#
# Licensed under the terms of the MIT/X11 license.
# Copyright (c) 2009-2013 Vita Smid <http://ze.phyr.us>
import textwrap
from Id import Id
class Filter:
COLOR = '#999999'
def __init__(self, spec=None):
self.__parent = None
self.__target = None
self.__params = []
if spec is not None:
self.parseSpec(spec)
def parseSpec(self, spec):
spec = spec.split(' ')[2:]
self.__parent = Id(spec.pop(0))
while spec:
item = spec.pop(0)
if item == 'classid' or item == 'flowid':
idval = spec.pop(0)
if idval != "???":
self.__target = Id(idval)
else:
self.__params.append(item)
def getEdgeSpec(self):
if self.__target is None:
return ''
label = '<br/>'.join(textwrap.wrap(' '.join(self.__params), 20))
fmt = '"%s" -> "%s" [ arrowhead = "vee", color = "%s", label = <<font point-size="10" color="%s">%s</font>>, style = "dotted" ];'
return fmt % (self.__parent, self.__target, self.COLOR, self.COLOR, label)
|
<commit_before># tcviz 1.2
#
# Licensed under the terms of the MIT/X11 license.
# Copyright (c) 2009-2013 Vita Smid <http://ze.phyr.us>
import textwrap
from Id import Id
class Filter:
COLOR = '#999999'
def __init__(self, spec=None):
self.__parent = None
self.__target = None
self.__params = []
if spec is not None:
self.parseSpec(spec)
def parseSpec(self, spec):
spec = spec.split(' ')[2:]
self.__parent = Id(spec.pop(0))
while spec:
item = spec.pop(0)
if item == 'classid' or item == 'flowid':
self.__target = Id(spec.pop(0))
else:
self.__params.append(item)
def getEdgeSpec(self):
if self.__target is None:
return ''
label = '<br/>'.join(textwrap.wrap(' '.join(self.__params), 20))
fmt = '"%s" -> "%s" [ arrowhead = "vee", color = "%s", label = <<font point-size="10" color="%s">%s</font>>, style = "dotted" ];'
return fmt % (self.__parent, self.__target, self.COLOR, self.COLOR, label)
<commit_msg>Fix ValueError when flowid is ???
A filter like the following:
tc filter add dev srvif parent 1: protocol ip u32 match ip dst 1.2.3.4/32 action drop
is reported in "tc show filter" as:
filter parent 1: protocol ip [...] flowid ??? [..]<commit_after># tcviz 1.2
#
# Licensed under the terms of the MIT/X11 license.
# Copyright (c) 2009-2013 Vita Smid <http://ze.phyr.us>
import textwrap
from Id import Id
class Filter:
COLOR = '#999999'
def __init__(self, spec=None):
self.__parent = None
self.__target = None
self.__params = []
if spec is not None:
self.parseSpec(spec)
def parseSpec(self, spec):
spec = spec.split(' ')[2:]
self.__parent = Id(spec.pop(0))
while spec:
item = spec.pop(0)
if item == 'classid' or item == 'flowid':
idval = spec.pop(0)
if idval != "???":
self.__target = Id(idval)
else:
self.__params.append(item)
def getEdgeSpec(self):
if self.__target is None:
return ''
label = '<br/>'.join(textwrap.wrap(' '.join(self.__params), 20))
fmt = '"%s" -> "%s" [ arrowhead = "vee", color = "%s", label = <<font point-size="10" color="%s">%s</font>>, style = "dotted" ];'
return fmt % (self.__parent, self.__target, self.COLOR, self.COLOR, label)
|
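A self-contained sketch (without the Id dependency) of the guard this commit adds: a flowid of "???" is skipped instead of being handed to Id(). The sample line is an assumption modelled on the tc filter show output quoted in the commit message.

sample = 'filter parent 1: protocol ip u32 match ip dst 1.2.3.4/32 action drop flowid ???'
tokens = sample.split(' ')[2:]
parent, target, params = tokens.pop(0), None, []
while tokens:
    item = tokens.pop(0)
    if item in ('classid', 'flowid'):
        idval = tokens.pop(0)
        if idval != '???':
            target = idval   # with a real flowid this would become Id(idval)
    else:
        params.append(item)
print(parent, target, params)
# 1: None ['protocol', 'ip', 'u32', 'match', 'ip', 'dst', '1.2.3.4/32', 'action', 'drop']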
f69a9a3e49f6a242be2d0d8d9eb6ff104e25247b
|
pyvarnish/remote.py
|
pyvarnish/remote.py
|
# -*- coding: utf-8 -*-
__author__ = 'John Moylan'
from paramiko import SSHClient, SSHConfig, AutoAddPolicy
from pyvarnish.settings import SSH_CONFIG
class Varnish_admin():
def __init__(self, server=''):
self.server = server
self.conf = self.config()
def config(self):
sshconfig = SSHConfig()
sshconfig.parse(open(SSH_CONFIG))
return sshconfig.lookup(self.server)
def runcmd(self, cmd):
try:
client = SSHClient()
client.load_system_host_keys()
client.set_missing_host_key_policy(AutoAddPolicy())
client.connect(self.conf['hostname'],
port = int(self.conf['port']),
username = self.conf['user'],
key_filename = self.conf['identityfile'],
password = None,)
stdin, stdout, stderr = client.exec_command(cmd)
return ''.join([i.rstrip('\r\n ').lstrip() for i in stdout.readlines()])
finally:
client.close()
|
# -*- coding: utf-8 -*-
__author__ = 'John Moylan'
import sys
from paramiko import SSHClient, SSHConfig, AutoAddPolicy
from pyvarnish.settings import SSH_CONFIG
class Varnish_admin():
def __init__(self, server=''):
self.server = server
self.conf = self.config()
def config(self):
sshconfig = SSHConfig()
try:
sshconfig.parse(open(SSH_CONFIG))
except IOError:
print "your app needs to have a valid " \
"ssh config file location in settings.py"
sys.exit(1)
return sshconfig.lookup(self.server)
def runcmd(self, cmd):
try:
client = SSHClient()
client.load_system_host_keys()
client.set_missing_host_key_policy(AutoAddPolicy())
client.connect(self.conf['hostname'],
port = int(self.conf['port']),
username = self.conf['user'],
key_filename = self.conf['identityfile'],
password = None,)
stdin, stdout, stderr = client.exec_command(cmd)
return ''.join([i.rstrip('\r\n ').lstrip() for i in stdout.readlines()])
finally:
client.close()
|
Add an exception if ssh not configured
|
Add an exception if ssh not configured
|
Python
|
bsd-3-clause
|
redsnapper8t8/pyvarnish
|
# -*- coding: utf-8 -*-
__author__ = 'John Moylan'
from paramiko import SSHClient, SSHConfig, AutoAddPolicy
from pyvarnish.settings import SSH_CONFIG
class Varnish_admin():
def __init__(self, server=''):
self.server = server
self.conf = self.config()
def config(self):
sshconfig = SSHConfig()
sshconfig.parse(open(SSH_CONFIG))
return sshconfig.lookup(self.server)
def runcmd(self, cmd):
try:
client = SSHClient()
client.load_system_host_keys()
client.set_missing_host_key_policy(AutoAddPolicy())
client.connect(self.conf['hostname'],
port = int(self.conf['port']),
username = self.conf['user'],
key_filename = self.conf['identityfile'],
password = None,)
stdin, stdout, stderr = client.exec_command(cmd)
return ''.join([i.rstrip('\r\n ').lstrip() for i in stdout.readlines()])
finally:
client.close()
Add an exception if ssh not configured
|
# -*- coding: utf-8 -*-
__author__ = 'John Moylan'
import sys
from paramiko import SSHClient, SSHConfig, AutoAddPolicy
from pyvarnish.settings import SSH_CONFIG
class Varnish_admin():
def __init__(self, server=''):
self.server = server
self.conf = self.config()
def config(self):
sshconfig = SSHConfig()
try:
sshconfig.parse(open(SSH_CONFIG))
except IOError:
print "your app needs to have a valid " \
"ssh config file location in settings.py"
sys.exit(1)
return sshconfig.lookup(self.server)
def runcmd(self, cmd):
try:
client = SSHClient()
client.load_system_host_keys()
client.set_missing_host_key_policy(AutoAddPolicy())
client.connect(self.conf['hostname'],
port = int(self.conf['port']),
username = self.conf['user'],
key_filename = self.conf['identityfile'],
password = None,)
stdin, stdout, stderr = client.exec_command(cmd)
return ''.join([i.rstrip('\r\n ').lstrip() for i in stdout.readlines()])
finally:
client.close()
|
<commit_before># -*- coding: utf-8 -*-
__author__ = 'John Moylan'
from paramiko import SSHClient, SSHConfig, AutoAddPolicy
from pyvarnish.settings import SSH_CONFIG
class Varnish_admin():
def __init__(self, server=''):
self.server = server
self.conf = self.config()
def config(self):
sshconfig = SSHConfig()
sshconfig.parse(open(SSH_CONFIG))
return sshconfig.lookup(self.server)
def runcmd(self, cmd):
try:
client = SSHClient()
client.load_system_host_keys()
client.set_missing_host_key_policy(AutoAddPolicy())
client.connect(self.conf['hostname'],
port = int(self.conf['port']),
username = self.conf['user'],
key_filename = self.conf['identityfile'],
password = None,)
stdin, stdout, stderr = client.exec_command(cmd)
return ''.join([i.rstrip('\r\n ').lstrip() for i in stdout.readlines()])
finally:
client.close()<commit_msg>Add an exception if ssh not configured<commit_after>
|
# -*- coding: utf-8 -*-
__author__ = 'John Moylan'
import sys
from paramiko import SSHClient, SSHConfig, AutoAddPolicy
from pyvarnish.settings import SSH_CONFIG
class Varnish_admin():
def __init__(self, server=''):
self.server = server
self.conf = self.config()
def config(self):
sshconfig = SSHConfig()
try:
sshconfig.parse(open(SSH_CONFIG))
except IOError:
print "your app needs to have a valid " \
"ssh config file location in settings.py"
sys.exit(1)
return sshconfig.lookup(self.server)
def runcmd(self, cmd):
try:
client = SSHClient()
client.load_system_host_keys()
client.set_missing_host_key_policy(AutoAddPolicy())
client.connect(self.conf['hostname'],
port = int(self.conf['port']),
username = self.conf['user'],
key_filename = self.conf['identityfile'],
password = None,)
stdin, stdout, stderr = client.exec_command(cmd)
return ''.join([i.rstrip('\r\n ').lstrip() for i in stdout.readlines()])
finally:
client.close()
|
# -*- coding: utf-8 -*-
__author__ = 'John Moylan'
from paramiko import SSHClient, SSHConfig, AutoAddPolicy
from pyvarnish.settings import SSH_CONFIG
class Varnish_admin():
def __init__(self, server=''):
self.server = server
self.conf = self.config()
def config(self):
sshconfig = SSHConfig()
sshconfig.parse(open(SSH_CONFIG))
return sshconfig.lookup(self.server)
def runcmd(self, cmd):
try:
client = SSHClient()
client.load_system_host_keys()
client.set_missing_host_key_policy(AutoAddPolicy())
client.connect(self.conf['hostname'],
port = int(self.conf['port']),
username = self.conf['user'],
key_filename = self.conf['identityfile'],
password = None,)
stdin, stdout, stderr = client.exec_command(cmd)
return ''.join([i.rstrip('\r\n ').lstrip() for i in stdout.readlines()])
finally:
client.close()Add an exception if ssh not configured# -*- coding: utf-8 -*-
__author__ = 'John Moylan'
import sys
from paramiko import SSHClient, SSHConfig, AutoAddPolicy
from pyvarnish.settings import SSH_CONFIG
class Varnish_admin():
def __init__(self, server=''):
self.server = server
self.conf = self.config()
def config(self):
sshconfig = SSHConfig()
try:
sshconfig.parse(open(SSH_CONFIG))
except IOError:
print "your app needs to have a valid " \
"ssh config file location in settings.py"
sys.exit(1)
return sshconfig.lookup(self.server)
def runcmd(self, cmd):
try:
client = SSHClient()
client.load_system_host_keys()
client.set_missing_host_key_policy(AutoAddPolicy())
client.connect(self.conf['hostname'],
port = int(self.conf['port']),
username = self.conf['user'],
key_filename = self.conf['identityfile'],
password = None,)
stdin, stdout, stderr = client.exec_command(cmd)
return ''.join([i.rstrip('\r\n ').lstrip() for i in stdout.readlines()])
finally:
client.close()
|
<commit_before># -*- coding: utf-8 -*-
__author__ = 'John Moylan'
from paramiko import SSHClient, SSHConfig, AutoAddPolicy
from pyvarnish.settings import SSH_CONFIG
class Varnish_admin():
def __init__(self, server=''):
self.server = server
self.conf = self.config()
def config(self):
sshconfig = SSHConfig()
sshconfig.parse(open(SSH_CONFIG))
return sshconfig.lookup(self.server)
def runcmd(self, cmd):
try:
client = SSHClient()
client.load_system_host_keys()
client.set_missing_host_key_policy(AutoAddPolicy())
client.connect(self.conf['hostname'],
port = int(self.conf['port']),
username = self.conf['user'],
key_filename = self.conf['identityfile'],
password = None,)
stdin, stdout, stderr = client.exec_command(cmd)
return ''.join([i.rstrip('\r\n ').lstrip() for i in stdout.readlines()])
finally:
client.close()<commit_msg>Add an exception if ssh not configured<commit_after># -*- coding: utf-8 -*-
__author__ = 'John Moylan'
import sys
from paramiko import SSHClient, SSHConfig, AutoAddPolicy
from pyvarnish.settings import SSH_CONFIG
class Varnish_admin():
def __init__(self, server=''):
self.server = server
self.conf = self.config()
def config(self):
sshconfig = SSHConfig()
try:
sshconfig.parse(open(SSH_CONFIG))
except IOError:
print "your app needs to have a valid " \
"ssh config file location in settings.py"
sys.exit(1)
return sshconfig.lookup(self.server)
def runcmd(self, cmd):
try:
client = SSHClient()
client.load_system_host_keys()
client.set_missing_host_key_policy(AutoAddPolicy())
client.connect(self.conf['hostname'],
port = int(self.conf['port']),
username = self.conf['user'],
key_filename = self.conf['identityfile'],
password = None,)
stdin, stdout, stderr = client.exec_command(cmd)
return ''.join([i.rstrip('\r\n ').lstrip() for i in stdout.readlines()])
finally:
client.close()
|
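A minimal sketch (not part of the record) of the same parse-then-connect flow with contextlib.closing, so client.close() still runs if connect() or exec_command() raises; SSH_CONFIG is an assumed path and the helper names are illustrative rather than the pyvarnish API.
import os
import sys
from contextlib import closing
from paramiko import SSHClient, SSHConfig, AutoAddPolicy

SSH_CONFIG = os.path.expanduser("~/.ssh/config")  # assumed location, not the record's settings.py value

def lookup_host(server):
    config = SSHConfig()
    try:
        with open(SSH_CONFIG) as fh:
            config.parse(fh)
    except IOError:
        sys.exit("no usable ssh config file; point SSH_CONFIG at a valid one")
    return config.lookup(server)

def run_cmd(server, cmd):
    conf = lookup_host(server)
    # closing() guarantees close() even when the connection attempt fails
    with closing(SSHClient()) as client:
        client.load_system_host_keys()
        client.set_missing_host_key_policy(AutoAddPolicy())
        client.connect(conf["hostname"],
                       port=int(conf.get("port", 22)),
                       username=conf.get("user"),
                       key_filename=conf.get("identityfile"))
        stdin, stdout, stderr = client.exec_command(cmd)
        return "".join(line.strip() for line in stdout)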
afbb9a9be46c6a9db02f6f3256c82b9939ce5c9e
|
src/rna_seq/forms.py
|
src/rna_seq/forms.py
|
from crispy_forms.bootstrap import FormActions
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Div, Field, Fieldset, HTML, Layout, Submit
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from analyses.forms import AbstractAnalysisCreateForm, AnalysisCommonLayout
from .models import RNASeqModel
class RNASeqCreateForm(AbstractAnalysisCreateForm):
class Meta(AbstractAnalysisCreateForm.Meta):
model = RNASeqModel
fields = (
*AbstractAnalysisCreateForm.Meta.fields,
'quality_check', 'trim_adapter', 'rm_duplicate',
)
widgets = {
**AbstractAnalysisCreateForm.Meta.widgets,
}
@cached_property
def helper(self):
helper = FormHelper()
helper.layout = Layout(
AnalysisCommonLayout(analysis_type="RNA-Seq"),
Fieldset(
'Quality Check',
HTML(
"<p>Examine and process the quality of the sequencing "
"reads.</p>"
),
Field('quality_check'),
Field('trim_adapter'),
Field('rm_duplicate'),
),
FormActions(
Submit(
'save', _('Create New Analysis'), css_class='btn-lg',
)
)
)
helper.include_media = False
return helper
|
from crispy_forms.helper import FormHelper
from crispy_forms.bootstrap import InlineField
from crispy_forms.layout import Div, Field, Fieldset, HTML, Layout
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from analyses.forms import (
AbstractAnalysisCreateForm,
AnalysisCommonLayout,
AnalysisFormActions,
Include,
)
from .models import RNASeqModel
class RNASeqCreateForm(AbstractAnalysisCreateForm):
class Meta(AbstractAnalysisCreateForm.Meta):
model = RNASeqModel
fields = '__all__'
widgets = {
**AbstractAnalysisCreateForm.Meta.widgets,
}
@cached_property
def helper(self):
helper = super().helper
helper.layout = Layout(
AnalysisCommonLayout(analysis_type="RNA-Seq"),
Fieldset(
'Quality Check',
HTML(
"<p>Examine and process the quality of the sequencing "
"reads.</p>"
),
Field('quality_check'),
Field('trim_adapter'),
Field('rm_duplicate'),
),
AnalysisFormActions(),
)
return helper
|
Use analysis base class form helper and form building blocks
|
Use analysis base class form helper and form building blocks
|
Python
|
mit
|
ccwang002/biocloud-server-kai,ccwang002/biocloud-server-kai,ccwang002/biocloud-server-kai
|
from crispy_forms.bootstrap import FormActions
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Div, Field, Fieldset, HTML, Layout, Submit
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from analyses.forms import AbstractAnalysisCreateForm, AnalysisCommonLayout
from .models import RNASeqModel
class RNASeqCreateForm(AbstractAnalysisCreateForm):
class Meta(AbstractAnalysisCreateForm.Meta):
model = RNASeqModel
fields = (
*AbstractAnalysisCreateForm.Meta.fields,
'quality_check', 'trim_adapter', 'rm_duplicate',
)
widgets = {
**AbstractAnalysisCreateForm.Meta.widgets,
}
@cached_property
def helper(self):
helper = FormHelper()
helper.layout = Layout(
AnalysisCommonLayout(analysis_type="RNA-Seq"),
Fieldset(
'Quality Check',
HTML(
"<p>Examine and process the quality of the sequencing "
"reads.</p>"
),
Field('quality_check'),
Field('trim_adapter'),
Field('rm_duplicate'),
),
FormActions(
Submit(
'save', _('Create New Analysis'), css_class='btn-lg',
)
)
)
helper.include_media = False
return helper
Use analysis base class form helper and form building blocks
|
from crispy_forms.helper import FormHelper
from crispy_forms.bootstrap import InlineField
from crispy_forms.layout import Div, Field, Fieldset, HTML, Layout
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from analyses.forms import (
AbstractAnalysisCreateForm,
AnalysisCommonLayout,
AnalysisFormActions,
Include,
)
from .models import RNASeqModel
class RNASeqCreateForm(AbstractAnalysisCreateForm):
class Meta(AbstractAnalysisCreateForm.Meta):
model = RNASeqModel
fields = '__all__'
widgets = {
**AbstractAnalysisCreateForm.Meta.widgets,
}
@cached_property
def helper(self):
helper = super().helper
helper.layout = Layout(
AnalysisCommonLayout(analysis_type="RNA-Seq"),
Fieldset(
'Quality Check',
HTML(
"<p>Examine and process the quality of the sequencing "
"reads.</p>"
),
Field('quality_check'),
Field('trim_adapter'),
Field('rm_duplicate'),
),
AnalysisFormActions(),
)
return helper
|
<commit_before>from crispy_forms.bootstrap import FormActions
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Div, Field, Fieldset, HTML, Layout, Submit
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from analyses.forms import AbstractAnalysisCreateForm, AnalysisCommonLayout
from .models import RNASeqModel
class RNASeqCreateForm(AbstractAnalysisCreateForm):
class Meta(AbstractAnalysisCreateForm.Meta):
model = RNASeqModel
fields = (
*AbstractAnalysisCreateForm.Meta.fields,
'quality_check', 'trim_adapter', 'rm_duplicate',
)
widgets = {
**AbstractAnalysisCreateForm.Meta.widgets,
}
@cached_property
def helper(self):
helper = FormHelper()
helper.layout = Layout(
AnalysisCommonLayout(analysis_type="RNA-Seq"),
Fieldset(
'Quality Check',
HTML(
"<p>Examine and process the quality of the sequencing "
"reads.</p>"
),
Field('quality_check'),
Field('trim_adapter'),
Field('rm_duplicate'),
),
FormActions(
Submit(
'save', _('Create New Analysis'), css_class='btn-lg',
)
)
)
helper.include_media = False
return helper
<commit_msg>Use analysis base class form helper and form building blocks<commit_after>
|
from crispy_forms.helper import FormHelper
from crispy_forms.bootstrap import InlineField
from crispy_forms.layout import Div, Field, Fieldset, HTML, Layout
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from analyses.forms import (
AbstractAnalysisCreateForm,
AnalysisCommonLayout,
AnalysisFormActions,
Include,
)
from .models import RNASeqModel
class RNASeqCreateForm(AbstractAnalysisCreateForm):
class Meta(AbstractAnalysisCreateForm.Meta):
model = RNASeqModel
fields = '__all__'
widgets = {
**AbstractAnalysisCreateForm.Meta.widgets,
}
@cached_property
def helper(self):
helper = super().helper
helper.layout = Layout(
AnalysisCommonLayout(analysis_type="RNA-Seq"),
Fieldset(
'Quality Check',
HTML(
"<p>Examine and process the quality of the sequencing "
"reads.</p>"
),
Field('quality_check'),
Field('trim_adapter'),
Field('rm_duplicate'),
),
AnalysisFormActions(),
)
return helper
|
from crispy_forms.bootstrap import FormActions
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Div, Field, Fieldset, HTML, Layout, Submit
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from analyses.forms import AbstractAnalysisCreateForm, AnalysisCommonLayout
from .models import RNASeqModel
class RNASeqCreateForm(AbstractAnalysisCreateForm):
class Meta(AbstractAnalysisCreateForm.Meta):
model = RNASeqModel
fields = (
*AbstractAnalysisCreateForm.Meta.fields,
'quality_check', 'trim_adapter', 'rm_duplicate',
)
widgets = {
**AbstractAnalysisCreateForm.Meta.widgets,
}
@cached_property
def helper(self):
helper = FormHelper()
helper.layout = Layout(
AnalysisCommonLayout(analysis_type="RNA-Seq"),
Fieldset(
'Quality Check',
HTML(
"<p>Examine and process the quality of the sequencing "
"reads.</p>"
),
Field('quality_check'),
Field('trim_adapter'),
Field('rm_duplicate'),
),
FormActions(
Submit(
'save', _('Create New Analysis'), css_class='btn-lg',
)
)
)
helper.include_media = False
return helper
Use analysis base class form helper and form building blocksfrom crispy_forms.helper import FormHelper
from crispy_forms.bootstrap import InlineField
from crispy_forms.layout import Div, Field, Fieldset, HTML, Layout
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from analyses.forms import (
AbstractAnalysisCreateForm,
AnalysisCommonLayout,
AnalysisFormActions,
Include,
)
from .models import RNASeqModel
class RNASeqCreateForm(AbstractAnalysisCreateForm):
class Meta(AbstractAnalysisCreateForm.Meta):
model = RNASeqModel
fields = '__all__'
widgets = {
**AbstractAnalysisCreateForm.Meta.widgets,
}
@cached_property
def helper(self):
helper = super().helper
helper.layout = Layout(
AnalysisCommonLayout(analysis_type="RNA-Seq"),
Fieldset(
'Quality Check',
HTML(
"<p>Examine and process the quality of the sequencing "
"reads.</p>"
),
Field('quality_check'),
Field('trim_adapter'),
Field('rm_duplicate'),
),
AnalysisFormActions(),
)
return helper
|
<commit_before>from crispy_forms.bootstrap import FormActions
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Div, Field, Fieldset, HTML, Layout, Submit
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from analyses.forms import AbstractAnalysisCreateForm, AnalysisCommonLayout
from .models import RNASeqModel
class RNASeqCreateForm(AbstractAnalysisCreateForm):
class Meta(AbstractAnalysisCreateForm.Meta):
model = RNASeqModel
fields = (
*AbstractAnalysisCreateForm.Meta.fields,
'quality_check', 'trim_adapter', 'rm_duplicate',
)
widgets = {
**AbstractAnalysisCreateForm.Meta.widgets,
}
@cached_property
def helper(self):
helper = FormHelper()
helper.layout = Layout(
AnalysisCommonLayout(analysis_type="RNA-Seq"),
Fieldset(
'Quality Check',
HTML(
"<p>Examine and process the quality of the sequencing "
"reads.</p>"
),
Field('quality_check'),
Field('trim_adapter'),
Field('rm_duplicate'),
),
FormActions(
Submit(
'save', _('Create New Analysis'), css_class='btn-lg',
)
)
)
helper.include_media = False
return helper
<commit_msg>Use analysis base class form helper and form building blocks<commit_after>from crispy_forms.helper import FormHelper
from crispy_forms.bootstrap import InlineField
from crispy_forms.layout import Div, Field, Fieldset, HTML, Layout
from django.utils.functional import cached_property
from django.utils.translation import ugettext_lazy as _
from analyses.forms import (
AbstractAnalysisCreateForm,
AnalysisCommonLayout,
AnalysisFormActions,
Include,
)
from .models import RNASeqModel
class RNASeqCreateForm(AbstractAnalysisCreateForm):
class Meta(AbstractAnalysisCreateForm.Meta):
model = RNASeqModel
fields = '__all__'
widgets = {
**AbstractAnalysisCreateForm.Meta.widgets,
}
@cached_property
def helper(self):
helper = super().helper
helper.layout = Layout(
AnalysisCommonLayout(analysis_type="RNA-Seq"),
Fieldset(
'Quality Check',
HTML(
"<p>Examine and process the quality of the sequencing "
"reads.</p>"
),
Field('quality_check'),
Field('trim_adapter'),
Field('rm_duplicate'),
),
AnalysisFormActions(),
)
return helper
|
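The core of this refactor is that the shared helper lives on the base class and subclasses extend it through super(); a stripped-down sketch of that pattern outside Django/crispy-forms, with invented class names and plain lists standing in for the FormHelper layout:
from functools import cached_property

class BaseCreateForm:
    @cached_property
    def helper(self):
        # shared building blocks every analysis form gets
        return ["common-layout", "form-actions"]

class RNASeqCreateForm(BaseCreateForm):
    @cached_property
    def helper(self):
        helper = super().helper              # reuse the base class helper
        helper.insert(1, "quality-check-fieldset")
        return helper

print(RNASeqCreateForm().helper)
# ['common-layout', 'quality-check-fieldset', 'form-actions']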
44fbeeb82ce797f357de36979ff47f2dec9d70ca
|
graphene/commands/show_command.py
|
graphene/commands/show_command.py
|
from enum import Enum
from graphene.commands.command import Command
from graphene.utils import PrettyPrinter
class ShowCommand(Command):
class ShowType(Enum):
TYPES = 1
RELATIONS = 2
def __init__(self, show_type):
self.show_type = show_type
def execute(self, storage_manager):
if self.show_type == ShowCommand.ShowType.TYPES:
i = 1
type_list = []
while True:
cur_type = storage_manager.type_manager.get_item_at_index(i)
if cur_type is None:
break
type_name = storage_manager.type_name_manager \
.read_name_at_index(cur_type.nameId)
type_list.append(type_name)
i += 1
PrettyPrinter.print_list(type_list, "Type Name")
|
from enum import Enum
from graphene.commands.command import Command
from graphene.utils import PrettyPrinter
class ShowCommand(Command):
class ShowType(Enum):
TYPES = 1
RELATIONS = 2
def __init__(self, show_type):
self.show_type = show_type
def execute(self, storage_manager):
if self.show_type == ShowCommand.ShowType.TYPES:
i = 1
type_list = []
while True:
cur_type = storage_manager.type_manager.get_item_at_index(i)
if cur_type is None:
break
type_name = storage_manager.type_name_manager \
.read_name_at_index(cur_type.nameId)
type_list.append(type_name)
i += 1
if len(type_list) == 0:
print "No types found."
return
PrettyPrinter.print_list(type_list, "Type Name")
|
Update SHOW TYPES command to handle no types being created
|
Update SHOW TYPES command to handle no types being created
|
Python
|
apache-2.0
|
PHB-CS123/graphene,PHB-CS123/graphene,PHB-CS123/graphene
|
from enum import Enum
from graphene.commands.command import Command
from graphene.utils import PrettyPrinter
class ShowCommand(Command):
class ShowType(Enum):
TYPES = 1
RELATIONS = 2
def __init__(self, show_type):
self.show_type = show_type
def execute(self, storage_manager):
if self.show_type == ShowCommand.ShowType.TYPES:
i = 1
type_list = []
while True:
cur_type = storage_manager.type_manager.get_item_at_index(i)
if cur_type is None:
break
type_name = storage_manager.type_name_manager \
.read_name_at_index(cur_type.nameId)
type_list.append(type_name)
i += 1
PrettyPrinter.print_list(type_list, "Type Name")
Update SHOW TYPES command to handle no types being created
|
from enum import Enum
from graphene.commands.command import Command
from graphene.utils import PrettyPrinter
class ShowCommand(Command):
class ShowType(Enum):
TYPES = 1
RELATIONS = 2
def __init__(self, show_type):
self.show_type = show_type
def execute(self, storage_manager):
if self.show_type == ShowCommand.ShowType.TYPES:
i = 1
type_list = []
while True:
cur_type = storage_manager.type_manager.get_item_at_index(i)
if cur_type is None:
break
type_name = storage_manager.type_name_manager \
.read_name_at_index(cur_type.nameId)
type_list.append(type_name)
i += 1
if len(type_list) == 0:
print "No types found."
return
PrettyPrinter.print_list(type_list, "Type Name")
|
<commit_before>from enum import Enum
from graphene.commands.command import Command
from graphene.utils import PrettyPrinter
class ShowCommand(Command):
class ShowType(Enum):
TYPES = 1
RELATIONS = 2
def __init__(self, show_type):
self.show_type = show_type
def execute(self, storage_manager):
if self.show_type == ShowCommand.ShowType.TYPES:
i = 1
type_list = []
while True:
cur_type = storage_manager.type_manager.get_item_at_index(i)
if cur_type is None:
break
type_name = storage_manager.type_name_manager \
.read_name_at_index(cur_type.nameId)
type_list.append(type_name)
i += 1
PrettyPrinter.print_list(type_list, "Type Name")
<commit_msg>Update SHOW TYPES command to handle no types being created<commit_after>
|
from enum import Enum
from graphene.commands.command import Command
from graphene.utils import PrettyPrinter
class ShowCommand(Command):
class ShowType(Enum):
TYPES = 1
RELATIONS = 2
def __init__(self, show_type):
self.show_type = show_type
def execute(self, storage_manager):
if self.show_type == ShowCommand.ShowType.TYPES:
i = 1
type_list = []
while True:
cur_type = storage_manager.type_manager.get_item_at_index(i)
if cur_type is None:
break
type_name = storage_manager.type_name_manager \
.read_name_at_index(cur_type.nameId)
type_list.append(type_name)
i += 1
if len(type_list) == 0:
print "No types found."
return
PrettyPrinter.print_list(type_list, "Type Name")
|
from enum import Enum
from graphene.commands.command import Command
from graphene.utils import PrettyPrinter
class ShowCommand(Command):
class ShowType(Enum):
TYPES = 1
RELATIONS = 2
def __init__(self, show_type):
self.show_type = show_type
def execute(self, storage_manager):
if self.show_type == ShowCommand.ShowType.TYPES:
i = 1
type_list = []
while True:
cur_type = storage_manager.type_manager.get_item_at_index(i)
if cur_type is None:
break
type_name = storage_manager.type_name_manager \
.read_name_at_index(cur_type.nameId)
type_list.append(type_name)
i += 1
PrettyPrinter.print_list(type_list, "Type Name")
Update SHOW TYPES command to handle no types being createdfrom enum import Enum
from graphene.commands.command import Command
from graphene.utils import PrettyPrinter
class ShowCommand(Command):
class ShowType(Enum):
TYPES = 1
RELATIONS = 2
def __init__(self, show_type):
self.show_type = show_type
def execute(self, storage_manager):
if self.show_type == ShowCommand.ShowType.TYPES:
i = 1
type_list = []
while True:
cur_type = storage_manager.type_manager.get_item_at_index(i)
if cur_type is None:
break
type_name = storage_manager.type_name_manager \
.read_name_at_index(cur_type.nameId)
type_list.append(type_name)
i += 1
if len(type_list) == 0:
print "No types found."
return
PrettyPrinter.print_list(type_list, "Type Name")
|
<commit_before>from enum import Enum
from graphene.commands.command import Command
from graphene.utils import PrettyPrinter
class ShowCommand(Command):
class ShowType(Enum):
TYPES = 1
RELATIONS = 2
def __init__(self, show_type):
self.show_type = show_type
def execute(self, storage_manager):
if self.show_type == ShowCommand.ShowType.TYPES:
i = 1
type_list = []
while True:
cur_type = storage_manager.type_manager.get_item_at_index(i)
if cur_type is None:
break
type_name = storage_manager.type_name_manager \
.read_name_at_index(cur_type.nameId)
type_list.append(type_name)
i += 1
PrettyPrinter.print_list(type_list, "Type Name")
<commit_msg>Update SHOW TYPES command to handle no types being created<commit_after>from enum import Enum
from graphene.commands.command import Command
from graphene.utils import PrettyPrinter
class ShowCommand(Command):
class ShowType(Enum):
TYPES = 1
RELATIONS = 2
def __init__(self, show_type):
self.show_type = show_type
def execute(self, storage_manager):
if self.show_type == ShowCommand.ShowType.TYPES:
i = 1
type_list = []
while True:
cur_type = storage_manager.type_manager.get_item_at_index(i)
if cur_type is None:
break
type_name = storage_manager.type_name_manager \
.read_name_at_index(cur_type.nameId)
type_list.append(type_name)
i += 1
if len(type_list) == 0:
print "No types found."
return
PrettyPrinter.print_list(type_list, "Type Name")
|
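A Python 3 sketch of the scan-until-None loop plus the empty-list guard this commit adds, with the storage managers replaced by a plain dict; the real command prints a table through PrettyPrinter, which is omitted here.
from itertools import count

def list_type_names(get_item_at_index, read_name):
    names = []
    for i in count(1):                  # indices start at 1, as in the record
        item = get_item_at_index(i)
        if item is None:
            break
        names.append(read_name(item))
    if not names:                       # the behaviour this commit adds
        print("No types found.")
    return names

types = {1: "Person", 2: "City"}
print(list_type_names(types.get, str.upper))   # ['PERSON', 'CITY']
list_type_names({}.get, str.upper)             # prints "No types found."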
abf36e3a6ce9eb001b3501756b3d3d15bd49d5bc
|
jazzband/members/decorators.py
|
jazzband/members/decorators.py
|
from flask import flash, redirect
from flask_login import current_user
import wrapt
from ..account.views import default_url
def member_required(next_url=None, message=None):
if next_url is None:
next_url = default_url()
if message is None:
message = "Sorry but you're not a member of Jazzband at the moment."
@wrapt.decorator
def wrapper(wrapped, instance, args, kwargs):
"""
If you decorate a view with this, it will ensure that the current user is
a Jazzband member.
:param func: The view function to decorate.
:type func: function
"""
if (
not current_user.is_member
or current_user.is_banned
or current_user.is_restricted
):
flash(message)
return redirect(next_url)
return wrapped(*args, **kwargs)
return wrapper
|
from flask import flash, redirect
from flask_login import current_user
import wrapt
from ..account.views import default_url
def member_required(next_url=None, message=None):
if message is None:
message = "Sorry but you're not a member of Jazzband at the moment."
@wrapt.decorator
def wrapper(wrapped, instance, args, kwargs):
"""
If you decorate a view with this, it will ensure that the current user is
a Jazzband member.
:param func: The view function to decorate.
:type func: function
"""
if next_url is None:
next_url = default_url()
if (
not current_user.is_member
or current_user.is_banned
or current_user.is_restricted
):
flash(message)
return redirect(next_url)
return wrapped(*args, **kwargs)
return wrapper
|
Fix import time issue with member_required decorator.
|
Fix import time issue with member_required decorator.
|
Python
|
mit
|
jazzband/site,jazzband/jazzband-site,jazzband/jazzband-site,jazzband/website,jazzband/website,jazzband/site,jazzband/website,jazzband/website
|
from flask import flash, redirect
from flask_login import current_user
import wrapt
from ..account.views import default_url
def member_required(next_url=None, message=None):
if next_url is None:
next_url = default_url()
if message is None:
message = "Sorry but you're not a member of Jazzband at the moment."
@wrapt.decorator
def wrapper(wrapped, instance, args, kwargs):
"""
If you decorate a view with this, it will ensure that the current user is
a Jazzband member.
:param func: The view function to decorate.
:type func: function
"""
if (
not current_user.is_member
or current_user.is_banned
or current_user.is_restricted
):
flash(message)
return redirect(next_url)
return wrapped(*args, **kwargs)
return wrapper
Fix import time issue with member_required decorator.
|
from flask import flash, redirect
from flask_login import current_user
import wrapt
from ..account.views import default_url
def member_required(next_url=None, message=None):
if message is None:
message = "Sorry but you're not a member of Jazzband at the moment."
@wrapt.decorator
def wrapper(wrapped, instance, args, kwargs):
"""
If you decorate a view with this, it will ensure that the current user is
a Jazzband member.
:param func: The view function to decorate.
:type func: function
"""
if next_url is None:
next_url = default_url()
if (
not current_user.is_member
or current_user.is_banned
or current_user.is_restricted
):
flash(message)
return redirect(next_url)
return wrapped(*args, **kwargs)
return wrapper
|
<commit_before>from flask import flash, redirect
from flask_login import current_user
import wrapt
from ..account.views import default_url
def member_required(next_url=None, message=None):
if next_url is None:
next_url = default_url()
if message is None:
message = "Sorry but you're not a member of Jazzband at the moment."
@wrapt.decorator
def wrapper(wrapped, instance, args, kwargs):
"""
If you decorate a view with this, it will ensure that the current user is
a Jazzband member.
:param func: The view function to decorate.
:type func: function
"""
if (
not current_user.is_member
or current_user.is_banned
or current_user.is_restricted
):
flash(message)
return redirect(next_url)
return wrapped(*args, **kwargs)
return wrapper
<commit_msg>Fix import time issue with member_required decorator.<commit_after>
|
from flask import flash, redirect
from flask_login import current_user
import wrapt
from ..account.views import default_url
def member_required(next_url=None, message=None):
if message is None:
message = "Sorry but you're not a member of Jazzband at the moment."
@wrapt.decorator
def wrapper(wrapped, instance, args, kwargs):
"""
If you decorate a view with this, it will ensure that the current user is
a Jazzband member.
:param func: The view function to decorate.
:type func: function
"""
if next_url is None:
next_url = default_url()
if (
not current_user.is_member
or current_user.is_banned
or current_user.is_restricted
):
flash(message)
return redirect(next_url)
return wrapped(*args, **kwargs)
return wrapper
|
from flask import flash, redirect
from flask_login import current_user
import wrapt
from ..account.views import default_url
def member_required(next_url=None, message=None):
if next_url is None:
next_url = default_url()
if message is None:
message = "Sorry but you're not a member of Jazzband at the moment."
@wrapt.decorator
def wrapper(wrapped, instance, args, kwargs):
"""
If you decorate a view with this, it will ensure that the current user is
a Jazzband member.
:param func: The view function to decorate.
:type func: function
"""
if (
not current_user.is_member
or current_user.is_banned
or current_user.is_restricted
):
flash(message)
return redirect(next_url)
return wrapped(*args, **kwargs)
return wrapper
Fix import time issue with member_required decorator.from flask import flash, redirect
from flask_login import current_user
import wrapt
from ..account.views import default_url
def member_required(next_url=None, message=None):
if message is None:
message = "Sorry but you're not a member of Jazzband at the moment."
@wrapt.decorator
def wrapper(wrapped, instance, args, kwargs):
"""
If you decorate a view with this, it will ensure that the current user is
a Jazzband member.
:param func: The view function to decorate.
:type func: function
"""
if next_url is None:
next_url = default_url()
if (
not current_user.is_member
or current_user.is_banned
or current_user.is_restricted
):
flash(message)
return redirect(next_url)
return wrapped(*args, **kwargs)
return wrapper
|
<commit_before>from flask import flash, redirect
from flask_login import current_user
import wrapt
from ..account.views import default_url
def member_required(next_url=None, message=None):
if next_url is None:
next_url = default_url()
if message is None:
message = "Sorry but you're not a member of Jazzband at the moment."
@wrapt.decorator
def wrapper(wrapped, instance, args, kwargs):
"""
If you decorate a view with this, it will ensure that the current user is
a Jazzband member.
:param func: The view function to decorate.
:type func: function
"""
if (
not current_user.is_member
or current_user.is_banned
or current_user.is_restricted
):
flash(message)
return redirect(next_url)
return wrapped(*args, **kwargs)
return wrapper
<commit_msg>Fix import time issue with member_required decorator.<commit_after>from flask import flash, redirect
from flask_login import current_user
import wrapt
from ..account.views import default_url
def member_required(next_url=None, message=None):
if message is None:
message = "Sorry but you're not a member of Jazzband at the moment."
@wrapt.decorator
def wrapper(wrapped, instance, args, kwargs):
"""
If you decorate a view with this, it will ensure that the current user is
a Jazzband member.
:param func: The view function to decorate.
:type func: function
"""
if next_url is None:
next_url = default_url()
if (
not current_user.is_member
or current_user.is_banned
or current_user.is_restricted
):
flash(message)
return redirect(next_url)
return wrapped(*args, **kwargs)
return wrapper
|
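The fix defers default_url() from import time to request time; note that reassigning the closure variable next_url inside the wrapper appears to need a nonlocal declaration in Python 3, so a sketch like the following binds a separate local instead. The decorator shape is simplified and the stubs stand in for the Flask pieces.
import functools

def default_url():                      # stand-in for the app's default_url()
    return "/account/"

def member_required(next_url=None, message="Members only."):
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            url = next_url if next_url is not None else default_url()  # resolved per call, not at import
            print("non-members get flashed %r and sent to %s" % (message, url))
            return func(*args, **kwargs)
        return wrapper
    return decorator

@member_required()
def dashboard():
    return "rendered dashboard"

print(dashboard())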
45896958badb2ff5f7c36a86a60fbdab80d2f618
|
plots/urls.py
|
plots/urls.py
|
__author__ = 'ankesh'
from django.conf.urls import patterns, url
|
__author__ = 'ankesh'
from django.conf.urls import patterns, url
from plots.views import rawdata, draw
urlpatterns = patterns('',
url(r'^(?P<type>[A-z]+)/$', draw, name='drawChart'),
url(r'^(?P<type>[A-z]+)/data/$', rawdata, name='rawdata'),
)
|
Add the URL mappings for the plots app.
|
Add the URL mappings for the plots app.
|
Python
|
bsd-2-clause
|
ankeshanand/benchmark,ankeshanand/benchmark,ankeshanand/benchmark,ankeshanand/benchmark
|
__author__ = 'ankesh'
from django.conf.urls import patterns, url
Add the URL mappings for the plots app.
|
__author__ = 'ankesh'
from django.conf.urls import patterns, url
from plots.views import rawdata, draw
urlpatterns = patterns('',
url(r'^(?P<type>[A-z]+)/$', draw, name='drawChart'),
url(r'^(?P<type>[A-z]+)/data/$', rawdata, name='rawdata'),
)
|
<commit_before>__author__ = 'ankesh'
from django.conf.urls import patterns, url
<commit_msg>Add the URL mappings for the plots app.<commit_after>
|
__author__ = 'ankesh'
from django.conf.urls import patterns, url
from plots.views import rawdata, draw
urlpatterns = patterns('',
url(r'^(?P<type>[A-z]+)/$', draw, name='drawChart'),
url(r'^(?P<type>[A-z]+)/data/$', rawdata, name='rawdata'),
)
|
__author__ = 'ankesh'
from django.conf.urls import patterns, url
Add the URL mappings for the plots app.__author__ = 'ankesh'
from django.conf.urls import patterns, url
from plots.views import rawdata, draw
urlpatterns = patterns('',
url(r'^(?P<type>[A-z]+)/$', draw, name='drawChart'),
url(r'^(?P<type>[A-z]+)/data/$', rawdata, name='rawdata'),
)
|
<commit_before>__author__ = 'ankesh'
from django.conf.urls import patterns, url
<commit_msg>Add the URL mappings for the plots app.<commit_after>__author__ = 'ankesh'
from django.conf.urls import patterns, url
from plots.views import rawdata, draw
urlpatterns = patterns('',
url(r'^(?P<type>[A-z]+)/$', draw, name='drawChart'),
url(r'^(?P<type>[A-z]+)/data/$', rawdata, name='rawdata'),
)
|
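For reference, the same two routes in post-1.10 Django style, where patterns() no longer exists; [A-Za-z] also avoids the stray punctuation characters that the record's [A-z] class happens to match. plots.views is assumed to provide draw and rawdata, as in the record.
from django.urls import re_path
from plots.views import draw, rawdata   # assumed, as in the record

urlpatterns = [
    re_path(r'^(?P<type>[A-Za-z]+)/$', draw, name='drawChart'),
    re_path(r'^(?P<type>[A-Za-z]+)/data/$', rawdata, name='rawdata'),
]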
e69402647f0d3878ced159a3e8f7dbc386a8158f
|
reducer/__init__.py
|
reducer/__init__.py
|
# keep this at the top -- name is needed for imports to succeed
NOTEBOOK_TEMPLATE_NAME = 'reducer-template.ipynb'
__version__ = '0.1.dev8'
from .core import *
|
# keep this at the top -- name is needed for imports to succeed
NOTEBOOK_TEMPLATE_NAME = 'reducer-template.ipynb'
__version__ = '0.1.dev9'
from .core import *
|
Make version match that from setup.py
|
Make version match that from setup.py
|
Python
|
bsd-3-clause
|
mwcraig/reducer,astrofrog/reducer,astrofrog/reducer,StuartLittlefair/reducer,StuartLittlefair/reducer
|
# keep this at the top -- name is needed for imports to succeed
NOTEBOOK_TEMPLATE_NAME = 'reducer-template.ipynb'
__version__ = '0.1.dev8'
from .core import *
Make version match that from setup.py
|
# keep this at the top -- name is needed for imports to succeed
NOTEBOOK_TEMPLATE_NAME = 'reducer-template.ipynb'
__version__ = '0.1.dev9'
from .core import *
|
<commit_before># keep this at the top -- name is needed for imports to succeed
NOTEBOOK_TEMPLATE_NAME = 'reducer-template.ipynb'
__version__ = '0.1.dev8'
from .core import *
<commit_msg>Make version match that from setup.py<commit_after>
|
# keep this at the top -- name is needed for imports to succeed
NOTEBOOK_TEMPLATE_NAME = 'reducer-template.ipynb'
__version__ = '0.1.dev9'
from .core import *
|
# keep this at the top -- name is needed for imports to succeed
NOTEBOOK_TEMPLATE_NAME = 'reducer-template.ipynb'
__version__ = '0.1.dev8'
from .core import *
Make version match that from setup.py# keep this at the top -- name is needed for imports to succeed
NOTEBOOK_TEMPLATE_NAME = 'reducer-template.ipynb'
__version__ = '0.1.dev9'
from .core import *
|
<commit_before># keep this at the top -- name is needed for imports to succeed
NOTEBOOK_TEMPLATE_NAME = 'reducer-template.ipynb'
__version__ = '0.1.dev8'
from .core import *
<commit_msg>Make version match that from setup.py<commit_after># keep this at the top -- name is needed for imports to succeed
NOTEBOOK_TEMPLATE_NAME = 'reducer-template.ipynb'
__version__ = '0.1.dev9'
from .core import *
|
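Bumps like this drift precisely because the version string lives in two places; one common remedy is to single-source it by having setup.py read __version__ out of the package file. A sketch of that idea, assuming the record's reducer/__init__.py layout:
# setup.py (sketch)
import re
from pathlib import Path

init_py = Path("reducer/__init__.py").read_text()
version = re.search(r"__version__\s*=\s*'([^']+)'", init_py).group(1)

# setup(name="reducer", version=version, ...)
print("packaging version", version)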
36021ba78d84dbb3aef8ea54369f88f6461eced6
|
history_rewrite_scripts/config.py
|
history_rewrite_scripts/config.py
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
AUTOMERGER_NAME = 'Chromium+Blink automerger'
AUTOMERGER_EMAIL = 'chrome-blink-automerger@chromium.org'
BLINK_REPO_URL = 'https://chromium.googlesource.com/chromium/blink.git'
CHROMIUM_REPO_URL = 'https://chromium.googlesource.com/chromium/src.git'
# 'ref/in/chromium' -> 'ref/in/blink'
BRANCHES_TO_MERGE = [
('refs/heads/master', 'refs/heads/master'),
('refs/branch-heads/2214', 'refs/branch-heads/chromium/2214'),
('refs/branch-heads/2272', 'refs/branch-heads/chromium/2272'),
('refs/branch-heads/2311', 'refs/branch-heads/chromium/2311'),
]
MERGE_MSG = """Merge Chromium + Blink git repositories
Chromium SHA1: %(chromium_sha)s
Chromium position: %(chromium_branch)s@{#%(chromium_pos)s}
Blink SHA1: %(blink_sha)s
Blink revision: %(blink_branch)s@%(blink_rev)s
BUG=431458
Cr-Commit-Position: %(chromium_branch)s@{#%(chromium_next_pos)s}
"""
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
AUTOMERGER_NAME = 'Chromium+Blink automerger'
AUTOMERGER_EMAIL = 'chrome-blink-automerger@chromium.org'
BLINK_REPO_URL = 'https://chromium.googlesource.com/chromium/blink.git'
CHROMIUM_REPO_URL = 'https://chromium.googlesource.com/chromium/src.git'
# 'ref/in/chromium' -> 'ref/in/blink'
BRANCHES_TO_MERGE = [
('refs/heads/master', 'refs/heads/master'),
('refs/branch-heads/2311', 'refs/branch-heads/chromium/2311'),
('refs/branch-heads/2357', 'refs/branch-heads/chromium/2357'),
]
MERGE_MSG = """Merge Chromium + Blink git repositories
Chromium SHA1: %(chromium_sha)s
Chromium position: %(chromium_branch)s@{#%(chromium_pos)s}
Blink SHA1: %(blink_sha)s
Blink revision: %(blink_branch)s@%(blink_rev)s
BUG=431458
Cr-Commit-Position: %(chromium_branch)s@{#%(chromium_next_pos)s}
"""
|
Switch to 2311 + 2357 branches
|
Switch to 2311 + 2357 branches
|
Python
|
bsd-3-clause
|
primiano/chrome-blink-automerger
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
AUTOMERGER_NAME = 'Chromium+Blink automerger'
AUTOMERGER_EMAIL = 'chrome-blink-automerger@chromium.org'
BLINK_REPO_URL = 'https://chromium.googlesource.com/chromium/blink.git'
CHROMIUM_REPO_URL = 'https://chromium.googlesource.com/chromium/src.git'
# 'ref/in/chromium' -> 'ref/in/blink'
BRANCHES_TO_MERGE = [
('refs/heads/master', 'refs/heads/master'),
('refs/branch-heads/2214', 'refs/branch-heads/chromium/2214'),
('refs/branch-heads/2272', 'refs/branch-heads/chromium/2272'),
('refs/branch-heads/2311', 'refs/branch-heads/chromium/2311'),
]
MERGE_MSG = """Merge Chromium + Blink git repositories
Chromium SHA1: %(chromium_sha)s
Chromium position: %(chromium_branch)s@{#%(chromium_pos)s}
Blink SHA1: %(blink_sha)s
Blink revision: %(blink_branch)s@%(blink_rev)s
BUG=431458
Cr-Commit-Position: %(chromium_branch)s@{#%(chromium_next_pos)s}
"""
Switch to 2311 + 2357 branches
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
AUTOMERGER_NAME = 'Chromium+Blink automerger'
AUTOMERGER_EMAIL = 'chrome-blink-automerger@chromium.org'
BLINK_REPO_URL = 'https://chromium.googlesource.com/chromium/blink.git'
CHROMIUM_REPO_URL = 'https://chromium.googlesource.com/chromium/src.git'
# 'ref/in/chromium' -> 'ref/in/blink'
BRANCHES_TO_MERGE = [
('refs/heads/master', 'refs/heads/master'),
('refs/branch-heads/2311', 'refs/branch-heads/chromium/2311'),
('refs/branch-heads/2357', 'refs/branch-heads/chromium/2357'),
]
MERGE_MSG = """Merge Chromium + Blink git repositories
Chromium SHA1: %(chromium_sha)s
Chromium position: %(chromium_branch)s@{#%(chromium_pos)s}
Blink SHA1: %(blink_sha)s
Blink revision: %(blink_branch)s@%(blink_rev)s
BUG=431458
Cr-Commit-Position: %(chromium_branch)s@{#%(chromium_next_pos)s}
"""
|
<commit_before># Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
AUTOMERGER_NAME = 'Chromium+Blink automerger'
AUTOMERGER_EMAIL = 'chrome-blink-automerger@chromium.org'
BLINK_REPO_URL = 'https://chromium.googlesource.com/chromium/blink.git'
CHROMIUM_REPO_URL = 'https://chromium.googlesource.com/chromium/src.git'
# 'ref/in/chromium' -> 'ref/in/blink'
BRANCHES_TO_MERGE = [
('refs/heads/master', 'refs/heads/master'),
('refs/branch-heads/2214', 'refs/branch-heads/chromium/2214'),
('refs/branch-heads/2272', 'refs/branch-heads/chromium/2272'),
('refs/branch-heads/2311', 'refs/branch-heads/chromium/2311'),
]
MERGE_MSG = """Merge Chromium + Blink git repositories
Chromium SHA1: %(chromium_sha)s
Chromium position: %(chromium_branch)s@{#%(chromium_pos)s}
Blink SHA1: %(blink_sha)s
Blink revision: %(blink_branch)s@%(blink_rev)s
BUG=431458
Cr-Commit-Position: %(chromium_branch)s@{#%(chromium_next_pos)s}
"""
<commit_msg>Switch to 2311 + 2357 branches<commit_after>
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
AUTOMERGER_NAME = 'Chromium+Blink automerger'
AUTOMERGER_EMAIL = 'chrome-blink-automerger@chromium.org'
BLINK_REPO_URL = 'https://chromium.googlesource.com/chromium/blink.git'
CHROMIUM_REPO_URL = 'https://chromium.googlesource.com/chromium/src.git'
# 'ref/in/chromium' -> 'ref/in/blink'
BRANCHES_TO_MERGE = [
('refs/heads/master', 'refs/heads/master'),
('refs/branch-heads/2311', 'refs/branch-heads/chromium/2311'),
('refs/branch-heads/2357', 'refs/branch-heads/chromium/2357'),
]
MERGE_MSG = """Merge Chromium + Blink git repositories
Chromium SHA1: %(chromium_sha)s
Chromium position: %(chromium_branch)s@{#%(chromium_pos)s}
Blink SHA1: %(blink_sha)s
Blink revision: %(blink_branch)s@%(blink_rev)s
BUG=431458
Cr-Commit-Position: %(chromium_branch)s@{#%(chromium_next_pos)s}
"""
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
AUTOMERGER_NAME = 'Chromium+Blink automerger'
AUTOMERGER_EMAIL = 'chrome-blink-automerger@chromium.org'
BLINK_REPO_URL = 'https://chromium.googlesource.com/chromium/blink.git'
CHROMIUM_REPO_URL = 'https://chromium.googlesource.com/chromium/src.git'
# 'ref/in/chromium' -> 'ref/in/blink'
BRANCHES_TO_MERGE = [
('refs/heads/master', 'refs/heads/master'),
('refs/branch-heads/2214', 'refs/branch-heads/chromium/2214'),
('refs/branch-heads/2272', 'refs/branch-heads/chromium/2272'),
('refs/branch-heads/2311', 'refs/branch-heads/chromium/2311'),
]
MERGE_MSG = """Merge Chromium + Blink git repositories
Chromium SHA1: %(chromium_sha)s
Chromium position: %(chromium_branch)s@{#%(chromium_pos)s}
Blink SHA1: %(blink_sha)s
Blink revision: %(blink_branch)s@%(blink_rev)s
BUG=431458
Cr-Commit-Position: %(chromium_branch)s@{#%(chromium_next_pos)s}
"""
Switch to 2311 + 2357 branches# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
AUTOMERGER_NAME = 'Chromium+Blink automerger'
AUTOMERGER_EMAIL = 'chrome-blink-automerger@chromium.org'
BLINK_REPO_URL = 'https://chromium.googlesource.com/chromium/blink.git'
CHROMIUM_REPO_URL = 'https://chromium.googlesource.com/chromium/src.git'
# 'ref/in/chromium' -> 'ref/in/blink'
BRANCHES_TO_MERGE = [
('refs/heads/master', 'refs/heads/master'),
('refs/branch-heads/2311', 'refs/branch-heads/chromium/2311'),
('refs/branch-heads/2357', 'refs/branch-heads/chromium/2357'),
]
MERGE_MSG = """Merge Chromium + Blink git repositories
Chromium SHA1: %(chromium_sha)s
Chromium position: %(chromium_branch)s@{#%(chromium_pos)s}
Blink SHA1: %(blink_sha)s
Blink revision: %(blink_branch)s@%(blink_rev)s
BUG=431458
Cr-Commit-Position: %(chromium_branch)s@{#%(chromium_next_pos)s}
"""
|
<commit_before># Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
AUTOMERGER_NAME = 'Chromium+Blink automerger'
AUTOMERGER_EMAIL = 'chrome-blink-automerger@chromium.org'
BLINK_REPO_URL = 'https://chromium.googlesource.com/chromium/blink.git'
CHROMIUM_REPO_URL = 'https://chromium.googlesource.com/chromium/src.git'
# 'ref/in/chromium' -> 'ref/in/blink'
BRANCHES_TO_MERGE = [
('refs/heads/master', 'refs/heads/master'),
('refs/branch-heads/2214', 'refs/branch-heads/chromium/2214'),
('refs/branch-heads/2272', 'refs/branch-heads/chromium/2272'),
('refs/branch-heads/2311', 'refs/branch-heads/chromium/2311'),
]
MERGE_MSG = """Merge Chromium + Blink git repositories
Chromium SHA1: %(chromium_sha)s
Chromium position: %(chromium_branch)s@{#%(chromium_pos)s}
Blink SHA1: %(blink_sha)s
Blink revision: %(blink_branch)s@%(blink_rev)s
BUG=431458
Cr-Commit-Position: %(chromium_branch)s@{#%(chromium_next_pos)s}
"""
<commit_msg>Switch to 2311 + 2357 branches<commit_after># Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
AUTOMERGER_NAME = 'Chromium+Blink automerger'
AUTOMERGER_EMAIL = 'chrome-blink-automerger@chromium.org'
BLINK_REPO_URL = 'https://chromium.googlesource.com/chromium/blink.git'
CHROMIUM_REPO_URL = 'https://chromium.googlesource.com/chromium/src.git'
# 'ref/in/chromium' -> 'ref/in/blink'
BRANCHES_TO_MERGE = [
('refs/heads/master', 'refs/heads/master'),
('refs/branch-heads/2311', 'refs/branch-heads/chromium/2311'),
('refs/branch-heads/2357', 'refs/branch-heads/chromium/2357'),
]
MERGE_MSG = """Merge Chromium + Blink git repositories
Chromium SHA1: %(chromium_sha)s
Chromium position: %(chromium_branch)s@{#%(chromium_pos)s}
Blink SHA1: %(blink_sha)s
Blink revision: %(blink_branch)s@%(blink_rev)s
BUG=431458
Cr-Commit-Position: %(chromium_branch)s@{#%(chromium_next_pos)s}
"""
|
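Each entry pairs a Chromium ref with the Blink ref it is merged with (per the record's 'ref/in/chromium' -> 'ref/in/blink' comment); a tiny loop showing how such a list is consumed, using the post-commit values:
BRANCHES_TO_MERGE = [
    ('refs/heads/master', 'refs/heads/master'),
    ('refs/branch-heads/2311', 'refs/branch-heads/chromium/2311'),
    ('refs/branch-heads/2357', 'refs/branch-heads/chromium/2357'),
]

for chromium_ref, blink_ref in BRANCHES_TO_MERGE:
    print("chromium %-28s <-> blink %s" % (chromium_ref, blink_ref))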
ce5cc3d899ef6a07b46794bbcf689ca52e9d59ae
|
txircd/modules/core/channel_statuses.py
|
txircd/modules/core/channel_statuses.py
|
from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class StatusReport(ModuleData):
implements(IPlugin, IModuleData)
name = "ChannelStatusReport"
core = True
def actions(self):
return [ ("channelstatuses", 1, self.statuses) ]
def statuses(self, channel, user):
if user not in channel.users:
return None
if not channel.users[user]:
return ""
return self.ircd.channelStatuses[channel.users[user]["status"]][0]
statuses = StatusReport()
|
from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class StatusReport(ModuleData):
implements(IPlugin, IModuleData)
name = "ChannelStatusReport"
core = True
def actions(self):
return [ ("channelstatuses", 1, self.statuses) ]
def statuses(self, channel, user):
if user not in channel.users:
return None
if not channel.users[user]:
return ""
if not channel.users[user]["status"]:
return ""
return self.ircd.channelStatuses[channel.users[user]["status"][0]][0]
statuses = StatusReport()
|
Fix check on the user's status when retrieving it
|
Fix check on the user's status when retrieving it
|
Python
|
bsd-3-clause
|
ElementalAlchemist/txircd,Heufneutje/txircd
|
from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class StatusReport(ModuleData):
implements(IPlugin, IModuleData)
name = "ChannelStatusReport"
core = True
def actions(self):
return [ ("channelstatuses", 1, self.statuses) ]
def statuses(self, channel, user):
if user not in channel.users:
return None
if not channel.users[user]:
return ""
return self.ircd.channelStatuses[channel.users[user]["status"]][0]
statuses = StatusReport()Fix check on the user's status when retrieving it
|
from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class StatusReport(ModuleData):
implements(IPlugin, IModuleData)
name = "ChannelStatusReport"
core = True
def actions(self):
return [ ("channelstatuses", 1, self.statuses) ]
def statuses(self, channel, user):
if user not in channel.users:
return None
if not channel.users[user]:
return ""
if not channel.users[user]["status"]:
return ""
return self.ircd.channelStatuses[channel.users[user]["status"][0]][0]
statuses = StatusReport()
|
<commit_before>from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class StatusReport(ModuleData):
implements(IPlugin, IModuleData)
name = "ChannelStatusReport"
core = True
def actions(self):
return [ ("channelstatuses", 1, self.statuses) ]
def statuses(self, channel, user):
if user not in channel.users:
return None
if not channel.users[user]:
return ""
return self.ircd.channelStatuses[channel.users[user]["status"]][0]
statuses = StatusReport()<commit_msg>Fix check on the user's status when retrieving it<commit_after>
|
from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class StatusReport(ModuleData):
implements(IPlugin, IModuleData)
name = "ChannelStatusReport"
core = True
def actions(self):
return [ ("channelstatuses", 1, self.statuses) ]
def statuses(self, channel, user):
if user not in channel.users:
return None
if not channel.users[user]:
return ""
if not channel.users[user]["status"]:
return ""
return self.ircd.channelStatuses[channel.users[user]["status"][0]][0]
statuses = StatusReport()
|
from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class StatusReport(ModuleData):
implements(IPlugin, IModuleData)
name = "ChannelStatusReport"
core = True
def actions(self):
return [ ("channelstatuses", 1, self.statuses) ]
def statuses(self, channel, user):
if user not in channel.users:
return None
if not channel.users[user]:
return ""
return self.ircd.channelStatuses[channel.users[user]["status"]][0]
statuses = StatusReport()Fix check on the user's status when retrieving itfrom twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class StatusReport(ModuleData):
implements(IPlugin, IModuleData)
name = "ChannelStatusReport"
core = True
def actions(self):
return [ ("channelstatuses", 1, self.statuses) ]
def statuses(self, channel, user):
if user not in channel.users:
return None
if not channel.users[user]:
return ""
if not channel.users[user]["status"]:
return ""
return self.ircd.channelStatuses[channel.users[user]["status"][0]][0]
statuses = StatusReport()
|
<commit_before>from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class StatusReport(ModuleData):
implements(IPlugin, IModuleData)
name = "ChannelStatusReport"
core = True
def actions(self):
return [ ("channelstatuses", 1, self.statuses) ]
def statuses(self, channel, user):
if user not in channel.users:
return None
if not channel.users[user]:
return ""
return self.ircd.channelStatuses[channel.users[user]["status"]][0]
statuses = StatusReport()<commit_msg>Fix check on the user's status when retrieving it<commit_after>from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class StatusReport(ModuleData):
implements(IPlugin, IModuleData)
name = "ChannelStatusReport"
core = True
def actions(self):
return [ ("channelstatuses", 1, self.statuses) ]
def statuses(self, channel, user):
if user not in channel.users:
return None
if not channel.users[user]:
return ""
if not channel.users[user]["status"]:
return ""
return self.ircd.channelStatuses[channel.users[user]["status"][0]][0]
statuses = StatusReport()
|
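The one-character change indexes ["status"][0], mapping only the first character of the user's status string (presumably the highest-ranked mode) to its prefix symbol, and adds a guard for an empty status; a toy sketch of that lookup with an invented status table:
channel_statuses = {"o": ("@", 100), "v": ("+", 10)}   # mode -> (prefix, rank), invented values

def status_prefix(user_status):
    if not user_status:                 # the extra guard this commit adds
        return ""
    return channel_statuses[user_status[0]][0]

print(status_prefix("ov"))   # '@'
print(status_prefix(""))     # ''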
199a64fbed87a8ae43469bb48f8a4e16579f0b64
|
partner_coc/__openerp__.py
|
partner_coc/__openerp__.py
|
# -*- coding: utf-8 -*-
# Copyright 2018 Onestein (<http://www.onestein.eu>)
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
{
'name': 'Partner CoC',
'summary': "Adds a field 'Chamber Of Commerce Registration Number' to "
"partner",
'version': '8.0.1.0.0',
'category': 'Web',
'author': 'Onestein,Odoo Community Association (OCA)',
'website': 'http://www.onestein.eu',
'license': 'AGPL-3',
'depends': [
'partner_identification',
],
'data': [
'data/res_partner_id_category_data.xml',
'views/res_partner_view.xml',
],
'installable': True,
}
|
# -*- coding: utf-8 -*-
# Copyright 2018 Onestein (<http://www.onestein.eu>)
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
{
'name': 'Partner CoC',
'summary': "Adds a field 'Chamber Of Commerce Registration Number' to "
"partner",
'version': '8.0.1.0.0',
'category': 'Web',
'author': 'Onestein,Odoo Community Association (OCA)',
'website': 'https://github.com/OCA/partner-contact',
'license': 'AGPL-3',
'depends': [
'partner_identification',
],
'data': [
'data/res_partner_id_category_data.xml',
'views/res_partner_view.xml',
],
'installable': True,
}
|
Set website in manifest to OCA repository
|
[FIX] Set website in manifest to OCA repository
|
Python
|
agpl-3.0
|
acsone/partner-contact
|
# -*- coding: utf-8 -*-
# Copyright 2018 Onestein (<http://www.onestein.eu>)
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
{
'name': 'Partner CoC',
'summary': "Adds a field 'Chamber Of Commerce Registration Number' to "
"partner",
'version': '8.0.1.0.0',
'category': 'Web',
'author': 'Onestein,Odoo Community Association (OCA)',
'website': 'http://www.onestein.eu',
'license': 'AGPL-3',
'depends': [
'partner_identification',
],
'data': [
'data/res_partner_id_category_data.xml',
'views/res_partner_view.xml',
],
'installable': True,
}
[FIX] Set website in manifest to OCA repository
|
# -*- coding: utf-8 -*-
# Copyright 2018 Onestein (<http://www.onestein.eu>)
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
{
'name': 'Partner CoC',
'summary': "Adds a field 'Chamber Of Commerce Registration Number' to "
"partner",
'version': '8.0.1.0.0',
'category': 'Web',
'author': 'Onestein,Odoo Community Association (OCA)',
'website': 'https://github.com/OCA/partner-contact',
'license': 'AGPL-3',
'depends': [
'partner_identification',
],
'data': [
'data/res_partner_id_category_data.xml',
'views/res_partner_view.xml',
],
'installable': True,
}
|
<commit_before># -*- coding: utf-8 -*-
# Copyright 2018 Onestein (<http://www.onestein.eu>)
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
{
'name': 'Partner CoC',
'summary': "Adds a field 'Chamber Of Commerce Registration Number' to "
"partner",
'version': '8.0.1.0.0',
'category': 'Web',
'author': 'Onestein,Odoo Community Association (OCA)',
'website': 'http://www.onestein.eu',
'license': 'AGPL-3',
'depends': [
'partner_identification',
],
'data': [
'data/res_partner_id_category_data.xml',
'views/res_partner_view.xml',
],
'installable': True,
}
<commit_msg>[FIX] Set website in manifest to OCA repository<commit_after>
|
# -*- coding: utf-8 -*-
# Copyright 2018 Onestein (<http://www.onestein.eu>)
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
{
'name': 'Partner CoC',
'summary': "Adds a field 'Chamber Of Commerce Registration Number' to "
"partner",
'version': '8.0.1.0.0',
'category': 'Web',
'author': 'Onestein,Odoo Community Association (OCA)',
'website': 'https://github.com/OCA/partner-contact',
'license': 'AGPL-3',
'depends': [
'partner_identification',
],
'data': [
'data/res_partner_id_category_data.xml',
'views/res_partner_view.xml',
],
'installable': True,
}
|
# -*- coding: utf-8 -*-
# Copyright 2018 Onestein (<http://www.onestein.eu>)
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
{
'name': 'Partner CoC',
'summary': "Adds a field 'Chamber Of Commerce Registration Number' to "
"partner",
'version': '8.0.1.0.0',
'category': 'Web',
'author': 'Onestein,Odoo Community Association (OCA)',
'website': 'http://www.onestein.eu',
'license': 'AGPL-3',
'depends': [
'partner_identification',
],
'data': [
'data/res_partner_id_category_data.xml',
'views/res_partner_view.xml',
],
'installable': True,
}
[FIX] Set website in manifest to OCA repository# -*- coding: utf-8 -*-
# Copyright 2018 Onestein (<http://www.onestein.eu>)
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
{
'name': 'Partner CoC',
'summary': "Adds a field 'Chamber Of Commerce Registration Number' to "
"partner",
'version': '8.0.1.0.0',
'category': 'Web',
'author': 'Onestein,Odoo Community Association (OCA)',
'website': 'https://github.com/OCA/partner-contact',
'license': 'AGPL-3',
'depends': [
'partner_identification',
],
'data': [
'data/res_partner_id_category_data.xml',
'views/res_partner_view.xml',
],
'installable': True,
}
|
<commit_before># -*- coding: utf-8 -*-
# Copyright 2018 Onestein (<http://www.onestein.eu>)
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
{
'name': 'Partner CoC',
'summary': "Adds a field 'Chamber Of Commerce Registration Number' to "
"partner",
'version': '8.0.1.0.0',
'category': 'Web',
'author': 'Onestein,Odoo Community Association (OCA)',
'website': 'http://www.onestein.eu',
'license': 'AGPL-3',
'depends': [
'partner_identification',
],
'data': [
'data/res_partner_id_category_data.xml',
'views/res_partner_view.xml',
],
'installable': True,
}
<commit_msg>[FIX] Set website in manifest to OCA repository<commit_after># -*- coding: utf-8 -*-
# Copyright 2018 Onestein (<http://www.onestein.eu>)
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
{
'name': 'Partner CoC',
'summary': "Adds a field 'Chamber Of Commerce Registration Number' to "
"partner",
'version': '8.0.1.0.0',
'category': 'Web',
'author': 'Onestein,Odoo Community Association (OCA)',
'website': 'https://github.com/OCA/partner-contact',
'license': 'AGPL-3',
'depends': [
'partner_identification',
],
'data': [
'data/res_partner_id_category_data.xml',
'views/res_partner_view.xml',
],
'installable': True,
}
|
256661c4a78f1238fd032abd53185ed80f787f7f
|
incuna_test_utils/testcases.py
|
incuna_test_utils/testcases.py
|
from django.contrib.auth.models import AnonymousUser
from django.db.models.base import ModelBase
from django.test import TestCase
from rest_framework.test import APIRequestFactory, force_authenticate
from . import factories
class AbstractModelTestCase(TestCase):
"""
Base class for tests of model mixins. To use, subclass and specify
the mixin class variable. A model using the mixin will be made
available in self.model.
From http://michael.mior.ca/2012/01/14/unit-testing-django-model-mixins/
via http://stackoverflow.com/a/9678200/400691, modified as we don't need an
object in the database.
"""
def setUp(self):
# Create a dummy model which extends the mixin
self.model = ModelBase(
'__TestModel__' + self.mixin.__name__,
(self.mixin,),
{'__module__': self.mixin.__module__},
)
class APIRequestTestCase(TestCase):
user_factory = factories.UserFactory
def create_request(self, method='get', *, url='/', user=None, auth=True, **kwargs):
if not user:
if auth:
user = self.user_factory.create()
else:
user = AnonymousUser()
kwargs['format'] = 'json'
request = getattr(APIRequestFactory(), method)(url, **kwargs)
request.user = user
if auth:
force_authenticate(request, user)
if 'data' in kwargs:
request.DATA = kwargs['data']
return request
|
from django.contrib.auth.models import AnonymousUser
from django.db.models.base import ModelBase
from django.test import TestCase
from rest_framework.test import APIRequestFactory, force_authenticate
from . import factories
class AbstractModelTestCase(TestCase):
"""
Base class for tests of model mixins. To use, subclass and specify
the mixin class variable. A model using the mixin will be made
available in self.model.
From http://michael.mior.ca/2012/01/14/unit-testing-django-model-mixins/
via http://stackoverflow.com/a/9678200/400691, modified as we don't need an
object in the database.
"""
def setUp(self):
# Create a dummy model which extends the mixin
self.model = ModelBase(
'__TestModel__' + self.mixin.__name__,
(self.mixin,),
{'__module__': self.mixin.__module__},
)
class APIRequestTestCase(TestCase):
user_factory = factories.UserFactory
def create_request(self, method='get', url='/', user=None, auth=True, **kwargs):
if not user:
if auth:
user = self.user_factory.create()
else:
user = AnonymousUser()
kwargs['format'] = 'json'
request = getattr(APIRequestFactory(), method)(url, **kwargs)
request.user = user
if auth:
force_authenticate(request, user)
if 'data' in kwargs:
request.DATA = kwargs['data']
return request
|
Remove python 3 syntax for python 2 compatibility
|
Remove python 3 syntax for python 2 compatibility
|
Python
|
bsd-2-clause
|
incuna/incuna-test-utils,incuna/incuna-test-utils
|
from django.contrib.auth.models import AnonymousUser
from django.db.models.base import ModelBase
from django.test import TestCase
from rest_framework.test import APIRequestFactory, force_authenticate
from . import factories
class AbstractModelTestCase(TestCase):
"""
Base class for tests of model mixins. To use, subclass and specify
the mixin class variable. A model using the mixin will be made
available in self.model.
From http://michael.mior.ca/2012/01/14/unit-testing-django-model-mixins/
via http://stackoverflow.com/a/9678200/400691, modified as we don't need an
object in the database.
"""
def setUp(self):
# Create a dummy model which extends the mixin
self.model = ModelBase(
'__TestModel__' + self.mixin.__name__,
(self.mixin,),
{'__module__': self.mixin.__module__},
)
class APIRequestTestCase(TestCase):
user_factory = factories.UserFactory
def create_request(self, method='get', *, url='/', user=None, auth=True, **kwargs):
if not user:
if auth:
user = self.user_factory.create()
else:
user = AnonymousUser()
kwargs['format'] = 'json'
request = getattr(APIRequestFactory(), method)(url, **kwargs)
request.user = user
if auth:
force_authenticate(request, user)
if 'data' in kwargs:
request.DATA = kwargs['data']
return request
Remove python 3 syntax for python 2 compatibility
|
from django.contrib.auth.models import AnonymousUser
from django.db.models.base import ModelBase
from django.test import TestCase
from rest_framework.test import APIRequestFactory, force_authenticate
from . import factories
class AbstractModelTestCase(TestCase):
"""
Base class for tests of model mixins. To use, subclass and specify
the mixin class variable. A model using the mixin will be made
available in self.model.
From http://michael.mior.ca/2012/01/14/unit-testing-django-model-mixins/
via http://stackoverflow.com/a/9678200/400691, modified as we don't need an
object in the database.
"""
def setUp(self):
# Create a dummy model which extends the mixin
self.model = ModelBase(
'__TestModel__' + self.mixin.__name__,
(self.mixin,),
{'__module__': self.mixin.__module__},
)
class APIRequestTestCase(TestCase):
user_factory = factories.UserFactory
def create_request(self, method='get', url='/', user=None, auth=True, **kwargs):
if not user:
if auth:
user = self.user_factory.create()
else:
user = AnonymousUser()
kwargs['format'] = 'json'
request = getattr(APIRequestFactory(), method)(url, **kwargs)
request.user = user
if auth:
force_authenticate(request, user)
if 'data' in kwargs:
request.DATA = kwargs['data']
return request
|
<commit_before>from django.contrib.auth.models import AnonymousUser
from django.db.models.base import ModelBase
from django.test import TestCase
from rest_framework.test import APIRequestFactory, force_authenticate
from . import factories
class AbstractModelTestCase(TestCase):
"""
Base class for tests of model mixins. To use, subclass and specify
the mixin class variable. A model using the mixin will be made
available in self.model.
From http://michael.mior.ca/2012/01/14/unit-testing-django-model-mixins/
via http://stackoverflow.com/a/9678200/400691, modified as we don't need an
object in the database.
"""
def setUp(self):
# Create a dummy model which extends the mixin
self.model = ModelBase(
'__TestModel__' + self.mixin.__name__,
(self.mixin,),
{'__module__': self.mixin.__module__},
)
class APIRequestTestCase(TestCase):
user_factory = factories.UserFactory
def create_request(self, method='get', *, url='/', user=None, auth=True, **kwargs):
if not user:
if auth:
user = self.user_factory.create()
else:
user = AnonymousUser()
kwargs['format'] = 'json'
request = getattr(APIRequestFactory(), method)(url, **kwargs)
request.user = user
if auth:
force_authenticate(request, user)
if 'data' in kwargs:
request.DATA = kwargs['data']
return request
<commit_msg>Remove python 3 syntax for python 2 compatibility<commit_after>
|
from django.contrib.auth.models import AnonymousUser
from django.db.models.base import ModelBase
from django.test import TestCase
from rest_framework.test import APIRequestFactory, force_authenticate
from . import factories
class AbstractModelTestCase(TestCase):
"""
Base class for tests of model mixins. To use, subclass and specify
the mixin class variable. A model using the mixin will be made
available in self.model.
From http://michael.mior.ca/2012/01/14/unit-testing-django-model-mixins/
via http://stackoverflow.com/a/9678200/400691, modified as we don't need an
object in the database.
"""
def setUp(self):
# Create a dummy model which extends the mixin
self.model = ModelBase(
'__TestModel__' + self.mixin.__name__,
(self.mixin,),
{'__module__': self.mixin.__module__},
)
class APIRequestTestCase(TestCase):
user_factory = factories.UserFactory
def create_request(self, method='get', url='/', user=None, auth=True, **kwargs):
if not user:
if auth:
user = self.user_factory.create()
else:
user = AnonymousUser()
kwargs['format'] = 'json'
request = getattr(APIRequestFactory(), method)(url, **kwargs)
request.user = user
if auth:
force_authenticate(request, user)
if 'data' in kwargs:
request.DATA = kwargs['data']
return request
|
from django.contrib.auth.models import AnonymousUser
from django.db.models.base import ModelBase
from django.test import TestCase
from rest_framework.test import APIRequestFactory, force_authenticate
from . import factories
class AbstractModelTestCase(TestCase):
"""
Base class for tests of model mixins. To use, subclass and specify
the mixin class variable. A model using the mixin will be made
available in self.model.
From http://michael.mior.ca/2012/01/14/unit-testing-django-model-mixins/
via http://stackoverflow.com/a/9678200/400691, modified as we don't need an
object in the database.
"""
def setUp(self):
# Create a dummy model which extends the mixin
self.model = ModelBase(
'__TestModel__' + self.mixin.__name__,
(self.mixin,),
{'__module__': self.mixin.__module__},
)
class APIRequestTestCase(TestCase):
user_factory = factories.UserFactory
def create_request(self, method='get', *, url='/', user=None, auth=True, **kwargs):
if not user:
if auth:
user = self.user_factory.create()
else:
user = AnonymousUser()
kwargs['format'] = 'json'
request = getattr(APIRequestFactory(), method)(url, **kwargs)
request.user = user
if auth:
force_authenticate(request, user)
if 'data' in kwargs:
request.DATA = kwargs['data']
return request
Remove python 3 syntax for python 2 compatibility
from django.contrib.auth.models import AnonymousUser
from django.db.models.base import ModelBase
from django.test import TestCase
from rest_framework.test import APIRequestFactory, force_authenticate
from . import factories
class AbstractModelTestCase(TestCase):
"""
Base class for tests of model mixins. To use, subclass and specify
the mixin class variable. A model using the mixin will be made
available in self.model.
From http://michael.mior.ca/2012/01/14/unit-testing-django-model-mixins/
via http://stackoverflow.com/a/9678200/400691, modified as we don't need an
object in the database.
"""
def setUp(self):
# Create a dummy model which extends the mixin
self.model = ModelBase(
'__TestModel__' + self.mixin.__name__,
(self.mixin,),
{'__module__': self.mixin.__module__},
)
class APIRequestTestCase(TestCase):
user_factory = factories.UserFactory
def create_request(self, method='get', url='/', user=None, auth=True, **kwargs):
if not user:
if auth:
user = self.user_factory.create()
else:
user = AnonymousUser()
kwargs['format'] = 'json'
request = getattr(APIRequestFactory(), method)(url, **kwargs)
request.user = user
if auth:
force_authenticate(request, user)
if 'data' in kwargs:
request.DATA = kwargs['data']
return request
|
<commit_before>from django.contrib.auth.models import AnonymousUser
from django.db.models.base import ModelBase
from django.test import TestCase
from rest_framework.test import APIRequestFactory, force_authenticate
from . import factories
class AbstractModelTestCase(TestCase):
"""
Base class for tests of model mixins. To use, subclass and specify
the mixin class variable. A model using the mixin will be made
available in self.model.
From http://michael.mior.ca/2012/01/14/unit-testing-django-model-mixins/
via http://stackoverflow.com/a/9678200/400691, modified as we don't need an
object in the database.
"""
def setUp(self):
# Create a dummy model which extends the mixin
self.model = ModelBase(
'__TestModel__' + self.mixin.__name__,
(self.mixin,),
{'__module__': self.mixin.__module__},
)
class APIRequestTestCase(TestCase):
user_factory = factories.UserFactory
def create_request(self, method='get', *, url='/', user=None, auth=True, **kwargs):
if not user:
if auth:
user = self.user_factory.create()
else:
user = AnonymousUser()
kwargs['format'] = 'json'
request = getattr(APIRequestFactory(), method)(url, **kwargs)
request.user = user
if auth:
force_authenticate(request, user)
if 'data' in kwargs:
request.DATA = kwargs['data']
return request
<commit_msg>Remove python 3 syntax for python 2 compatibility<commit_after>from django.contrib.auth.models import AnonymousUser
from django.db.models.base import ModelBase
from django.test import TestCase
from rest_framework.test import APIRequestFactory, force_authenticate
from . import factories
class AbstractModelTestCase(TestCase):
"""
Base class for tests of model mixins. To use, subclass and specify
the mixin class variable. A model using the mixin will be made
available in self.model.
From http://michael.mior.ca/2012/01/14/unit-testing-django-model-mixins/
via http://stackoverflow.com/a/9678200/400691, modified as we don't need an
object in the database.
"""
def setUp(self):
# Create a dummy model which extends the mixin
self.model = ModelBase(
'__TestModel__' + self.mixin.__name__,
(self.mixin,),
{'__module__': self.mixin.__module__},
)
class APIRequestTestCase(TestCase):
user_factory = factories.UserFactory
def create_request(self, method='get', url='/', user=None, auth=True, **kwargs):
if not user:
if auth:
user = self.user_factory.create()
else:
user = AnonymousUser()
kwargs['format'] = 'json'
request = getattr(APIRequestFactory(), method)(url, **kwargs)
request.user = user
if auth:
force_authenticate(request, user)
if 'data' in kwargs:
request.DATA = kwargs['data']
return request
|
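The change above removes the bare '*' marker because keyword-only parameters are Python 3 syntax and raise a SyntaxError on Python 2. The sketch below is illustrative only (hypothetical function, no Django involved) and shows one portable way to keep the options keyword-driven without that marker.
def create_request(method='get', **kwargs):
    # Pop the options explicitly instead of declaring them keyword-only.
    url = kwargs.pop('url', '/')
    auth = kwargs.pop('auth', True)
    return method, url, auth, kwargs

print(create_request('post', url='/items/', data={'name': 'example'}))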
ecbdb0389feb18d30524ab071db69a184710954d
|
past/types/__init__.py
|
past/types/__init__.py
|
from past import utils
if utils.PY2:
import __builtin__
basestring = __builtin__.basestring
dict = __builtin__.dict
str = __builtin__.str
long = __builtin__.long
unicode = __builtin__.unicode
__all__ = []
else:
from .basestring import basestring
from .olddict import olddict as dict
from .oldstr import oldstr as str
long = int
unicode = str
# from .unicode import unicode
__all__ = ['basestring', 'dict', 'str', 'long', 'unicode']
|
from past import utils
if utils.PY2:
import __builtin__
basestring = __builtin__.basestring
dict = __builtin__.dict
str = __builtin__.str
long = __builtin__.long
unicode = __builtin__.unicode
__all__ = []
else:
from .basestring import basestring
from .olddict import olddict
from .oldstr import oldstr
long = int
unicode = str
# from .unicode import unicode
__all__ = ['basestring', 'olddict', 'oldstr', 'long', 'unicode']
|
Fix imports of past.builtins types
|
Fix imports of past.builtins types
|
Python
|
mit
|
michaelpacer/python-future,krischer/python-future,PythonCharmers/python-future,QuLogic/python-future,PythonCharmers/python-future,krischer/python-future,QuLogic/python-future,michaelpacer/python-future
|
from past import utils
if utils.PY2:
import __builtin__
basestring = __builtin__.basestring
dict = __builtin__.dict
str = __builtin__.str
long = __builtin__.long
unicode = __builtin__.unicode
__all__ = []
else:
from .basestring import basestring
from .olddict import olddict as dict
from .oldstr import oldstr as str
long = int
unicode = str
# from .unicode import unicode
__all__ = ['basestring', 'dict', 'str', 'long', 'unicode']
Fix imports of past.builtins types
|
from past import utils
if utils.PY2:
import __builtin__
basestring = __builtin__.basestring
dict = __builtin__.dict
str = __builtin__.str
long = __builtin__.long
unicode = __builtin__.unicode
__all__ = []
else:
from .basestring import basestring
from .olddict import olddict
from .oldstr import oldstr
long = int
unicode = str
# from .unicode import unicode
__all__ = ['basestring', 'olddict', 'oldstr', 'long', 'unicode']
|
<commit_before>from past import utils
if utils.PY2:
import __builtin__
basestring = __builtin__.basestring
dict = __builtin__.dict
str = __builtin__.str
long = __builtin__.long
unicode = __builtin__.unicode
__all__ = []
else:
from .basestring import basestring
from .olddict import olddict as dict
from .oldstr import oldstr as str
long = int
unicode = str
# from .unicode import unicode
__all__ = ['basestring', 'dict', 'str', 'long', 'unicode']
<commit_msg>Fix imports of past.builtins types<commit_after>
|
from past import utils
if utils.PY2:
import __builtin__
basestring = __builtin__.basestring
dict = __builtin__.dict
str = __builtin__.str
long = __builtin__.long
unicode = __builtin__.unicode
__all__ = []
else:
from .basestring import basestring
from .olddict import olddict
from .oldstr import oldstr
long = int
unicode = str
# from .unicode import unicode
__all__ = ['basestring', 'olddict', 'oldstr', 'long', 'unicode']
|
from past import utils
if utils.PY2:
import __builtin__
basestring = __builtin__.basestring
dict = __builtin__.dict
str = __builtin__.str
long = __builtin__.long
unicode = __builtin__.unicode
__all__ = []
else:
from .basestring import basestring
from .olddict import olddict as dict
from .oldstr import oldstr as str
long = int
unicode = str
# from .unicode import unicode
__all__ = ['basestring', 'dict', 'str', 'long', 'unicode']
Fix imports of past.builtins types
from past import utils
if utils.PY2:
import __builtin__
basestring = __builtin__.basestring
dict = __builtin__.dict
str = __builtin__.str
long = __builtin__.long
unicode = __builtin__.unicode
__all__ = []
else:
from .basestring import basestring
from .olddict import olddict
from .oldstr import oldstr
long = int
unicode = str
# from .unicode import unicode
__all__ = ['basestring', 'olddict', 'oldstr', 'long', 'unicode']
|
<commit_before>from past import utils
if utils.PY2:
import __builtin__
basestring = __builtin__.basestring
dict = __builtin__.dict
str = __builtin__.str
long = __builtin__.long
unicode = __builtin__.unicode
__all__ = []
else:
from .basestring import basestring
from .olddict import olddict as dict
from .oldstr import oldstr as str
long = int
unicode = str
# from .unicode import unicode
__all__ = ['basestring', 'dict', 'str', 'long', 'unicode']
<commit_msg>Fix imports of past.builtins types<commit_after>from past import utils
if utils.PY2:
import __builtin__
basestring = __builtin__.basestring
dict = __builtin__.dict
str = __builtin__.str
long = __builtin__.long
unicode = __builtin__.unicode
__all__ = []
else:
from .basestring import basestring
from .olddict import olddict
from .oldstr import oldstr
long = int
unicode = str
# from .unicode import unicode
__all__ = ['basestring', 'olddict', 'oldstr', 'long', 'unicode']
|
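The fix above imports olddict and oldstr under their own names instead of rebinding the builtin names dict and str, and exports them accordingly. Below is a minimal sketch of the same version-guarded export pattern; it uses sys.version_info instead of past.utils, an assumption made only to keep the snippet self-contained.
import sys

PY2 = sys.version_info[0] == 2

if PY2:
    string_types = (str, unicode)  # noqa: F821 -- 'unicode' exists only on Python 2
else:
    string_types = (str,)

__all__ = ['PY2', 'string_types']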
e86901ac2b074d42d2e388353bbe60fcdd8f0240
|
wagtail/contrib/postgres_search/apps.py
|
wagtail/contrib/postgres_search/apps.py
|
from django.apps import AppConfig
from django.core.checks import Error, Tags, register
from .utils import get_postgresql_connections, set_weights
class PostgresSearchConfig(AppConfig):
name = 'wagtail.contrib.postgres_search'
def ready(self):
@register(Tags.compatibility, Tags.database)
def check_if_postgresql(app_configs, **kwargs):
if get_postgresql_connections():
return []
return [Error('You must use a PostgreSQL database '
'to use PostgreSQL search.',
id='wagtail.contrib.postgres_search.E001')]
set_weights()
from .models import IndexEntry
IndexEntry.add_generic_relations()
|
from django.apps import AppConfig
from django.core.checks import Error, Tags, register
from .utils import get_postgresql_connections, set_weights
class PostgresSearchConfig(AppConfig):
name = 'wagtail.contrib.postgres_search'
default_auto_field = 'django.db.models.AutoField'
def ready(self):
@register(Tags.compatibility, Tags.database)
def check_if_postgresql(app_configs, **kwargs):
if get_postgresql_connections():
return []
return [Error('You must use a PostgreSQL database '
'to use PostgreSQL search.',
id='wagtail.contrib.postgres_search.E001')]
set_weights()
from .models import IndexEntry
IndexEntry.add_generic_relations()
|
Set default_auto_field in wagtail.contrib.postgres_search AppConfig
|
Set default_auto_field in wagtail.contrib.postgres_search AppConfig
Add default_auto_field = 'django.db.models.AutoField'
Co-authored-by: Nick Moreton <7f1a4658c80dbc9331efe1b3861c4063f4838748@torchbox.com>
|
Python
|
bsd-3-clause
|
jnns/wagtail,zerolab/wagtail,gasman/wagtail,gasman/wagtail,gasman/wagtail,rsalmaso/wagtail,rsalmaso/wagtail,thenewguy/wagtail,thenewguy/wagtail,jnns/wagtail,jnns/wagtail,rsalmaso/wagtail,wagtail/wagtail,mixxorz/wagtail,torchbox/wagtail,jnns/wagtail,gasman/wagtail,thenewguy/wagtail,thenewguy/wagtail,wagtail/wagtail,mixxorz/wagtail,zerolab/wagtail,torchbox/wagtail,wagtail/wagtail,mixxorz/wagtail,mixxorz/wagtail,zerolab/wagtail,torchbox/wagtail,mixxorz/wagtail,rsalmaso/wagtail,wagtail/wagtail,wagtail/wagtail,gasman/wagtail,zerolab/wagtail,thenewguy/wagtail,zerolab/wagtail,torchbox/wagtail,rsalmaso/wagtail
|
from django.apps import AppConfig
from django.core.checks import Error, Tags, register
from .utils import get_postgresql_connections, set_weights
class PostgresSearchConfig(AppConfig):
name = 'wagtail.contrib.postgres_search'
def ready(self):
@register(Tags.compatibility, Tags.database)
def check_if_postgresql(app_configs, **kwargs):
if get_postgresql_connections():
return []
return [Error('You must use a PostgreSQL database '
'to use PostgreSQL search.',
id='wagtail.contrib.postgres_search.E001')]
set_weights()
from .models import IndexEntry
IndexEntry.add_generic_relations()
Set default_auto_field in wagtail.contrib.postgres_search AppConfig
Add default_auto_field = 'django.db.models.AutoField'
Co-authored-by: Nick Moreton <7f1a4658c80dbc9331efe1b3861c4063f4838748@torchbox.com>
|
from django.apps import AppConfig
from django.core.checks import Error, Tags, register
from .utils import get_postgresql_connections, set_weights
class PostgresSearchConfig(AppConfig):
name = 'wagtail.contrib.postgres_search'
default_auto_field = 'django.db.models.AutoField'
def ready(self):
@register(Tags.compatibility, Tags.database)
def check_if_postgresql(app_configs, **kwargs):
if get_postgresql_connections():
return []
return [Error('You must use a PostgreSQL database '
'to use PostgreSQL search.',
id='wagtail.contrib.postgres_search.E001')]
set_weights()
from .models import IndexEntry
IndexEntry.add_generic_relations()
|
<commit_before>from django.apps import AppConfig
from django.core.checks import Error, Tags, register
from .utils import get_postgresql_connections, set_weights
class PostgresSearchConfig(AppConfig):
name = 'wagtail.contrib.postgres_search'
def ready(self):
@register(Tags.compatibility, Tags.database)
def check_if_postgresql(app_configs, **kwargs):
if get_postgresql_connections():
return []
return [Error('You must use a PostgreSQL database '
'to use PostgreSQL search.',
id='wagtail.contrib.postgres_search.E001')]
set_weights()
from .models import IndexEntry
IndexEntry.add_generic_relations()
<commit_msg>Set default_auto_field in wagtail.contrib.postgres_search AppConfig
Add default_auto_field = 'django.db.models.AutoField'
Co-authored-by: Nick Moreton <7f1a4658c80dbc9331efe1b3861c4063f4838748@torchbox.com><commit_after>
|
from django.apps import AppConfig
from django.core.checks import Error, Tags, register
from .utils import get_postgresql_connections, set_weights
class PostgresSearchConfig(AppConfig):
name = 'wagtail.contrib.postgres_search'
default_auto_field = 'django.db.models.AutoField'
def ready(self):
@register(Tags.compatibility, Tags.database)
def check_if_postgresql(app_configs, **kwargs):
if get_postgresql_connections():
return []
return [Error('You must use a PostgreSQL database '
'to use PostgreSQL search.',
id='wagtail.contrib.postgres_search.E001')]
set_weights()
from .models import IndexEntry
IndexEntry.add_generic_relations()
|
from django.apps import AppConfig
from django.core.checks import Error, Tags, register
from .utils import get_postgresql_connections, set_weights
class PostgresSearchConfig(AppConfig):
name = 'wagtail.contrib.postgres_search'
def ready(self):
@register(Tags.compatibility, Tags.database)
def check_if_postgresql(app_configs, **kwargs):
if get_postgresql_connections():
return []
return [Error('You must use a PostgreSQL database '
'to use PostgreSQL search.',
id='wagtail.contrib.postgres_search.E001')]
set_weights()
from .models import IndexEntry
IndexEntry.add_generic_relations()
Set default_auto_field in wagtail.contrib.postgres_search AppConfig
Add default_auto_field = 'django.db.models.AutoField'
Co-authored-by: Nick Moreton <7f1a4658c80dbc9331efe1b3861c4063f4838748@torchbox.com>
from django.apps import AppConfig
from django.core.checks import Error, Tags, register
from .utils import get_postgresql_connections, set_weights
class PostgresSearchConfig(AppConfig):
name = 'wagtail.contrib.postgres_search'
default_auto_field = 'django.db.models.AutoField'
def ready(self):
@register(Tags.compatibility, Tags.database)
def check_if_postgresql(app_configs, **kwargs):
if get_postgresql_connections():
return []
return [Error('You must use a PostgreSQL database '
'to use PostgreSQL search.',
id='wagtail.contrib.postgres_search.E001')]
set_weights()
from .models import IndexEntry
IndexEntry.add_generic_relations()
|
<commit_before>from django.apps import AppConfig
from django.core.checks import Error, Tags, register
from .utils import get_postgresql_connections, set_weights
class PostgresSearchConfig(AppConfig):
name = 'wagtail.contrib.postgres_search'
def ready(self):
@register(Tags.compatibility, Tags.database)
def check_if_postgresql(app_configs, **kwargs):
if get_postgresql_connections():
return []
return [Error('You must use a PostgreSQL database '
'to use PostgreSQL search.',
id='wagtail.contrib.postgres_search.E001')]
set_weights()
from .models import IndexEntry
IndexEntry.add_generic_relations()
<commit_msg>Set default_auto_field in wagtail.contrib.postgres_search AppConfig
Add default_auto_field = 'django.db.models.AutoField'
Co-authored-by: Nick Moreton <7f1a4658c80dbc9331efe1b3861c4063f4838748@torchbox.com><commit_after>from django.apps import AppConfig
from django.core.checks import Error, Tags, register
from .utils import get_postgresql_connections, set_weights
class PostgresSearchConfig(AppConfig):
name = 'wagtail.contrib.postgres_search'
default_auto_field = 'django.db.models.AutoField'
def ready(self):
@register(Tags.compatibility, Tags.database)
def check_if_postgresql(app_configs, **kwargs):
if get_postgresql_connections():
return []
return [Error('You must use a PostgreSQL database '
'to use PostgreSQL search.',
id='wagtail.contrib.postgres_search.E001')]
set_weights()
from .models import IndexEntry
IndexEntry.add_generic_relations()
|
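For context on the record above: since Django 3.2 an AppConfig may declare default_auto_field, and pinning it to AutoField keeps an existing app from generating implicit BigAutoField primary-key migrations. A minimal sketch, assuming Django is installed; the app name below is hypothetical.
from django.apps import AppConfig

class ExampleConfig(AppConfig):
    name = 'example_app'  # hypothetical app label
    default_auto_field = 'django.db.models.AutoField'  # keep the existing AutoField PKs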
fc60bdfe1ee3c4baef916532bb88aeb1787cd8c7
|
molo/core/api/constants.py
|
molo/core/api/constants.py
|
from collections import namedtuple
CONTENT_TYPES = [
("core.ArticlePage", "Article"),
("core.SectionPage", "Section"),
]
ENDPOINTS = [
("page", "api/v1/pages")
]
SESSION_VARS = namedtuple(
"SESSION_VARS",
["first", "second", ]
)
ARTICLE_SESSION_VARS = SESSION_VARS(
first=("url", "article_content_type"),
second="article_parent_page_id"
)
API_PAGES_ENDPOINT = "/api/v2/pages/"
API_IMAGES_ENDPOINT = "/api/v2/images/"
KEYS_TO_EXCLUDE = ["id", "meta", ]
# Form error messages
MAIN_IMPORT_FORM_MESSAGES = {
"connection_error": "Please enter a valid URL.",
"bad_request": "Please try again later.",
}
|
from collections import namedtuple
CONTENT_TYPES = [
("core.ArticlePage", "Article"),
("core.SectionPage", "Section"),
]
ENDPOINTS = [
("page", "api/v1/pages")
]
SESSION_VARS = namedtuple(
"SESSION_VARS",
["first", "second", ]
)
ARTICLE_SESSION_VARS = SESSION_VARS(
first=("url", "article_content_type"),
second="article_parent_page_id"
)
SECTION_SESSION_VARS = SESSION_VARS(
first=("url", "section_content_type"),
second="section_parent_page_id"
)
API_PAGES_ENDPOINT = "/api/v2/pages/"
API_IMAGES_ENDPOINT = "/api/v2/images/"
KEYS_TO_EXCLUDE = ["id", "meta", ]
# Form error messages
MAIN_IMPORT_FORM_MESSAGES = {
"connection_error": "Please enter a valid URL.",
"bad_request": "Please try again later.",
}
|
Add section session variables to facilitate redirects
|
Add section session variables to facilitate redirects
|
Python
|
bsd-2-clause
|
praekelt/molo,praekelt/molo,praekelt/molo,praekelt/molo
|
from collections import namedtuple
CONTENT_TYPES = [
("core.ArticlePage", "Article"),
("core.SectionPage", "Section"),
]
ENDPOINTS = [
("page", "api/v1/pages")
]
SESSION_VARS = namedtuple(
"SESSION_VARS",
["first", "second", ]
)
ARTICLE_SESSION_VARS = SESSION_VARS(
first=("url", "article_content_type"),
second="article_parent_page_id"
)
API_PAGES_ENDPOINT = "/api/v2/pages/"
API_IMAGES_ENDPOINT = "/api/v2/images/"
KEYS_TO_EXCLUDE = ["id", "meta", ]
# Form error messages
MAIN_IMPORT_FORM_MESSAGES = {
"connection_error": "Please enter a valid URL.",
"bad_request": "Please try again later.",
}
Add section session variables to facilitate redirects
|
from collections import namedtuple
CONTENT_TYPES = [
("core.ArticlePage", "Article"),
("core.SectionPage", "Section"),
]
ENDPOINTS = [
("page", "api/v1/pages")
]
SESSION_VARS = namedtuple(
"SESSION_VARS",
["first", "second", ]
)
ARTICLE_SESSION_VARS = SESSION_VARS(
first=("url", "article_content_type"),
second="article_parent_page_id"
)
SECTION_SESSION_VARS = SESSION_VARS(
first=("url", "section_content_type"),
second="section_parent_page_id"
)
API_PAGES_ENDPOINT = "/api/v2/pages/"
API_IMAGES_ENDPOINT = "/api/v2/images/"
KEYS_TO_EXCLUDE = ["id", "meta", ]
# Form error messages
MAIN_IMPORT_FORM_MESSAGES = {
"connection_error": "Please enter a valid URL.",
"bad_request": "Please try again later.",
}
|
<commit_before>from collections import namedtuple
CONTENT_TYPES = [
("core.ArticlePage", "Article"),
("core.SectionPage", "Section"),
]
ENDPOINTS = [
("page", "api/v1/pages")
]
SESSION_VARS = namedtuple(
"SESSION_VARS",
["first", "second", ]
)
ARTICLE_SESSION_VARS = SESSION_VARS(
first=("url", "article_content_type"),
second="article_parent_page_id"
)
API_PAGES_ENDPOINT = "/api/v2/pages/"
API_IMAGES_ENDPOINT = "/api/v2/images/"
KEYS_TO_EXCLUDE = ["id", "meta", ]
# Form error messages
MAIN_IMPORT_FORM_MESSAGES = {
"connection_error": "Please enter a valid URL.",
"bad_request": "Please try again later.",
}
<commit_msg>Add section session variables to facilitate redirects<commit_after>
|
from collections import namedtuple
CONTENT_TYPES = [
("core.ArticlePage", "Article"),
("core.SectionPage", "Section"),
]
ENDPOINTS = [
("page", "api/v1/pages")
]
SESSION_VARS = namedtuple(
"SESSION_VARS",
["first", "second", ]
)
ARTICLE_SESSION_VARS = SESSION_VARS(
first=("url", "article_content_type"),
second="article_parent_page_id"
)
SECTION_SESSION_VARS = SESSION_VARS(
first=("url", "section_content_type"),
second="section_parent_page_id"
)
API_PAGES_ENDPOINT = "/api/v2/pages/"
API_IMAGES_ENDPOINT = "/api/v2/images/"
KEYS_TO_EXCLUDE = ["id", "meta", ]
# Form error messages
MAIN_IMPORT_FORM_MESSAGES = {
"connection_error": "Please enter a valid URL.",
"bad_request": "Please try again later.",
}
|
from collections import namedtuple
CONTENT_TYPES = [
("core.ArticlePage", "Article"),
("core.SectionPage", "Section"),
]
ENDPOINTS = [
("page", "api/v1/pages")
]
SESSION_VARS = namedtuple(
"SESSION_VARS",
["first", "second", ]
)
ARTICLE_SESSION_VARS = SESSION_VARS(
first=("url", "article_content_type"),
second="article_parent_page_id"
)
API_PAGES_ENDPOINT = "/api/v2/pages/"
API_IMAGES_ENDPOINT = "/api/v2/images/"
KEYS_TO_EXCLUDE = ["id", "meta", ]
# Form error messages
MAIN_IMPORT_FORM_MESSAGES = {
"connection_error": "Please enter a valid URL.",
"bad_request": "Please try again later.",
}
Add section session variables to facilitate redirects
from collections import namedtuple
CONTENT_TYPES = [
("core.ArticlePage", "Article"),
("core.SectionPage", "Section"),
]
ENDPOINTS = [
("page", "api/v1/pages")
]
SESSION_VARS = namedtuple(
"SESSION_VARS",
["first", "second", ]
)
ARTICLE_SESSION_VARS = SESSION_VARS(
first=("url", "article_content_type"),
second="article_parent_page_id"
)
SECTION_SESSION_VARS = SESSION_VARS(
first=("url", "section_content_type"),
second="section_parent_page_id"
)
API_PAGES_ENDPOINT = "/api/v2/pages/"
API_IMAGES_ENDPOINT = "/api/v2/images/"
KEYS_TO_EXCLUDE = ["id", "meta", ]
# Form error messages
MAIN_IMPORT_FORM_MESSAGES = {
"connection_error": "Please enter a valid URL.",
"bad_request": "Please try again later.",
}
|
<commit_before>from collections import namedtuple
CONTENT_TYPES = [
("core.ArticlePage", "Article"),
("core.SectionPage", "Section"),
]
ENDPOINTS = [
("page", "api/v1/pages")
]
SESSION_VARS = namedtuple(
"SESSION_VARS",
["first", "second", ]
)
ARTICLE_SESSION_VARS = SESSION_VARS(
first=("url", "article_content_type"),
second="article_parent_page_id"
)
API_PAGES_ENDPOINT = "/api/v2/pages/"
API_IMAGES_ENDPOINT = "/api/v2/images/"
KEYS_TO_EXCLUDE = ["id", "meta", ]
# Form error messages
MAIN_IMPORT_FORM_MESSAGES = {
"connection_error": "Please enter a valid URL.",
"bad_request": "Please try again later.",
}
<commit_msg>Add section session variables to facilitate redirects<commit_after>from collections import namedtuple
CONTENT_TYPES = [
("core.ArticlePage", "Article"),
("core.SectionPage", "Section"),
]
ENDPOINTS = [
("page", "api/v1/pages")
]
SESSION_VARS = namedtuple(
"SESSION_VARS",
["first", "second", ]
)
ARTICLE_SESSION_VARS = SESSION_VARS(
first=("url", "article_content_type"),
second="article_parent_page_id"
)
SECTION_SESSION_VARS = SESSION_VARS(
first=("url", "section_content_type"),
second="section_parent_page_id"
)
API_PAGES_ENDPOINT = "/api/v2/pages/"
API_IMAGES_ENDPOINT = "/api/v2/images/"
KEYS_TO_EXCLUDE = ["id", "meta", ]
# Form error messages
MAIN_IMPORT_FORM_MESSAGES = {
"connection_error": "Please enter a valid URL.",
"bad_request": "Please try again later.",
}
|
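The record above mirrors ARTICLE_SESSION_VARS with a section-specific namedtuple of session keys. The snippet below sketches how such a constant might be consumed; the plain dict stands in for a real request session and the page id is made up.
from collections import namedtuple

SESSION_VARS = namedtuple("SESSION_VARS", ["first", "second"])
SECTION_SESSION_VARS = SESSION_VARS(
    first=("url", "section_content_type"),
    second="section_parent_page_id",
)

session = {}
for key in SECTION_SESSION_VARS.first:
    session.pop(key, None)  # drop first-step values before redirecting
session[SECTION_SESSION_VARS.second] = 42  # hypothetical parent page id
print(session)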
cad23e7c73a8f33b7aa841d89d5311030d1c2262
|
databridge/helpers.py
|
databridge/helpers.py
|
from requests.adapters import HTTPAdapter
RetryAdapter = HTTPAdapter(max_retries=5)
def create_db_url(username, passwd, host, port):
if username and passwd:
cr = '{}:{}@'.format(username, passwd)
else:
cr = ''
return 'http://{}{}:{}/'.format(
cr, host, port
)
def save_or_update(db, doc):
if doc['id'] not in db:
doc['_id'] = doc['id']
db.save(doc)
else:
old = db.get(doc['id'])
old = doc
db.save(old)
def check_doc(db, feed_item):
if feed_item['id'] not in db:
return True
if db.get(feed_item['id'])['dateModified'] < feed_item['dateModified']:
return True
return False
|
from requests.adapters import HTTPAdapter
RetryAdapter = HTTPAdapter(max_retries=5,
pool_connections=100,
pool_maxsize=50)
def create_db_url(username, passwd, host, port):
if username and passwd:
cr = '{}:{}@'.format(username, passwd)
else:
cr = ''
return 'http://{}{}:{}/'.format(
cr, host, port
)
def save_or_update(db, doc):
if doc['id'] not in db:
doc['_id'] = doc['id']
db.save(doc)
else:
old = db.get(doc['id'])
old = doc
db.save(old)
def check_doc(db, feed_item):
if feed_item['id'] not in db:
return True
if db.get_doc(feed_item['id'])['dateModified'] < feed_item['dateModified']:
return True
return False
|
Change adapter; fix filter func
|
Change adapter; fix filter func
|
Python
|
apache-2.0
|
yshalenyk/databridge
|
from requests.adapters import HTTPAdapter
RetryAdapter = HTTPAdapter(max_retries=5)
def create_db_url(username, passwd, host, port):
if username and passwd:
cr = '{}:{}@'.format(username, passwd)
else:
cr = ''
return 'http://{}{}:{}/'.format(
cr, host, port
)
def save_or_update(db, doc):
if doc['id'] not in db:
doc['_id'] = doc['id']
db.save(doc)
else:
old = db.get(doc['id'])
old = doc
db.save(old)
def check_doc(db, feed_item):
if feed_item['id'] not in db:
return True
if db.get(feed_item['id'])['dateModified'] < feed_item['dateModified']:
return True
return False
Change adapter; fix filter func
|
from requests.adapters import HTTPAdapter
RetryAdapter = HTTPAdapter(max_retries=5,
pool_connections=100,
pool_maxsize=50)
def create_db_url(username, passwd, host, port):
if username and passwd:
cr = '{}:{}@'.format(username, passwd)
else:
cr = ''
return 'http://{}{}:{}/'.format(
cr, host, port
)
def save_or_update(db, doc):
if doc['id'] not in db:
doc['_id'] = doc['id']
db.save(doc)
else:
old = db.get(doc['id'])
old = doc
db.save(old)
def check_doc(db, feed_item):
if feed_item['id'] not in db:
return True
if db.get_doc(feed_item['id'])['dateModified'] < feed_item['dateModified']:
return True
return False
|
<commit_before>from requests.adapters import HTTPAdapter
RetryAdapter = HTTPAdapter(max_retries=5)
def create_db_url(username, passwd, host, port):
if username and passwd:
cr = '{}:{}@'.format(username, passwd)
else:
cr = ''
return 'http://{}{}:{}/'.format(
cr, host, port
)
def save_or_update(db, doc):
if doc['id'] not in db:
doc['_id'] = doc['id']
db.save(doc)
else:
old = db.get(doc['id'])
old = doc
db.save(old)
def check_doc(db, feed_item):
if feed_item['id'] not in db:
return True
if db.get(feed_item['id'])['dateModified'] < feed_item['dateModified']:
return True
return False
<commit_msg>Change adapter; fix filter func<commit_after>
|
from requests.adapters import HTTPAdapter
RetryAdapter = HTTPAdapter(max_retries=5,
pool_connections=100,
pool_maxsize=50)
def create_db_url(username, passwd, host, port):
if username and passwd:
cr = '{}:{}@'.format(username, passwd)
else:
cr = ''
return 'http://{}{}:{}/'.format(
cr, host, port
)
def save_or_update(db, doc):
if doc['id'] not in db:
doc['_id'] = doc['id']
db.save(doc)
else:
old = db.get(doc['id'])
old = doc
db.save(old)
def check_doc(db, feed_item):
if feed_item['id'] not in db:
return True
if db.get_doc(feed_item['id'])['dateModified'] < feed_item['dateModified']:
return True
return False
|
from requests.adapters import HTTPAdapter
RetryAdapter = HTTPAdapter(max_retries=5)
def create_db_url(username, passwd, host, port):
if username and passwd:
cr = '{}:{}@'.format(username, passwd)
else:
cr = ''
return 'http://{}{}:{}/'.format(
cr, host, port
)
def save_or_update(db, doc):
if doc['id'] not in db:
doc['_id'] = doc['id']
db.save(doc)
else:
old = db.get(doc['id'])
old = doc
db.save(old)
def check_doc(db, feed_item):
if feed_item['id'] not in db:
return True
if db.get(feed_item['id'])['dateModified'] < feed_item['dateModified']:
return True
return False
Change adapter; fix filter func
from requests.adapters import HTTPAdapter
RetryAdapter = HTTPAdapter(max_retries=5,
pool_connections=100,
pool_maxsize=50)
def create_db_url(username, passwd, host, port):
if username and passwd:
cr = '{}:{}@'.format(username, passwd)
else:
cr = ''
return 'http://{}{}:{}/'.format(
cr, host, port
)
def save_or_update(db, doc):
if doc['id'] not in db:
doc['_id'] = doc['id']
db.save(doc)
else:
old = db.get(doc['id'])
old = doc
db.save(old)
def check_doc(db, feed_item):
if feed_item['id'] not in db:
return True
if db.get_doc(feed_item['id'])['dateModified'] < feed_item['dateModified']:
return True
return False
|
<commit_before>from requests.adapters import HTTPAdapter
RetryAdapter = HTTPAdapter(max_retries=5)
def create_db_url(username, passwd, host, port):
if username and passwd:
cr = '{}:{}@'.format(username, passwd)
else:
cr = ''
return 'http://{}{}:{}/'.format(
cr, host, port
)
def save_or_update(db, doc):
if doc['id'] not in db:
doc['_id'] = doc['id']
db.save(doc)
else:
old = db.get(doc['id'])
old = doc
db.save(old)
def check_doc(db, feed_item):
if feed_item['id'] not in db:
return True
if db.get(feed_item['id'])['dateModified'] < feed_item['dateModified']:
return True
return False
<commit_msg>Change adapter; fix filter func<commit_after>from requests.adapters import HTTPAdapter
RetryAdapter = HTTPAdapter(max_retries=5,
pool_connections=100,
pool_maxsize=50)
def create_db_url(username, passwd, host, port):
if username and passwd:
cr = '{}:{}@'.format(username, passwd)
else:
cr = ''
return 'http://{}{}:{}/'.format(
cr, host, port
)
def save_or_update(db, doc):
if doc['id'] not in db:
doc['_id'] = doc['id']
db.save(doc)
else:
old = db.get(doc['id'])
old = doc
db.save(old)
def check_doc(db, feed_item):
if feed_item['id'] not in db:
return True
if db.get_doc(feed_item['id'])['dateModified'] < feed_item['dateModified']:
return True
return False
|
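The fix above enlarges the connection pool on the retry adapter. A minimal sketch of mounting such an adapter on a requests.Session, assuming the requests library is installed; no request is actually sent.
import requests
from requests.adapters import HTTPAdapter

adapter = HTTPAdapter(max_retries=5, pool_connections=100, pool_maxsize=50)
session = requests.Session()
session.mount('http://', adapter)
session.mount('https://', adapter)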
3f325e7820661313b69f6e410987caaff1ac7d96
|
python/VTK.py
|
python/VTK.py
|
"""
VTK.py
An VTK module for python that includes:
Wrappers for all the VTK classes that are wrappable
A Tkinter vtkRenderWidget (works like the tcl vtkTkRenderWidget)
The vtkImageViewerWidget and vtkImageWindowWidget are coming soon.
Classes to assist in moving data between python and VTK.
"""
from vtkpython import *
print "VTK Version", vtkVersion().GetVTKVersion()
from vtkConstants import *
from vtkRenderWidget import vtkRenderWidget,vtkTkRenderWidget
#from vtkImageWindowWidget import vtkImageWindowWidget,vtkTkImageWindowWidget
#from vtkImageViewerWidget import vtkImageViewerWidget,vtkTkImageViewerWidget
from vtkImageImportFromArray import *
from vtkImageExportToArray import *
|
"""
VTK.py
An VTK module for python that includes:
Wrappers for all the VTK classes that are wrappable
A Tkinter vtkRenderWidget (works like the tcl vtkTkRenderWidget)
The vtkImageViewerWidget and vtkImageWindowWidget are coming soon.
Classes to assist in moving data between python and VTK.
"""
from vtkpython import *
from vtkConstants import *
from vtkRenderWidget import vtkRenderWidget,vtkTkRenderWidget
#from vtkImageWindowWidget import vtkImageWindowWidget,vtkTkImageWindowWidget
#from vtkImageViewerWidget import vtkImageViewerWidget,vtkTkImageViewerWidget
# try to load the python import/export classes if Numeric is available
try:
from Numeric import *
from vtkImageImportFromArray import *
from vtkImageExportToArray import *
except ImportError:
pass
|
Remove dependency on vtkVersion Place a try/except around modules that required Numeric
|
FIX: Remove dependency on vtkVersion
Place a try/except around modules that required Numeric
|
Python
|
bsd-3-clause
|
sumedhasingla/VTK,aashish24/VTK-old,mspark93/VTK,hendradarwin/VTK,cjh1/VTK,gram526/VTK,SimVascular/VTK,demarle/VTK,Wuteyan/VTK,msmolens/VTK,SimVascular/VTK,SimVascular/VTK,jeffbaumes/jeffbaumes-vtk,sankhesh/VTK,ashray/VTK-EVM,SimVascular/VTK,biddisco/VTK,demarle/VTK,spthaolt/VTK,sumedhasingla/VTK,candy7393/VTK,mspark93/VTK,sumedhasingla/VTK,SimVascular/VTK,ashray/VTK-EVM,aashish24/VTK-old,hendradarwin/VTK,ashray/VTK-EVM,sankhesh/VTK,biddisco/VTK,candy7393/VTK,johnkit/vtk-dev,keithroe/vtkoptix,berendkleinhaneveld/VTK,jmerkow/VTK,candy7393/VTK,aashish24/VTK-old,cjh1/VTK,collects/VTK,aashish24/VTK-old,naucoin/VTKSlicerWidgets,berendkleinhaneveld/VTK,johnkit/vtk-dev,sgh/vtk,sumedhasingla/VTK,arnaudgelas/VTK,biddisco/VTK,msmolens/VTK,naucoin/VTKSlicerWidgets,berendkleinhaneveld/VTK,jeffbaumes/jeffbaumes-vtk,cjh1/VTK,msmolens/VTK,jmerkow/VTK,spthaolt/VTK,ashray/VTK-EVM,sgh/vtk,sankhesh/VTK,daviddoria/PointGraphsPhase1,mspark93/VTK,biddisco/VTK,ashray/VTK-EVM,arnaudgelas/VTK,keithroe/vtkoptix,demarle/VTK,spthaolt/VTK,spthaolt/VTK,johnkit/vtk-dev,hendradarwin/VTK,keithroe/vtkoptix,msmolens/VTK,SimVascular/VTK,arnaudgelas/VTK,demarle/VTK,mspark93/VTK,sumedhasingla/VTK,candy7393/VTK,daviddoria/PointGraphsPhase1,gram526/VTK,hendradarwin/VTK,jeffbaumes/jeffbaumes-vtk,hendradarwin/VTK,jmerkow/VTK,jeffbaumes/jeffbaumes-vtk,sankhesh/VTK,spthaolt/VTK,Wuteyan/VTK,Wuteyan/VTK,biddisco/VTK,sankhesh/VTK,collects/VTK,arnaudgelas/VTK,berendkleinhaneveld/VTK,candy7393/VTK,ashray/VTK-EVM,johnkit/vtk-dev,naucoin/VTKSlicerWidgets,aashish24/VTK-old,collects/VTK,sankhesh/VTK,jmerkow/VTK,cjh1/VTK,naucoin/VTKSlicerWidgets,gram526/VTK,johnkit/vtk-dev,gram526/VTK,gram526/VTK,spthaolt/VTK,gram526/VTK,candy7393/VTK,demarle/VTK,Wuteyan/VTK,collects/VTK,daviddoria/PointGraphsPhase1,sgh/vtk,mspark93/VTK,gram526/VTK,ashray/VTK-EVM,hendradarwin/VTK,daviddoria/PointGraphsPhase1,biddisco/VTK,arnaudgelas/VTK,demarle/VTK,naucoin/VTKSlicerWidgets,ashray/VTK-EVM,berendkleinhaneveld/VTK,msmolens/VTK,collects/VTK,msmolens/VTK,sgh/vtk,naucoin/VTKSlicerWidgets,msmolens/VTK,sumedhasingla/VTK,berendkleinhaneveld/VTK,collects/VTK,mspark93/VTK,johnkit/vtk-dev,aashish24/VTK-old,Wuteyan/VTK,sumedhasingla/VTK,biddisco/VTK,keithroe/vtkoptix,johnkit/vtk-dev,jmerkow/VTK,demarle/VTK,demarle/VTK,msmolens/VTK,Wuteyan/VTK,sumedhasingla/VTK,arnaudgelas/VTK,jeffbaumes/jeffbaumes-vtk,jmerkow/VTK,sgh/vtk,candy7393/VTK,jmerkow/VTK,cjh1/VTK,SimVascular/VTK,berendkleinhaneveld/VTK,spthaolt/VTK,daviddoria/PointGraphsPhase1,mspark93/VTK,SimVascular/VTK,sgh/vtk,keithroe/vtkoptix,daviddoria/PointGraphsPhase1,sankhesh/VTK,candy7393/VTK,jmerkow/VTK,keithroe/vtkoptix,keithroe/vtkoptix,gram526/VTK,Wuteyan/VTK,keithroe/vtkoptix,sankhesh/VTK,jeffbaumes/jeffbaumes-vtk,cjh1/VTK,hendradarwin/VTK,mspark93/VTK
|
"""
VTK.py
An VTK module for python that includes:
Wrappers for all the VTK classes that are wrappable
A Tkinter vtkRenderWidget (works like the tcl vtkTkRenderWidget)
The vtkImageViewerWidget and vtkImageWindowWidget are coming soon.
Classes to assist in moving data between python and VTK.
"""
from vtkpython import *
print "VTK Version", vtkVersion().GetVTKVersion()
from vtkConstants import *
from vtkRenderWidget import vtkRenderWidget,vtkTkRenderWidget
#from vtkImageWindowWidget import vtkImageWindowWidget,vtkTkImageWindowWidget
#from vtkImageViewerWidget import vtkImageViewerWidget,vtkTkImageViewerWidget
from vtkImageImportFromArray import *
from vtkImageExportToArray import *
FIX: Remove dependency on vtkVersion
Place a try/except around modules that required Numeric
|
"""
VTK.py
An VTK module for python that includes:
Wrappers for all the VTK classes that are wrappable
A Tkinter vtkRenderWidget (works like the tcl vtkTkRenderWidget)
The vtkImageViewerWidget and vtkImageWindowWidget are coming soon.
Classes to assist in moving data between python and VTK.
"""
from vtkpython import *
from vtkConstants import *
from vtkRenderWidget import vtkRenderWidget,vtkTkRenderWidget
#from vtkImageWindowWidget import vtkImageWindowWidget,vtkTkImageWindowWidget
#from vtkImageViewerWidget import vtkImageViewerWidget,vtkTkImageViewerWidget
# try to load the python import/export classes if Numeric is available
try:
from Numeric import *
from vtkImageImportFromArray import *
from vtkImageExportToArray import *
except ImportError:
pass
|
<commit_before>"""
VTK.py
An VTK module for python that includes:
Wrappers for all the VTK classes that are wrappable
A Tkinter vtkRenderWidget (works like the tcl vtkTkRenderWidget)
The vtkImageViewerWidget and vtkImageWindowWidget are coming soon.
Classes to assist in moving data between python and VTK.
"""
from vtkpython import *
print "VTK Version", vtkVersion().GetVTKVersion()
from vtkConstants import *
from vtkRenderWidget import vtkRenderWidget,vtkTkRenderWidget
#from vtkImageWindowWidget import vtkImageWindowWidget,vtkTkImageWindowWidget
#from vtkImageViewerWidget import vtkImageViewerWidget,vtkTkImageViewerWidget
from vtkImageImportFromArray import *
from vtkImageExportToArray import *
<commit_msg>FIX: Remove dependency on vtkVersion
Place a try/except around modules that required Numeric<commit_after>
|
"""
VTK.py
An VTK module for python that includes:
Wrappers for all the VTK classes that are wrappable
A Tkinter vtkRenderWidget (works like the tcl vtkTkRenderWidget)
The vtkImageViewerWidget and vtkImageWindowWidget are coming soon.
Classes to assist in moving data between python and VTK.
"""
from vtkpython import *
from vtkConstants import *
from vtkRenderWidget import vtkRenderWidget,vtkTkRenderWidget
#from vtkImageWindowWidget import vtkImageWindowWidget,vtkTkImageWindowWidget
#from vtkImageViewerWidget import vtkImageViewerWidget,vtkTkImageViewerWidget
# try to load the python import/export classes if Numeric is available
try:
from Numeric import *
from vtkImageImportFromArray import *
from vtkImageExportToArray import *
except ImportError:
pass
|
"""
VTK.py
An VTK module for python that includes:
Wrappers for all the VTK classes that are wrappable
A Tkinter vtkRenderWidget (works like the tcl vtkTkRenderWidget)
The vtkImageViewerWidget and vtkImageWindowWidget are coming soon.
Classes to assist in moving data between python and VTK.
"""
from vtkpython import *
print "VTK Version", vtkVersion().GetVTKVersion()
from vtkConstants import *
from vtkRenderWidget import vtkRenderWidget,vtkTkRenderWidget
#from vtkImageWindowWidget import vtkImageWindowWidget,vtkTkImageWindowWidget
#from vtkImageViewerWidget import vtkImageViewerWidget,vtkTkImageViewerWidget
from vtkImageImportFromArray import *
from vtkImageExportToArray import *
FIX: Remove dependency on vtkVersion
Place a try/except around modules that required Numeric
"""
VTK.py
An VTK module for python that includes:
Wrappers for all the VTK classes that are wrappable
A Tkinter vtkRenderWidget (works like the tcl vtkTkRenderWidget)
The vtkImageViewerWidget and vtkImageWindowWidget are coming soon.
Classes to assist in moving data between python and VTK.
"""
from vtkpython import *
from vtkConstants import *
from vtkRenderWidget import vtkRenderWidget,vtkTkRenderWidget
#from vtkImageWindowWidget import vtkImageWindowWidget,vtkTkImageWindowWidget
#from vtkImageViewerWidget import vtkImageViewerWidget,vtkTkImageViewerWidget
# try to load the python import/export classes if Numeric is available
try:
from Numeric import *
from vtkImageImportFromArray import *
from vtkImageExportToArray import *
except ImportError:
pass
|
<commit_before>"""
VTK.py
An VTK module for python that includes:
Wrappers for all the VTK classes that are wrappable
A Tkinter vtkRenderWidget (works like the tcl vtkTkRenderWidget)
The vtkImageViewerWidget and vtkImageWindowWidget are coming soon.
Classes to assist in moving data between python and VTK.
"""
from vtkpython import *
print "VTK Version", vtkVersion().GetVTKVersion()
from vtkConstants import *
from vtkRenderWidget import vtkRenderWidget,vtkTkRenderWidget
#from vtkImageWindowWidget import vtkImageWindowWidget,vtkTkImageWindowWidget
#from vtkImageViewerWidget import vtkImageViewerWidget,vtkTkImageViewerWidget
from vtkImageImportFromArray import *
from vtkImageExportToArray import *
<commit_msg>FIX: Remove dependency on vtkVersion
Place a try/except around modules that required Numeric<commit_after>"""
VTK.py
An VTK module for python that includes:
Wrappers for all the VTK classes that are wrappable
A Tkinter vtkRenderWidget (works like the tcl vtkTkRenderWidget)
The vtkImageViewerWidget and vtkImageWindowWidget are coming soon.
Classes to assist in moving data between python and VTK.
"""
from vtkpython import *
from vtkConstants import *
from vtkRenderWidget import vtkRenderWidget,vtkTkRenderWidget
#from vtkImageWindowWidget import vtkImageWindowWidget,vtkTkImageWindowWidget
#from vtkImageViewerWidget import vtkImageViewerWidget,vtkTkImageViewerWidget
# try to load the python import/export classes if Numeric is available
try:
from Numeric import *
from vtkImageImportFromArray import *
from vtkImageExportToArray import *
except ImportError:
pass
|
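The change above wraps the Numeric-dependent imports in try/except ImportError so the module still loads when Numeric is absent. The sketch below shows the same optional-dependency guard with numpy standing in for the long-retired Numeric package (an assumption; any optional import works the same way).
try:
    import numpy as np
    HAVE_NUMPY = True
except ImportError:
    np = None
    HAVE_NUMPY = False

def to_array(values):
    # Return a numpy array when numpy is available, else a plain list.
    return np.asarray(values) if HAVE_NUMPY else list(values)

print(to_array([1, 2, 3]))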
ef011470ad361ca50b638461935d344392976821
|
pywwt/misc.py
|
pywwt/misc.py
|
from bs4 import BeautifulSoup
class WWTException(Exception):
pass
def handle_response(resp_str):
soup = BeautifulSoup(resp_str)
success = soup.layerapi.status.string
if success != "Success":
raise WWTException(success)
def parse_kwargs(params, kwargs):
if "date_time" in kwargs:
params["datetime"] = kwargs["date_time"]
if "time_rate" in kwargs:
params["timerate"] = str(kwargs["time_rate"])
if "fly_to" in kwargs:
params["flyto"] = ",".join([str(i) for i in kwargs["fly_to"]])
if "instant" in kwargs:
params["instant"] = str(kwargs["instant"])
if "auto_loop" in kwargs:
params["autoloop"] = str(kwargs["auto_loop"])
|
from bs4 import BeautifulSoup
class WWTException(Exception):
pass
def handle_response(resp_str):
soup = BeautifulSoup(resp_str)
try:
success = soup.layerapi.status.string
if success != "Success":
raise WWTException(success)
except AttributeError:
error = soup.html.body.h2.string
raise WWTException(error)
def parse_kwargs(params, kwargs):
if "date_time" in kwargs:
params["datetime"] = kwargs["date_time"]
if "time_rate" in kwargs:
params["timerate"] = str(kwargs["time_rate"])
if "fly_to" in kwargs:
params["flyto"] = ",".join([str(i) for i in kwargs["fly_to"]])
if "instant" in kwargs:
params["instant"] = str(kwargs["instant"])
if "auto_loop" in kwargs:
params["autoloop"] = str(kwargs["auto_loop"])
|
Handle other kinds of errors.
|
Handle other kinds of errors.
|
Python
|
bsd-3-clause
|
jzuhone/pywwt,vga101/pywwt,vga101/pywwt,jzuhone/pywwt
|
from bs4 import BeautifulSoup
class WWTException(Exception):
pass
def handle_response(resp_str):
soup = BeautifulSoup(resp_str)
success = soup.layerapi.status.string
if success != "Success":
raise WWTException(success)
def parse_kwargs(params, kwargs):
if "date_time" in kwargs:
params["datetime"] = kwargs["date_time"]
if "time_rate" in kwargs:
params["timerate"] = str(kwargs["time_rate"])
if "fly_to" in kwargs:
params["flyto"] = ",".join([str(i) for i in kwargs["fly_to"]])
if "instant" in kwargs:
params["instant"] = str(kwargs["instant"])
if "auto_loop" in kwargs:
params["autoloop"] = str(kwargs["auto_loop"])
Handle other kinds of errors.
|
from bs4 import BeautifulSoup
class WWTException(Exception):
pass
def handle_response(resp_str):
soup = BeautifulSoup(resp_str)
try:
success = soup.layerapi.status.string
if success != "Success":
raise WWTException(success)
except AttributeError:
error = soup.html.body.h2.string
raise WWTException(error)
def parse_kwargs(params, kwargs):
if "date_time" in kwargs:
params["datetime"] = kwargs["date_time"]
if "time_rate" in kwargs:
params["timerate"] = str(kwargs["time_rate"])
if "fly_to" in kwargs:
params["flyto"] = ",".join([str(i) for i in kwargs["fly_to"]])
if "instant" in kwargs:
params["instant"] = str(kwargs["instant"])
if "auto_loop" in kwargs:
params["autoloop"] = str(kwargs["auto_loop"])
|
<commit_before>from bs4 import BeautifulSoup
class WWTException(Exception):
pass
def handle_response(resp_str):
soup = BeautifulSoup(resp_str)
success = soup.layerapi.status.string
if success != "Success":
raise WWTException(success)
def parse_kwargs(params, kwargs):
if "date_time" in kwargs:
params["datetime"] = kwargs["date_time"]
if "time_rate" in kwargs:
params["timerate"] = str(kwargs["time_rate"])
if "fly_to" in kwargs:
params["flyto"] = ",".join([str(i) for i in kwargs["fly_to"]])
if "instant" in kwargs:
params["instant"] = str(kwargs["instant"])
if "auto_loop" in kwargs:
params["autoloop"] = str(kwargs["auto_loop"])
<commit_msg>Handle other kinds of errors.<commit_after>
|
from bs4 import BeautifulSoup
class WWTException(Exception):
pass
def handle_response(resp_str):
soup = BeautifulSoup(resp_str)
try:
success = soup.layerapi.status.string
if success != "Success":
raise WWTException(success)
except AttributeError:
error = soup.html.body.h2.string
raise WWTException(error)
def parse_kwargs(params, kwargs):
if "date_time" in kwargs:
params["datetime"] = kwargs["date_time"]
if "time_rate" in kwargs:
params["timerate"] = str(kwargs["time_rate"])
if "fly_to" in kwargs:
params["flyto"] = ",".join([str(i) for i in kwargs["fly_to"]])
if "instant" in kwargs:
params["instant"] = str(kwargs["instant"])
if "auto_loop" in kwargs:
params["autoloop"] = str(kwargs["auto_loop"])
|
from bs4 import BeautifulSoup
class WWTException(Exception):
pass
def handle_response(resp_str):
soup = BeautifulSoup(resp_str)
success = soup.layerapi.status.string
if success != "Success":
raise WWTException(success)
def parse_kwargs(params, kwargs):
if "date_time" in kwargs:
params["datetime"] = kwargs["date_time"]
if "time_rate" in kwargs:
params["timerate"] = str(kwargs["time_rate"])
if "fly_to" in kwargs:
params["flyto"] = ",".join([str(i) for i in kwargs["fly_to"]])
if "instant" in kwargs:
params["instant"] = str(kwargs["instant"])
if "auto_loop" in kwargs:
params["autoloop"] = str(kwargs["auto_loop"])
Handle other kinds of errors.
from bs4 import BeautifulSoup
class WWTException(Exception):
pass
def handle_response(resp_str):
soup = BeautifulSoup(resp_str)
try:
success = soup.layerapi.status.string
if success != "Success":
raise WWTException(success)
except AttributeError:
error = soup.html.body.h2.string
raise WWTException(error)
def parse_kwargs(params, kwargs):
if "date_time" in kwargs:
params["datetime"] = kwargs["date_time"]
if "time_rate" in kwargs:
params["timerate"] = str(kwargs["time_rate"])
if "fly_to" in kwargs:
params["flyto"] = ",".join([str(i) for i in kwargs["fly_to"]])
if "instant" in kwargs:
params["instant"] = str(kwargs["instant"])
if "auto_loop" in kwargs:
params["autoloop"] = str(kwargs["auto_loop"])
|
<commit_before>from bs4 import BeautifulSoup
class WWTException(Exception):
pass
def handle_response(resp_str):
soup = BeautifulSoup(resp_str)
success = soup.layerapi.status.string
if success != "Success":
raise WWTException(success)
def parse_kwargs(params, kwargs):
if "date_time" in kwargs:
params["datetime"] = kwargs["date_time"]
if "time_rate" in kwargs:
params["timerate"] = str(kwargs["time_rate"])
if "fly_to" in kwargs:
params["flyto"] = ",".join([str(i) for i in kwargs["fly_to"]])
if "instant" in kwargs:
params["instant"] = str(kwargs["instant"])
if "auto_loop" in kwargs:
params["autoloop"] = str(kwargs["auto_loop"])
<commit_msg>Handle other kinds of errors.<commit_after>from bs4 import BeautifulSoup
class WWTException(Exception):
pass
def handle_response(resp_str):
soup = BeautifulSoup(resp_str)
try:
success = soup.layerapi.status.string
if success != "Success":
raise WWTException(success)
except AttributeError:
error = soup.html.body.h2.string
raise WWTException(error)
def parse_kwargs(params, kwargs):
if "date_time" in kwargs:
params["datetime"] = kwargs["date_time"]
if "time_rate" in kwargs:
params["timerate"] = str(kwargs["time_rate"])
if "fly_to" in kwargs:
params["flyto"] = ",".join([str(i) for i in kwargs["fly_to"]])
if "instant" in kwargs:
params["instant"] = str(kwargs["instant"])
if "auto_loop" in kwargs:
params["autoloop"] = str(kwargs["auto_loop"])
|
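The fix above falls back to the <h2> of an HTML error page when the expected LayerApi status element is missing. A small illustrative sketch, assuming BeautifulSoup (bs4) is installed; the helper name and sample payloads are made up, and the parser is passed explicitly only to keep the snippet warning-free.
from bs4 import BeautifulSoup

def response_status(resp_str):
    soup = BeautifulSoup(resp_str, "html.parser")
    try:
        return soup.layerapi.status.string  # normal LayerApi reply
    except AttributeError:
        return soup.html.body.h2.string  # an HTML error page came back instead

print(response_status("<LayerApi><Status>Success</Status></LayerApi>"))
print(response_status("<html><body><h2>404 Not Found</h2></body></html>"))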
73eacdde5067e60f40af000237d198748c5b3cc7
|
PYNWapp/PYNWsite/models.py
|
PYNWapp/PYNWsite/models.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
# Create your models here.
class Event(models.Model):
name = models.CharField(max_length=200)
location = models.CharField(max_length=300)
event_date = models.DateTimeField('event date')
description = models.TextField()
def __str__(self):
return self.name
def is_future(self):
return self.event_date > timezone.now()
class Post(models.Model):
title = models.CharField(max_length=100, unique=True)
slug = models.SlugField(max_length=100, unique=True)
body = models.TextField()
posted = models.DateField(db_index=True, auto_now_add=True)
category = models.ForeignKey('Category')
class Category(models.Model):
title = models.CharField(max_length=100, db_index=True)
slug = models.SlugField(max_length=100, db_index=True)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
# Create your models here.
class Event(models.Model):
name = models.CharField(max_length=200)
location = models.CharField(max_length=300)
event_date = models.DateTimeField('event date')
description = models.TextField()
def __str__(self):
return self.name
def is_future(self):
return self.event_date > timezone.now()
class Post(models.Model):
title = models.CharField(max_length=100, unique=True)
slug = models.SlugField(max_length=100, unique=True)
body = models.TextField()
posted = models.DateField(db_index=True, auto_now_add=True)
category = models.ForeignKey('Category')
class Category(models.Model):
title = models.CharField(max_length=100, db_index=True)
slug = models.SlugField(max_length=100, db_index=True)
class Meta:
verbose_name_plural = 'Categories'
|
Fix plural name for Categories model.
|
Fix plural name for Categories model.
|
Python
|
mit
|
PythonNorthwestEngland/pynw-website,PythonNorthwestEngland/pynw-website
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
# Create your models here.
class Event(models.Model):
name = models.CharField(max_length=200)
location = models.CharField(max_length=300)
event_date = models.DateTimeField('event date')
description = models.TextField()
def __str__(self):
return self.name
def is_future(self):
return self.event_date > timezone.now()
class Post(models.Model):
title = models.CharField(max_length=100, unique=True)
slug = models.SlugField(max_length=100, unique=True)
body = models.TextField()
posted = models.DateField(db_index=True, auto_now_add=True)
category = models.ForeignKey('Category')
class Category(models.Model):
title = models.CharField(max_length=100, db_index=True)
slug = models.SlugField(max_length=100, db_index=True)
Fix plural name for Categories model.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
# Create your models here.
class Event(models.Model):
name = models.CharField(max_length=200)
location = models.CharField(max_length=300)
event_date = models.DateTimeField('event date')
description = models.TextField()
def __str__(self):
return self.name
def is_future(self):
return self.event_date > timezone.now()
class Post(models.Model):
title = models.CharField(max_length=100, unique=True)
slug = models.SlugField(max_length=100, unique=True)
body = models.TextField()
posted = models.DateField(db_index=True, auto_now_add=True)
category = models.ForeignKey('Category')
class Category(models.Model):
title = models.CharField(max_length=100, db_index=True)
slug = models.SlugField(max_length=100, db_index=True)
class Meta:
verbose_name_plural = 'Categories'
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
# Create your models here.
class Event(models.Model):
name = models.CharField(max_length=200)
location = models.CharField(max_length=300)
event_date = models.DateTimeField('event date')
description = models.TextField()
def __str__(self):
return self.name
def is_future(self):
return self.event_date > timezone.now()
class Post(models.Model):
title = models.CharField(max_length=100, unique=True)
slug = models.SlugField(max_length=100, unique=True)
body = models.TextField()
posted = models.DateField(db_index=True, auto_now_add=True)
category = models.ForeignKey('Category')
class Category(models.Model):
title = models.CharField(max_length=100, db_index=True)
slug = models.SlugField(max_length=100, db_index=True)
<commit_msg>Fix plural name for Categories model.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
# Create your models here.
class Event(models.Model):
name = models.CharField(max_length=200)
location = models.CharField(max_length=300)
event_date = models.DateTimeField('event date')
description = models.TextField()
def __str__(self):
return self.name
def is_future(self):
return self.event_date > timezone.now()
class Post(models.Model):
title = models.CharField(max_length=100, unique=True)
slug = models.SlugField(max_length=100, unique=True)
body = models.TextField()
posted = models.DateField(db_index=True, auto_now_add=True)
category = models.ForeignKey('Category')
class Category(models.Model):
title = models.CharField(max_length=100, db_index=True)
slug = models.SlugField(max_length=100, db_index=True)
class Meta:
verbose_name_plural = 'Categories'
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
# Create your models here.
class Event(models.Model):
name = models.CharField(max_length=200)
location = models.CharField(max_length=300)
event_date = models.DateTimeField('event date')
description = models.TextField()
def __str__(self):
return self.name
def is_future(self):
return self.event_date > timezone.now()
class Post(models.Model):
title = models.CharField(max_length=100, unique=True)
slug = models.SlugField(max_length=100, unique=True)
body = models.TextField()
posted = models.DateField(db_index=True, auto_now_add=True)
category = models.ForeignKey('Category')
class Category(models.Model):
title = models.CharField(max_length=100, db_index=True)
slug = models.SlugField(max_length=100, db_index=True)
Fix plural name for Categories model.
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
# Create your models here.
class Event(models.Model):
name = models.CharField(max_length=200)
location = models.CharField(max_length=300)
event_date = models.DateTimeField('event date')
description = models.TextField()
def __str__(self):
return self.name
def is_future(self):
return self.event_date > timezone.now()
class Post(models.Model):
title = models.CharField(max_length=100, unique=True)
slug = models.SlugField(max_length=100, unique=True)
body = models.TextField()
posted = models.DateField(db_index=True, auto_now_add=True)
category = models.ForeignKey('Category')
class Category(models.Model):
title = models.CharField(max_length=100, db_index=True)
slug = models.SlugField(max_length=100, db_index=True)
class Meta:
verbose_name_plural = 'Categories'
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
# Create your models here.
class Event(models.Model):
name = models.CharField(max_length=200)
location = models.CharField(max_length=300)
event_date = models.DateTimeField('event date')
description = models.TextField()
def __str__(self):
return self.name
def is_future(self):
return self.event_date > timezone.now()
class Post(models.Model):
title = models.CharField(max_length=100, unique=True)
slug = models.SlugField(max_length=100, unique=True)
body = models.TextField()
posted = models.DateField(db_index=True, auto_now_add=True)
category = models.ForeignKey('Category')
class Category(models.Model):
title = models.CharField(max_length=100, db_index=True)
slug = models.SlugField(max_length=100, db_index=True)
<commit_msg>Fix plural name for Categories model.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
# Create your models here.
class Event(models.Model):
name = models.CharField(max_length=200)
location = models.CharField(max_length=300)
event_date = models.DateTimeField('event date')
description = models.TextField()
def __str__(self):
return self.name
def is_future(self):
return self.event_date > timezone.now()
class Post(models.Model):
title = models.CharField(max_length=100, unique=True)
slug = models.SlugField(max_length=100, unique=True)
body = models.TextField()
posted = models.DateField(db_index=True, auto_now_add=True)
category = models.ForeignKey('Category')
class Category(models.Model):
title = models.CharField(max_length=100, db_index=True)
slug = models.SlugField(max_length=100, db_index=True)
class Meta:
verbose_name_plural = 'Categories'
|
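The change in this record exists because Django pluralizes a model's display name by appending "s" ("Categorys") unless told otherwise. A minimal sketch of the fix, assuming the model lives inside an installed app in a configured Django project (it is not runnable as a standalone script):

from django.db import models

class Category(models.Model):
    title = models.CharField(max_length=100)

    class Meta:
        # Django's default plural just appends "s" ("Categorys");
        # verbose_name_plural overrides that display name in the admin.
        verbose_name_plural = 'Categories'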
1ccb1e5aa3dbf7b39f807c125a753cb44664cd56
|
src/armet/connectors/django/__init__.py
|
src/armet/connectors/django/__init__.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals, division
__all__ = [
]
def is_available(*capacities):
"""
Detects if the environment is available for use in
the (optionally) specified capacities.
"""
try:
# Attempted import.
import django
# Now try and use it.
from django.conf import settings
settings.DEBUG
# Detected connector.
return True
except ImportError:
# Failed to import django; or, we don't have a proper settings
# file.
return False
|
# -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals, division
__all__ = [
]
def is_available(*capacities):
"""
Detects if the environment is available for use in
the (optionally) specified capacities.
"""
try:
# Attempted import.
import django
except ImportError:
# Failed to import django
return False
# Import the exception we might get
from django.core.exceptions import ImproperlyConfigured
try:
# Now try and use it.
from django.conf import settings
settings.DEBUG
# Detected connector.
return True
except ImproperlyConfigured:
# We don't have an available settings file; django is actually in use.
return False
|
Fix is_available method in django; proper exception handling for use checking.
|
Fix is_available method in django; proper exception handling for use checking.
|
Python
|
mit
|
armet/python-armet
|
# -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals, division
__all__ = [
]
def is_available(*capacities):
"""
Detects if the environment is available for use in
the (optionally) specified capacities.
"""
try:
# Attempted import.
import django
# Now try and use it.
from django.conf import settings
settings.DEBUG
# Detected connector.
return True
except ImportError:
# Failed to import django; or, we don't have a proper settings
# file.
return False
Fix is_available method in django; proper exception handling for use checking.
|
# -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals, division
__all__ = [
]
def is_available(*capacities):
"""
Detects if the environment is available for use in
the (optionally) specified capacities.
"""
try:
# Attempted import.
import django
except ImportError:
# Failed to import django
return False
# Import the exception we might get
from django.core.exceptions import ImproperlyConfigured
try:
# Now try and use it.
from django.conf import settings
settings.DEBUG
# Detected connector.
return True
except ImproperlyConfigured:
# We don't have an available settings file; django is actually in use.
return False
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals, division
__all__ = [
]
def is_available(*capacities):
"""
Detects if the environment is available for use in
the (optionally) specified capacities.
"""
try:
# Attempted import.
import django
# Now try and use it.
from django.conf import settings
settings.DEBUG
# Detected connector.
return True
except ImportError:
# Failed to import django; or, we don't have a proper settings
# file.
return False
<commit_msg>Fix is_available method in django; proper exception handling for use checking.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals, division
__all__ = [
]
def is_available(*capacities):
"""
Detects if the environment is available for use in
the (optionally) specified capacities.
"""
try:
# Attempted import.
import django
except ImportError:
# Failed to import django
return False
# Import the exception we might get
from django.core.exceptions import ImproperlyConfigured
try:
# Now try and use it.
from django.conf import settings
settings.DEBUG
# Detected connector.
return True
except ImproperlyConfigured:
# We don't have an available settings file; django is actually in use.
return False
|
# -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals, division
__all__ = [
]
def is_available(*capacities):
"""
Detects if the environment is available for use in
the (optionally) specified capacities.
"""
try:
# Attempted import.
import django
# Now try and use it.
from django.conf import settings
settings.DEBUG
# Detected connector.
return True
except ImportError:
# Failed to import django; or, we don't have a proper settings
# file.
return False
Fix is_available method in django; proper exception handling for use checking.
# -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals, division
__all__ = [
]
def is_available(*capacities):
"""
Detects if the environment is available for use in
the (optionally) specified capacities.
"""
try:
# Attempted import.
import django
except ImportError:
# Failed to import django
return False
# Import the exception we might get
from django.core.exceptions import ImproperlyConfigured
try:
# Now try and use it.
from django.conf import settings
settings.DEBUG
# Detected connector.
return True
except ImproperlyConfigured:
# We don't have an available settings file; django is actually in use.
return False
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals, division
__all__ = [
]
def is_available(*capacities):
"""
Detects if the environment is available for use in
the (optionally) specified capacities.
"""
try:
# Attempted import.
import django
# Now try and use it.
from django.conf import settings
settings.DEBUG
# Detected connector.
return True
except ImportError:
# Failed to import django; or, we don't have a proper settings
# file.
return False
<commit_msg>Fix is_available method in django; proper exception handling for use checking.<commit_after># -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals, division
__all__ = [
]
def is_available(*capacities):
"""
Detects if the environment is available for use in
the (optionally) specified capacities.
"""
try:
# Attempted import.
import django
except ImportError:
# Failed to import django
return False
# Import the exception we might get
from django.core.exceptions import ImproperlyConfigured
try:
# Now try and use it.
from django.conf import settings
settings.DEBUG
# Detected connector.
return True
except ImproperlyConfigured:
# We don't have an available settings file; django is actually in use.
return False
|
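The fix in this record separates two failure modes that the original single try/except conflated: Django not being installed at all, and Django being installed without a usable settings module. A standalone sketch of that probe (the function name is illustrative):

def django_is_available():
    # Distinguish "Django is not installed" from "Django is installed but no
    # settings module has been configured yet".
    try:
        import django  # noqa: F401
    except ImportError:
        return False

    from django.core.exceptions import ImproperlyConfigured
    try:
        from django.conf import settings
        settings.DEBUG  # touching any setting forces configuration
        return True
    except ImproperlyConfigured:
        return False

print(django_is_available())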
a31a46053df5e0b86b07b95ef5f460dcb2c12f5f
|
poppy/transport/app.py
|
poppy/transport/app.py
|
# Copyright (c) 2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""WSGI callable for WSGI containers
This app should be used by external WSGI
containers. For example:
$ gunicorn poppy.transport.app:app
NOTE: As for external containers, it is necessary
to put config files in the standard paths. There's
no common way to specify / pass configuration files
to the WSGI app when it is called from other apps.
"""
from oslo_config import cfg
from poppy import bootstrap
conf = cfg.CONF
conf(project='poppy', prog='poppy', args=[])
app = bootstrap.Bootstrap(conf).transport.app
|
# Copyright (c) 2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""WSGI callable for WSGI containers
This app should be used by external WSGI
containers. For example:
$ gunicorn poppy.transport.app:app
NOTE: As for external containers, it is necessary
to put config files in the standard paths. There's
no common way to specify / pass configuration files
to the WSGI app when it is called from other apps.
"""
import os
from oslo_config import cfg
from poppy import bootstrap
conf = cfg.CONF
conf(project='poppy', prog='poppy', args=[])
if os.environ.get('POPPY_CONFIG_FILE') is not None:
conf.default_config_files.insert(os.environ.get('POPPY_CONFIG_FILE'), 0)
app = bootstrap.Bootstrap(conf).transport.app
|
Add extra config file env variable option
|
Add extra config file env variable option
Change-Id: Ic88b442098eff0c2e3a8cc3cb527fa3d29f085ea
|
Python
|
apache-2.0
|
stackforge/poppy,stackforge/poppy,openstack/poppy,openstack/poppy,openstack/poppy,stackforge/poppy
|
# Copyright (c) 2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""WSGI callable for WSGI containers
This app should be used by external WSGI
containers. For example:
$ gunicorn poppy.transport.app:app
NOTE: As for external containers, it is necessary
to put config files in the standard paths. There's
no common way to specify / pass configuration files
to the WSGI app when it is called from other apps.
"""
from oslo_config import cfg
from poppy import bootstrap
conf = cfg.CONF
conf(project='poppy', prog='poppy', args=[])
app = bootstrap.Bootstrap(conf).transport.app
Add extra config file env variable option
Change-Id: Ic88b442098eff0c2e3a8cc3cb527fa3d29f085ea
|
# Copyright (c) 2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""WSGI callable for WSGI containers
This app should be used by external WSGI
containers. For example:
$ gunicorn poppy.transport.app:app
NOTE: As for external containers, it is necessary
to put config files in the standard paths. There's
no common way to specify / pass configuration files
to the WSGI app when it is called from other apps.
"""
import os
from oslo_config import cfg
from poppy import bootstrap
conf = cfg.CONF
conf(project='poppy', prog='poppy', args=[])
if os.environ.get('POPPY_CONFIG_FILE') is not None:
conf.default_config_files.insert(os.environ.get('POPPY_CONFIG_FILE'), 0)
app = bootstrap.Bootstrap(conf).transport.app
|
<commit_before># Copyright (c) 2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""WSGI callable for WSGI containers
This app should be used by external WSGI
containers. For example:
$ gunicorn poppy.transport.app:app
NOTE: As for external containers, it is necessary
to put config files in the standard paths. There's
no common way to specify / pass configuration files
to the WSGI app when it is called from other apps.
"""
from oslo_config import cfg
from poppy import bootstrap
conf = cfg.CONF
conf(project='poppy', prog='poppy', args=[])
app = bootstrap.Bootstrap(conf).transport.app
<commit_msg>Add extra config file env variable option
Change-Id: Ic88b442098eff0c2e3a8cc3cb527fa3d29f085ea<commit_after>
|
# Copyright (c) 2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""WSGI callable for WSGI containers
This app should be used by external WSGI
containers. For example:
$ gunicorn poppy.transport.app:app
NOTE: As for external containers, it is necessary
to put config files in the standard paths. There's
no common way to specify / pass configuration files
to the WSGI app when it is called from other apps.
"""
import os
from oslo_config import cfg
from poppy import bootstrap
conf = cfg.CONF
conf(project='poppy', prog='poppy', args=[])
if os.environ.get('POPPY_CONFIG_FILE') is not None:
conf.default_config_files.insert(os.environ.get('POPPY_CONFIG_FILE'), 0)
app = bootstrap.Bootstrap(conf).transport.app
|
# Copyright (c) 2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""WSGI callable for WSGI containers
This app should be used by external WSGI
containers. For example:
$ gunicorn poppy.transport.app:app
NOTE: As for external containers, it is necessary
to put config files in the standard paths. There's
no common way to specify / pass configuration files
to the WSGI app when it is called from other apps.
"""
from oslo_config import cfg
from poppy import bootstrap
conf = cfg.CONF
conf(project='poppy', prog='poppy', args=[])
app = bootstrap.Bootstrap(conf).transport.app
Add extra config file env variable option
Change-Id: Ic88b442098eff0c2e3a8cc3cb527fa3d29f085ea
# Copyright (c) 2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""WSGI callable for WSGI containers
This app should be used by external WSGI
containers. For example:
$ gunicorn poppy.transport.app:app
NOTE: As for external containers, it is necessary
to put config files in the standard paths. There's
no common way to specify / pass configuration files
to the WSGI app when it is called from other apps.
"""
import os
from oslo_config import cfg
from poppy import bootstrap
conf = cfg.CONF
conf(project='poppy', prog='poppy', args=[])
if os.environ.get('POPPY_CONFIG_FILE') is not None:
conf.default_config_files.insert(os.environ.get('POPPY_CONFIG_FILE'), 0)
app = bootstrap.Bootstrap(conf).transport.app
|
<commit_before># Copyright (c) 2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""WSGI callable for WSGI containers
This app should be used by external WSGI
containers. For example:
$ gunicorn poppy.transport.app:app
NOTE: As for external containers, it is necessary
to put config files in the standard paths. There's
no common way to specify / pass configuration files
to the WSGI app when it is called from other apps.
"""
from oslo_config import cfg
from poppy import bootstrap
conf = cfg.CONF
conf(project='poppy', prog='poppy', args=[])
app = bootstrap.Bootstrap(conf).transport.app
<commit_msg>Add extra config file env variable option
Change-Id: Ic88b442098eff0c2e3a8cc3cb527fa3d29f085ea<commit_after># Copyright (c) 2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""WSGI callable for WSGI containers
This app should be used by external WSGI
containers. For example:
$ gunicorn poppy.transport.app:app
NOTE: As for external containers, it is necessary
to put config files in the standard paths. There's
no common way to specify / pass configuration files
to the WSGI app when it is called from other apps.
"""
import os
from oslo_config import cfg
from poppy import bootstrap
conf = cfg.CONF
conf(project='poppy', prog='poppy', args=[])
if os.environ.get('POPPY_CONFIG_FILE') is not None:
conf.default_config_files.insert(os.environ.get('POPPY_CONFIG_FILE'), 0)
app = bootstrap.Bootstrap(conf).transport.app
|
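The intent of this record is to let an operator prepend one extra configuration file via an environment variable. The plain-Python sketch below shows that intent only; it does not reproduce the oslo.config API, and the default search path is a made-up example.

import os

DEFAULT_CONFIG_FILES = ['/etc/poppy/poppy.conf']  # hypothetical default search path

def config_files():
    # Let an operator prepend one extra config file via an environment variable.
    files = list(DEFAULT_CONFIG_FILES)
    extra = os.environ.get('POPPY_CONFIG_FILE')
    if extra is not None:
        files.insert(0, extra)  # list.insert(index, value): index comes first
    return files

print(config_files())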
53a93d4d1c0029e5d616e225b1b86672b1e0f7c8
|
falafel/mappers/hostname.py
|
falafel/mappers/hostname.py
|
from .. import Mapper, mapper
@mapper("facts")
@mapper("hostname")
class Hostname(Mapper):
def parse_content(self, content):
fqdn = None
if len(content) == 1:
fqdn = content[0].strip()
elif len(content) > 1:
for line in content:
if line.startswith('fqdn'):
fqdn = line.split()[-1]
self.fqdn = fqdn
self.hostname = fqdn.split(".")[0] if fqdn else None
self.domain = ".".join(fqdn.split(".")[1:]) if fqdn else None
|
from .. import Mapper, mapper
@mapper("hostname")
class Hostname(Mapper):
"""Class for parsing ``hostname`` command output.
Attributes:
fqdn: The fully qualified domain name of the host. The same to
``hostname`` when domain part is not set.
hostname: The hostname.
domain: The domain get from the fqdn.
"""
def parse_content(self, content):
raw = None
if len(content) == 1:
raw = content[0].strip()
self.fqdn = raw
self.hostname = raw.split(".")[0] if raw else None
self.domain = ".".join(raw.split(".")[1:]) if raw else None
|
Remove the decorate `facts` from mapper `Hostname`
|
Remove the decorate `facts` from mapper `Hostname`
- And update the class comment
|
Python
|
apache-2.0
|
RedHatInsights/insights-core,RedHatInsights/insights-core
|
from .. import Mapper, mapper
@mapper("facts")
@mapper("hostname")
class Hostname(Mapper):
def parse_content(self, content):
fqdn = None
if len(content) == 1:
fqdn = content[0].strip()
elif len(content) > 1:
for line in content:
if line.startswith('fqdn'):
fqdn = line.split()[-1]
self.fqdn = fqdn
self.hostname = fqdn.split(".")[0] if fqdn else None
self.domain = ".".join(fqdn.split(".")[1:]) if fqdn else None
Remove the decorate `facts` from mapper `Hostname`
- And update the class comment
|
from .. import Mapper, mapper
@mapper("hostname")
class Hostname(Mapper):
"""Class for parsing ``hostname`` command output.
Attributes:
fqdn: The fully qualified domain name of the host. The same to
``hostname`` when domain part is not set.
hostname: The hostname.
domain: The domain get from the fqdn.
"""
def parse_content(self, content):
raw = None
if len(content) == 1:
raw = content[0].strip()
self.fqdn = raw
self.hostname = raw.split(".")[0] if raw else None
self.domain = ".".join(raw.split(".")[1:]) if raw else None
|
<commit_before>from .. import Mapper, mapper
@mapper("facts")
@mapper("hostname")
class Hostname(Mapper):
def parse_content(self, content):
fqdn = None
if len(content) == 1:
fqdn = content[0].strip()
elif len(content) > 1:
for line in content:
if line.startswith('fqdn'):
fqdn = line.split()[-1]
self.fqdn = fqdn
self.hostname = fqdn.split(".")[0] if fqdn else None
self.domain = ".".join(fqdn.split(".")[1:]) if fqdn else None
<commit_msg>Remove the decorate `facts` from mapper `Hostname`
- And update the class comment<commit_after>
|
from .. import Mapper, mapper
@mapper("hostname")
class Hostname(Mapper):
"""Class for parsing ``hostname`` command output.
Attributes:
fqdn: The fully qualified domain name of the host. The same to
``hostname`` when domain part is not set.
hostname: The hostname.
domain: The domain get from the fqdn.
"""
def parse_content(self, content):
raw = None
if len(content) == 1:
raw = content[0].strip()
self.fqdn = raw
self.hostname = raw.split(".")[0] if raw else None
self.domain = ".".join(raw.split(".")[1:]) if raw else None
|
from .. import Mapper, mapper
@mapper("facts")
@mapper("hostname")
class Hostname(Mapper):
def parse_content(self, content):
fqdn = None
if len(content) == 1:
fqdn = content[0].strip()
elif len(content) > 1:
for line in content:
if line.startswith('fqdn'):
fqdn = line.split()[-1]
self.fqdn = fqdn
self.hostname = fqdn.split(".")[0] if fqdn else None
self.domain = ".".join(fqdn.split(".")[1:]) if fqdn else None
Remove the decorate `facts` from mapper `Hostname`
- And update the class comment
from .. import Mapper, mapper
@mapper("hostname")
class Hostname(Mapper):
"""Class for parsing ``hostname`` command output.
Attributes:
fqdn: The fully qualified domain name of the host. The same to
``hostname`` when domain part is not set.
hostname: The hostname.
domain: The domain get from the fqdn.
"""
def parse_content(self, content):
raw = None
if len(content) == 1:
raw = content[0].strip()
self.fqdn = raw
self.hostname = raw.split(".")[0] if raw else None
self.domain = ".".join(raw.split(".")[1:]) if raw else None
|
<commit_before>from .. import Mapper, mapper
@mapper("facts")
@mapper("hostname")
class Hostname(Mapper):
def parse_content(self, content):
fqdn = None
if len(content) == 1:
fqdn = content[0].strip()
elif len(content) > 1:
for line in content:
if line.startswith('fqdn'):
fqdn = line.split()[-1]
self.fqdn = fqdn
self.hostname = fqdn.split(".")[0] if fqdn else None
self.domain = ".".join(fqdn.split(".")[1:]) if fqdn else None
<commit_msg>Remove the decorate `facts` from mapper `Hostname`
- And update the class comment<commit_after>from .. import Mapper, mapper
@mapper("hostname")
class Hostname(Mapper):
"""Class for parsing ``hostname`` command output.
Attributes:
fqdn: The fully qualified domain name of the host. The same to
``hostname`` when domain part is not set.
hostname: The hostname.
domain: The domain get from the fqdn.
"""
def parse_content(self, content):
raw = None
if len(content) == 1:
raw = content[0].strip()
self.fqdn = raw
self.hostname = raw.split(".")[0] if raw else None
self.domain = ".".join(raw.split(".")[1:]) if raw else None
|
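The parsing this record settles on splits a fully qualified domain name into its hostname and domain parts. A plain-Python sketch of that logic, with a hypothetical helper name:

def split_fqdn(raw):
    # Mirrors the parsing in the mapper above: "www.example.com" ->
    # fqdn "www.example.com", hostname "www", domain "example.com".
    fqdn = raw.strip() if raw else None
    hostname = fqdn.split(".")[0] if fqdn else None
    domain = ".".join(fqdn.split(".")[1:]) if fqdn else None
    return fqdn, hostname, domain

print(split_fqdn("www.example.com"))  # ('www.example.com', 'www', 'example.com')
print(split_fqdn("localhost"))        # ('localhost', 'localhost', '')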
a76c7ddc80c3896dd4397b4713de267001706722
|
thefederation/migrations/0020_remove_port_from_node_hostnames.py
|
thefederation/migrations/0020_remove_port_from_node_hostnames.py
|
# Generated by Django 2.0.13 on 2019-12-29 21:11
from django.db import migrations
from django.db.migrations import RunPython
def forward(apps, schema):
Node = apps.get_model("thefederation", "Node")
for node in Node.objects.filter(host__contains=":"):
node.host = node.host.split(":")[0]
if node.name.split(':')[0] == node.host:
node.name = node.host
Node.objects.filter(id=node.id).update(host=node.host, name=node.name)
class Migration(migrations.Migration):
dependencies = [
('thefederation', '0019_add_some_defaults_for_node_organization_fields'),
]
operations = [
RunPython(forward, RunPython.noop)
]
|
# Generated by Django 2.0.13 on 2019-12-29 21:11
from django.db import migrations, IntegrityError
from django.db.migrations import RunPython
def forward(apps, schema):
Node = apps.get_model("thefederation", "Node")
for node in Node.objects.filter(host__contains=":"):
node.host = node.host.split(":")[0]
if node.name.split(':')[0] == node.host:
node.name = node.host
try:
Node.objects.filter(id=node.id).update(host=node.host, name=node.name)
except IntegrityError:
pass
class Migration(migrations.Migration):
dependencies = [
('thefederation', '0019_add_some_defaults_for_node_organization_fields'),
]
operations = [
RunPython(forward, RunPython.noop)
]
|
Make port removing migrating a bit less flaky
|
Make port removing migrating a bit less flaky
|
Python
|
agpl-3.0
|
jaywink/the-federation.info,jaywink/the-federation.info,jaywink/the-federation.info
|
# Generated by Django 2.0.13 on 2019-12-29 21:11
from django.db import migrations
from django.db.migrations import RunPython
def forward(apps, schema):
Node = apps.get_model("thefederation", "Node")
for node in Node.objects.filter(host__contains=":"):
node.host = node.host.split(":")[0]
if node.name.split(':')[0] == node.host:
node.name = node.host
Node.objects.filter(id=node.id).update(host=node.host, name=node.name)
class Migration(migrations.Migration):
dependencies = [
('thefederation', '0019_add_some_defaults_for_node_organization_fields'),
]
operations = [
RunPython(forward, RunPython.noop)
]
Make port removing migrating a bit less flaky
|
# Generated by Django 2.0.13 on 2019-12-29 21:11
from django.db import migrations, IntegrityError
from django.db.migrations import RunPython
def forward(apps, schema):
Node = apps.get_model("thefederation", "Node")
for node in Node.objects.filter(host__contains=":"):
node.host = node.host.split(":")[0]
if node.name.split(':')[0] == node.host:
node.name = node.host
try:
Node.objects.filter(id=node.id).update(host=node.host, name=node.name)
except IntegrityError:
pass
class Migration(migrations.Migration):
dependencies = [
('thefederation', '0019_add_some_defaults_for_node_organization_fields'),
]
operations = [
RunPython(forward, RunPython.noop)
]
|
<commit_before># Generated by Django 2.0.13 on 2019-12-29 21:11
from django.db import migrations
from django.db.migrations import RunPython
def forward(apps, schema):
Node = apps.get_model("thefederation", "Node")
for node in Node.objects.filter(host__contains=":"):
node.host = node.host.split(":")[0]
if node.name.split(':')[0] == node.host:
node.name = node.host
Node.objects.filter(id=node.id).update(host=node.host, name=node.name)
class Migration(migrations.Migration):
dependencies = [
('thefederation', '0019_add_some_defaults_for_node_organization_fields'),
]
operations = [
RunPython(forward, RunPython.noop)
]
<commit_msg>Make port removing migrating a bit less flaky<commit_after>
|
# Generated by Django 2.0.13 on 2019-12-29 21:11
from django.db import migrations, IntegrityError
from django.db.migrations import RunPython
def forward(apps, schema):
Node = apps.get_model("thefederation", "Node")
for node in Node.objects.filter(host__contains=":"):
node.host = node.host.split(":")[0]
if node.name.split(':')[0] == node.host:
node.name = node.host
try:
Node.objects.filter(id=node.id).update(host=node.host, name=node.name)
except IntegrityError:
pass
class Migration(migrations.Migration):
dependencies = [
('thefederation', '0019_add_some_defaults_for_node_organization_fields'),
]
operations = [
RunPython(forward, RunPython.noop)
]
|
# Generated by Django 2.0.13 on 2019-12-29 21:11
from django.db import migrations
from django.db.migrations import RunPython
def forward(apps, schema):
Node = apps.get_model("thefederation", "Node")
for node in Node.objects.filter(host__contains=":"):
node.host = node.host.split(":")[0]
if node.name.split(':')[0] == node.host:
node.name = node.host
Node.objects.filter(id=node.id).update(host=node.host, name=node.name)
class Migration(migrations.Migration):
dependencies = [
('thefederation', '0019_add_some_defaults_for_node_organization_fields'),
]
operations = [
RunPython(forward, RunPython.noop)
]
Make port removing migrating a bit less flaky
# Generated by Django 2.0.13 on 2019-12-29 21:11
from django.db import migrations, IntegrityError
from django.db.migrations import RunPython
def forward(apps, schema):
Node = apps.get_model("thefederation", "Node")
for node in Node.objects.filter(host__contains=":"):
node.host = node.host.split(":")[0]
if node.name.split(':')[0] == node.host:
node.name = node.host
try:
Node.objects.filter(id=node.id).update(host=node.host, name=node.name)
except IntegrityError:
pass
class Migration(migrations.Migration):
dependencies = [
('thefederation', '0019_add_some_defaults_for_node_organization_fields'),
]
operations = [
RunPython(forward, RunPython.noop)
]
|
<commit_before># Generated by Django 2.0.13 on 2019-12-29 21:11
from django.db import migrations
from django.db.migrations import RunPython
def forward(apps, schema):
Node = apps.get_model("thefederation", "Node")
for node in Node.objects.filter(host__contains=":"):
node.host = node.host.split(":")[0]
if node.name.split(':')[0] == node.host:
node.name = node.host
Node.objects.filter(id=node.id).update(host=node.host, name=node.name)
class Migration(migrations.Migration):
dependencies = [
('thefederation', '0019_add_some_defaults_for_node_organization_fields'),
]
operations = [
RunPython(forward, RunPython.noop)
]
<commit_msg>Make port removing migrating a bit less flaky<commit_after># Generated by Django 2.0.13 on 2019-12-29 21:11
from django.db import migrations, IntegrityError
from django.db.migrations import RunPython
def forward(apps, schema):
Node = apps.get_model("thefederation", "Node")
for node in Node.objects.filter(host__contains=":"):
node.host = node.host.split(":")[0]
if node.name.split(':')[0] == node.host:
node.name = node.host
try:
Node.objects.filter(id=node.id).update(host=node.host, name=node.name)
except IntegrityError:
pass
class Migration(migrations.Migration):
dependencies = [
('thefederation', '0019_add_some_defaults_for_node_organization_fields'),
]
operations = [
RunPython(forward, RunPython.noop)
]
|
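The guard added in this record keeps a data migration from aborting when stripping the port produces a host that already exists on another row. A sketch of that guarded-update pattern, assuming a Node model with host and name fields as in the record (this is not a complete migration file):

from django.db import IntegrityError

def strip_ports(Node):
    # Node is the historical model a data migration hands to RunPython.
    for node in Node.objects.filter(host__contains=":"):
        host = node.host.split(":")[0]
        name = host if node.name.split(":")[0] == host else node.name
        try:
            Node.objects.filter(id=node.id).update(host=host, name=name)
        except IntegrityError:
            # Another row already claims this host; skip it rather than
            # aborting the whole migration.
            pass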
f85a252b44a30f8b793e77c3bf7188ea8058217a
|
keras/mixed_precision/__init__.py
|
keras/mixed_precision/__init__.py
|
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras mixed precision API.
See [the mixed precision guide](
https://www.tensorflow.org/guide/keras/mixed_precision) to learn how to
use the API.
"""
|
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras mixed precision API.
See [the mixed precision guide](
https://www.tensorflow.org/guide/keras/mixed_precision) to learn how to
use the API.
"""
from keras.mixed_precision.loss_scale_optimizer import LossScaleOptimizer
from keras.mixed_precision.policy import global_policy
from keras.mixed_precision.policy import Policy
from keras.mixed_precision.policy import set_global_policy
|
Make mixed precision API available in `keras.mixed_precision`.
|
Make mixed precision API available in `keras.mixed_precision`.
PiperOrigin-RevId: 433886558
|
Python
|
apache-2.0
|
keras-team/keras,keras-team/keras
|
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras mixed precision API.
See [the mixed precision guide](
https://www.tensorflow.org/guide/keras/mixed_precision) to learn how to
use the API.
"""
Make mixed precision API available in `keras.mixed_precision`.
PiperOrigin-RevId: 433886558
|
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras mixed precision API.
See [the mixed precision guide](
https://www.tensorflow.org/guide/keras/mixed_precision) to learn how to
use the API.
"""
from keras.mixed_precision.loss_scale_optimizer import LossScaleOptimizer
from keras.mixed_precision.policy import global_policy
from keras.mixed_precision.policy import Policy
from keras.mixed_precision.policy import set_global_policy
|
<commit_before># Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras mixed precision API.
See [the mixed precision guide](
https://www.tensorflow.org/guide/keras/mixed_precision) to learn how to
use the API.
"""
<commit_msg>Make mixed precision API available in `keras.mixed_precision`.
PiperOrigin-RevId: 433886558<commit_after>
|
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras mixed precision API.
See [the mixed precision guide](
https://www.tensorflow.org/guide/keras/mixed_precision) to learn how to
use the API.
"""
from keras.mixed_precision.loss_scale_optimizer import LossScaleOptimizer
from keras.mixed_precision.policy import global_policy
from keras.mixed_precision.policy import Policy
from keras.mixed_precision.policy import set_global_policy
|
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras mixed precision API.
See [the mixed precision guide](
https://www.tensorflow.org/guide/keras/mixed_precision) to learn how to
use the API.
"""
Make mixed precision API available in `keras.mixed_precision`.
PiperOrigin-RevId: 433886558
# Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras mixed precision API.
See [the mixed precision guide](
https://www.tensorflow.org/guide/keras/mixed_precision) to learn how to
use the API.
"""
from keras.mixed_precision.loss_scale_optimizer import LossScaleOptimizer
from keras.mixed_precision.policy import global_policy
from keras.mixed_precision.policy import Policy
from keras.mixed_precision.policy import set_global_policy
|
<commit_before># Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras mixed precision API.
See [the mixed precision guide](
https://www.tensorflow.org/guide/keras/mixed_precision) to learn how to
use the API.
"""
<commit_msg>Make mixed precision API available in `keras.mixed_precision`.
PiperOrigin-RevId: 433886558<commit_after># Copyright 2020 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras mixed precision API.
See [the mixed precision guide](
https://www.tensorflow.org/guide/keras/mixed_precision) to learn how to
use the API.
"""
from keras.mixed_precision.loss_scale_optimizer import LossScaleOptimizer
from keras.mixed_precision.policy import global_policy
from keras.mixed_precision.policy import Policy
from keras.mixed_precision.policy import set_global_policy
|
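Re-exporting these symbols from the package __init__ is what makes them reachable directly under keras.mixed_precision. A usage sketch under the assumption that a Keras build containing this change is installed (mixed_float16 mainly benefits GPUs/TPUs and may warn on CPU):

from keras import mixed_precision

mixed_precision.set_global_policy('mixed_float16')
print(mixed_precision.global_policy())  # expected: <Policy "mixed_float16">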
22465e0ae238a6584a8549796f4dfbae21db73dc
|
ooni/tests/test_geoip.py
|
ooni/tests/test_geoip.py
|
import os
from twisted.internet import defer
from twisted.trial import unittest
from ooni.tests import is_internet_connected
from ooni.settings import config
from ooni import geoip
class TestGeoIP(unittest.TestCase):
def test_ip_to_location(self):
location = geoip.IPToLocation('8.8.8.8')
assert 'countrycode' in location
assert 'asn' in location
assert 'city' in location
@defer.inlineCallbacks
def test_probe_ip(self):
if not is_internet_connected():
self.skipTest(
"You must be connected to the internet to run this test"
)
probe_ip = geoip.ProbeIP()
res = yield probe_ip.lookup()
assert len(res.split('.')) == 4
|
from twisted.internet import defer
from twisted.trial import unittest
from ooni.tests import is_internet_connected
from ooni import geoip
class TestGeoIP(unittest.TestCase):
def test_ip_to_location(self):
location = geoip.IPToLocation('8.8.8.8')
assert 'countrycode' in location
assert 'asn' in location
assert 'city' in location
@defer.inlineCallbacks
def test_probe_ip(self):
if not is_internet_connected():
self.skipTest(
"You must be connected to the internet to run this test"
)
probe_ip = geoip.ProbeIP()
res = yield probe_ip.lookup()
assert len(res.split('.')) == 4
def test_geoip_database_version(self):
version = geoip.database_version()
assert 'GeoIP' in version.keys()
assert 'GeoIPASNum' in version.keys()
assert 'GeoLiteCity' in version.keys()
assert len(version['GeoIP']['sha256']) == 64
assert isinstance(version['GeoIP']['timestamp'], float)
assert len(version['GeoIPASNum']['sha256']) == 64
assert isinstance(version['GeoIPASNum']['timestamp'], float)
|
Add unittests for geoip database version
|
Add unittests for geoip database version
|
Python
|
bsd-2-clause
|
juga0/ooni-probe,0xPoly/ooni-probe,Karthikeyan-kkk/ooni-probe,juga0/ooni-probe,0xPoly/ooni-probe,Karthikeyan-kkk/ooni-probe,juga0/ooni-probe,0xPoly/ooni-probe,lordappsec/ooni-probe,kdmurray91/ooni-probe,lordappsec/ooni-probe,kdmurray91/ooni-probe,Karthikeyan-kkk/ooni-probe,Karthikeyan-kkk/ooni-probe,lordappsec/ooni-probe,juga0/ooni-probe,kdmurray91/ooni-probe,0xPoly/ooni-probe,lordappsec/ooni-probe,kdmurray91/ooni-probe
|
import os
from twisted.internet import defer
from twisted.trial import unittest
from ooni.tests import is_internet_connected
from ooni.settings import config
from ooni import geoip
class TestGeoIP(unittest.TestCase):
def test_ip_to_location(self):
location = geoip.IPToLocation('8.8.8.8')
assert 'countrycode' in location
assert 'asn' in location
assert 'city' in location
@defer.inlineCallbacks
def test_probe_ip(self):
if not is_internet_connected():
self.skipTest(
"You must be connected to the internet to run this test"
)
probe_ip = geoip.ProbeIP()
res = yield probe_ip.lookup()
assert len(res.split('.')) == 4
Add unittests for geoip database version
|
from twisted.internet import defer
from twisted.trial import unittest
from ooni.tests import is_internet_connected
from ooni import geoip
class TestGeoIP(unittest.TestCase):
def test_ip_to_location(self):
location = geoip.IPToLocation('8.8.8.8')
assert 'countrycode' in location
assert 'asn' in location
assert 'city' in location
@defer.inlineCallbacks
def test_probe_ip(self):
if not is_internet_connected():
self.skipTest(
"You must be connected to the internet to run this test"
)
probe_ip = geoip.ProbeIP()
res = yield probe_ip.lookup()
assert len(res.split('.')) == 4
def test_geoip_database_version(self):
version = geoip.database_version()
assert 'GeoIP' in version.keys()
assert 'GeoIPASNum' in version.keys()
assert 'GeoLiteCity' in version.keys()
assert len(version['GeoIP']['sha256']) == 64
assert isinstance(version['GeoIP']['timestamp'], float)
assert len(version['GeoIPASNum']['sha256']) == 64
assert isinstance(version['GeoIPASNum']['timestamp'], float)
|
<commit_before>import os
from twisted.internet import defer
from twisted.trial import unittest
from ooni.tests import is_internet_connected
from ooni.settings import config
from ooni import geoip
class TestGeoIP(unittest.TestCase):
def test_ip_to_location(self):
location = geoip.IPToLocation('8.8.8.8')
assert 'countrycode' in location
assert 'asn' in location
assert 'city' in location
@defer.inlineCallbacks
def test_probe_ip(self):
if not is_internet_connected():
self.skipTest(
"You must be connected to the internet to run this test"
)
probe_ip = geoip.ProbeIP()
res = yield probe_ip.lookup()
assert len(res.split('.')) == 4
<commit_msg>Add unittests for geoip database version<commit_after>
|
from twisted.internet import defer
from twisted.trial import unittest
from ooni.tests import is_internet_connected
from ooni import geoip
class TestGeoIP(unittest.TestCase):
def test_ip_to_location(self):
location = geoip.IPToLocation('8.8.8.8')
assert 'countrycode' in location
assert 'asn' in location
assert 'city' in location
@defer.inlineCallbacks
def test_probe_ip(self):
if not is_internet_connected():
self.skipTest(
"You must be connected to the internet to run this test"
)
probe_ip = geoip.ProbeIP()
res = yield probe_ip.lookup()
assert len(res.split('.')) == 4
def test_geoip_database_version(self):
version = geoip.database_version()
assert 'GeoIP' in version.keys()
assert 'GeoIPASNum' in version.keys()
assert 'GeoLiteCity' in version.keys()
assert len(version['GeoIP']['sha256']) == 64
assert isinstance(version['GeoIP']['timestamp'], float)
assert len(version['GeoIPASNum']['sha256']) == 64
assert isinstance(version['GeoIPASNum']['timestamp'], float)
|
import os
from twisted.internet import defer
from twisted.trial import unittest
from ooni.tests import is_internet_connected
from ooni.settings import config
from ooni import geoip
class TestGeoIP(unittest.TestCase):
def test_ip_to_location(self):
location = geoip.IPToLocation('8.8.8.8')
assert 'countrycode' in location
assert 'asn' in location
assert 'city' in location
@defer.inlineCallbacks
def test_probe_ip(self):
if not is_internet_connected():
self.skipTest(
"You must be connected to the internet to run this test"
)
probe_ip = geoip.ProbeIP()
res = yield probe_ip.lookup()
assert len(res.split('.')) == 4
Add unittests for geoip database version
from twisted.internet import defer
from twisted.trial import unittest
from ooni.tests import is_internet_connected
from ooni import geoip
class TestGeoIP(unittest.TestCase):
def test_ip_to_location(self):
location = geoip.IPToLocation('8.8.8.8')
assert 'countrycode' in location
assert 'asn' in location
assert 'city' in location
@defer.inlineCallbacks
def test_probe_ip(self):
if not is_internet_connected():
self.skipTest(
"You must be connected to the internet to run this test"
)
probe_ip = geoip.ProbeIP()
res = yield probe_ip.lookup()
assert len(res.split('.')) == 4
def test_geoip_database_version(self):
version = geoip.database_version()
assert 'GeoIP' in version.keys()
assert 'GeoIPASNum' in version.keys()
assert 'GeoLiteCity' in version.keys()
assert len(version['GeoIP']['sha256']) == 64
assert isinstance(version['GeoIP']['timestamp'], float)
assert len(version['GeoIPASNum']['sha256']) == 64
assert isinstance(version['GeoIPASNum']['timestamp'], float)
|
<commit_before>import os
from twisted.internet import defer
from twisted.trial import unittest
from ooni.tests import is_internet_connected
from ooni.settings import config
from ooni import geoip
class TestGeoIP(unittest.TestCase):
def test_ip_to_location(self):
location = geoip.IPToLocation('8.8.8.8')
assert 'countrycode' in location
assert 'asn' in location
assert 'city' in location
@defer.inlineCallbacks
def test_probe_ip(self):
if not is_internet_connected():
self.skipTest(
"You must be connected to the internet to run this test"
)
probe_ip = geoip.ProbeIP()
res = yield probe_ip.lookup()
assert len(res.split('.')) == 4
<commit_msg>Add unittests for geoip database version<commit_after>
from twisted.internet import defer
from twisted.trial import unittest
from ooni.tests import is_internet_connected
from ooni import geoip
class TestGeoIP(unittest.TestCase):
def test_ip_to_location(self):
location = geoip.IPToLocation('8.8.8.8')
assert 'countrycode' in location
assert 'asn' in location
assert 'city' in location
@defer.inlineCallbacks
def test_probe_ip(self):
if not is_internet_connected():
self.skipTest(
"You must be connected to the internet to run this test"
)
probe_ip = geoip.ProbeIP()
res = yield probe_ip.lookup()
assert len(res.split('.')) == 4
def test_geoip_database_version(self):
version = geoip.database_version()
assert 'GeoIP' in version.keys()
assert 'GeoIPASNum' in version.keys()
assert 'GeoLiteCity' in version.keys()
assert len(version['GeoIP']['sha256']) == 64
assert isinstance(version['GeoIP']['timestamp'], float)
assert len(version['GeoIPASNum']['sha256']) == 64
assert isinstance(version['GeoIPASNum']['timestamp'], float)
|
4b202e540babd4044d5ff311a60e97fb8b9f066a
|
examples/dot_graph.py
|
examples/dot_graph.py
|
# Copyright 2013 Mark Dickinson
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from refcycle import cycles_created_by
class A(object):
pass
def create_cycles():
a, b, c = A(), A(), A()
a.foo = b
b.foo = a
a.bar = c
graph = cycles_created_by(create_cycles)
print graph.to_dot()
|
# Copyright 2013 Mark Dickinson
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from refcycle import cycles_created_by
class A(object):
pass
def create_cycles():
a, b, c = A(), A(), A()
a.foo = b
b.foo = a
a.bar = c
graph = cycles_created_by(create_cycles)
print(graph.to_dot())
|
Fix Python3-incompatible use of print.
|
Fix Python3-incompatible use of print.
|
Python
|
apache-2.0
|
mdickinson/refcycle
|
# Copyright 2013 Mark Dickinson
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from refcycle import cycles_created_by
class A(object):
pass
def create_cycles():
a, b, c = A(), A(), A()
a.foo = b
b.foo = a
a.bar = c
graph = cycles_created_by(create_cycles)
print graph.to_dot()
Fix Python3-incompatible use of print.
|
# Copyright 2013 Mark Dickinson
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from refcycle import cycles_created_by
class A(object):
pass
def create_cycles():
a, b, c = A(), A(), A()
a.foo = b
b.foo = a
a.bar = c
graph = cycles_created_by(create_cycles)
print(graph.to_dot())
|
<commit_before># Copyright 2013 Mark Dickinson
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from refcycle import cycles_created_by
class A(object):
pass
def create_cycles():
a, b, c = A(), A(), A()
a.foo = b
b.foo = a
a.bar = c
graph = cycles_created_by(create_cycles)
print graph.to_dot()
<commit_msg>Fix Python3-incompatible use of print.<commit_after>
|
# Copyright 2013 Mark Dickinson
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from refcycle import cycles_created_by
class A(object):
pass
def create_cycles():
a, b, c = A(), A(), A()
a.foo = b
b.foo = a
a.bar = c
graph = cycles_created_by(create_cycles)
print(graph.to_dot())
|
# Copyright 2013 Mark Dickinson
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from refcycle import cycles_created_by
class A(object):
pass
def create_cycles():
a, b, c = A(), A(), A()
a.foo = b
b.foo = a
a.bar = c
graph = cycles_created_by(create_cycles)
print graph.to_dot()
Fix Python3-incompatible use of print.# Copyright 2013 Mark Dickinson
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from refcycle import cycles_created_by
class A(object):
pass
def create_cycles():
a, b, c = A(), A(), A()
a.foo = b
b.foo = a
a.bar = c
graph = cycles_created_by(create_cycles)
print(graph.to_dot())
|
<commit_before># Copyright 2013 Mark Dickinson
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from refcycle import cycles_created_by
class A(object):
pass
def create_cycles():
a, b, c = A(), A(), A()
a.foo = b
b.foo = a
a.bar = c
graph = cycles_created_by(create_cycles)
print graph.to_dot()
<commit_msg>Fix Python3-incompatible use of print.<commit_after># Copyright 2013 Mark Dickinson
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from refcycle import cycles_created_by
class A(object):
pass
def create_cycles():
a, b, c = A(), A(), A()
a.foo = b
b.foo = a
a.bar = c
graph = cycles_created_by(create_cycles)
print(graph.to_dot())
|
706c2cba61d73a818d389ed709012687e71a9379
|
python/rez/cli/__init__.py
|
python/rez/cli/__init__.py
|
'''
Utilities for cli tools
'''
import sys
def error(msg):
'''
An error, formatted and printed to stderr
'''
sys.__stderr__.write("Error: %s\n" % msg)
def output(msg):
'''
A result, printed to stdout
'''
sys.__stdout__.write("%s\n" % msg)
def redirect_to_stderr(func):
'''
decorator to redirect output to stderr.
This is useful
'''
def wrapper(*args, **kwargs):
try:
# redirect all print statements to stderr
sys.stdout = sys.stderr
return func(*args, **kwargs)
finally:
sys.stdout = sys.__stdout__
wrapper.__name__ = func.__name__
wrapper.__doc__ = func.__doc__
return wrapper
|
'''
Utilities for cli tools
'''
import sys
def error(msg):
'''
An error, formatted and printed to stderr
'''
sys.__stderr__.write("Error: %s\n" % msg)
def output(msg=''):
'''
A result, printed to stdout
'''
sys.__stdout__.write("%s\n" % msg)
def redirect_to_stderr(func):
'''
decorator to redirect output to stderr.
This is useful
'''
def wrapper(*args, **kwargs):
try:
# redirect all print statements to stderr
sys.stdout = sys.stderr
return func(*args, **kwargs)
finally:
sys.stdout = sys.__stdout__
wrapper.__name__ = func.__name__
wrapper.__doc__ = func.__doc__
return wrapper
|
Fix a bug where rez.cli.output() errors when called without args. should produce an empty line of output.
|
Fix a bug where rez.cli.output() errors when called without args. should produce an empty line of output.
|
Python
|
apache-2.0
|
instinct-vfx/rez,nerdvegas/rez,nerdvegas/rez,instinct-vfx/rez
|
'''
Utilities for cli tools
'''
import sys
def error(msg):
'''
An error, formatted and printed to stderr
'''
sys.__stderr__.write("Error: %s\n" % msg)
def output(msg):
'''
A result, printed to stdout
'''
sys.__stdout__.write("%s\n" % msg)
def redirect_to_stderr(func):
'''
decorator to redirect output to stderr.
This is useful
'''
def wrapper(*args, **kwargs):
try:
# redirect all print statements to stderr
sys.stdout = sys.stderr
return func(*args, **kwargs)
finally:
sys.stdout = sys.__stdout__
wrapper.__name__ = func.__name__
wrapper.__doc__ = func.__doc__
return wrapper
Fix a bug where rez.cli.output() errors when called without args. should produce an empty line of output.
|
'''
Utilities for cli tools
'''
import sys
def error(msg):
'''
An error, formatted and printed to stderr
'''
sys.__stderr__.write("Error: %s\n" % msg)
def output(msg=''):
'''
A result, printed to stdout
'''
sys.__stdout__.write("%s\n" % msg)
def redirect_to_stderr(func):
'''
decorator to redirect output to stderr.
This is useful
'''
def wrapper(*args, **kwargs):
try:
# redirect all print statements to stderr
sys.stdout = sys.stderr
return func(*args, **kwargs)
finally:
sys.stdout = sys.__stdout__
wrapper.__name__ = func.__name__
wrapper.__doc__ = func.__doc__
return wrapper
|
<commit_before>'''
Utilities for cli tools
'''
import sys
def error(msg):
'''
An error, formatted and printed to stderr
'''
sys.__stderr__.write("Error: %s\n" % msg)
def output(msg):
'''
A result, printed to stdout
'''
sys.__stdout__.write("%s\n" % msg)
def redirect_to_stderr(func):
'''
decorator to redirect output to stderr.
This is useful
'''
def wrapper(*args, **kwargs):
try:
# redirect all print statements to stderr
sys.stdout = sys.stderr
return func(*args, **kwargs)
finally:
sys.stdout = sys.__stdout__
wrapper.__name__ = func.__name__
wrapper.__doc__ = func.__doc__
return wrapper
<commit_msg>Fix a bug where rez.cli.output() errors when called without args. should produce an empty line of output.<commit_after>
|
'''
Utilities for cli tools
'''
import sys
def error(msg):
'''
An error, formatted and printed to stderr
'''
sys.__stderr__.write("Error: %s\n" % msg)
def output(msg=''):
'''
A result, printed to stdout
'''
sys.__stdout__.write("%s\n" % msg)
def redirect_to_stderr(func):
'''
decorator to redirect output to stderr.
This is useful
'''
def wrapper(*args, **kwargs):
try:
# redirect all print statements to stderr
sys.stdout = sys.stderr
return func(*args, **kwargs)
finally:
sys.stdout = sys.__stdout__
wrapper.__name__ = func.__name__
wrapper.__doc__ = func.__doc__
return wrapper
|
'''
Utilities for cli tools
'''
import sys
def error(msg):
'''
An error, formatted and printed to stderr
'''
sys.__stderr__.write("Error: %s\n" % msg)
def output(msg):
'''
A result, printed to stdout
'''
sys.__stdout__.write("%s\n" % msg)
def redirect_to_stderr(func):
'''
decorator to redirect output to stderr.
This is useful
'''
def wrapper(*args, **kwargs):
try:
# redirect all print statements to stderr
sys.stdout = sys.stderr
return func(*args, **kwargs)
finally:
sys.stdout = sys.__stdout__
wrapper.__name__ = func.__name__
wrapper.__doc__ = func.__doc__
return wrapper
Fix a bug where rez.cli.output() errors when called without args. should produce an empty line of output.'''
Utilities for cli tools
'''
import sys
def error(msg):
'''
An error, formatted and printed to stderr
'''
sys.__stderr__.write("Error: %s\n" % msg)
def output(msg=''):
'''
A result, printed to stdout
'''
sys.__stdout__.write("%s\n" % msg)
def redirect_to_stderr(func):
'''
decorator to redirect output to stderr.
This is useful
'''
def wrapper(*args, **kwargs):
try:
# redirect all print statements to stderr
sys.stdout = sys.stderr
return func(*args, **kwargs)
finally:
sys.stdout = sys.__stdout__
wrapper.__name__ = func.__name__
wrapper.__doc__ = func.__doc__
return wrapper
|
<commit_before>'''
Utilities for cli tools
'''
import sys
def error(msg):
'''
An error, formatted and printed to stderr
'''
sys.__stderr__.write("Error: %s\n" % msg)
def output(msg):
'''
A result, printed to stdout
'''
sys.__stdout__.write("%s\n" % msg)
def redirect_to_stderr(func):
'''
decorator to redirect output to stderr.
This is useful
'''
def wrapper(*args, **kwargs):
try:
# redirect all print statements to stderr
sys.stdout = sys.stderr
return func(*args, **kwargs)
finally:
sys.stdout = sys.__stdout__
wrapper.__name__ = func.__name__
wrapper.__doc__ = func.__doc__
return wrapper
<commit_msg>Fix a bug where rez.cli.output() errors when called without args. should produce an empty line of output.<commit_after>'''
Utilities for cli tools
'''
import sys
def error(msg):
'''
An error, formatted and printed to stderr
'''
sys.__stderr__.write("Error: %s\n" % msg)
def output(msg=''):
'''
A result, printed to stdout
'''
sys.__stdout__.write("%s\n" % msg)
def redirect_to_stderr(func):
'''
decorator to redirect output to stderr.
This is useful
'''
def wrapper(*args, **kwargs):
try:
# redirect all print statements to stderr
sys.stdout = sys.stderr
return func(*args, **kwargs)
finally:
sys.stdout = sys.__stdout__
wrapper.__name__ = func.__name__
wrapper.__doc__ = func.__doc__
return wrapper
|
0413977c6f45799599fbd4f197c3c42ef0d0835f
|
queryexpander/expansion.py
|
queryexpander/expansion.py
|
import numpy as np
import pandas as pd
from typing import Tuple, List
from queryexpander.semantic_similarity import CppSemanticSimilarity
class QueryExpander:
def __init__(self, vocabulary_path: str, vocabulary_length: int, sums_cache_file: str, centroids_file_path: str):
self._words: List[str] = pd.read_csv(
vocabulary_path, sep=' ', quoting=3, header=None, usecols=(0,), na_filter=False).values.squeeze().tolist()
self._vectors: np.ndarray = pd.read_csv(vocabulary_path, sep=' ', quoting=3, header=None,
usecols=range(1, vocabulary_length + 1), na_filter=False,
dtype=np.float32).values
self._similarity = CppSemanticSimilarity(self._words, self._vectors, sums_cache_file)
def generate_sums_cache(self):
self._similarity.generate_sums_cache()
def generate_local_centroids(self, centroids_neighbourhood_size: int):
self._similarity.generate_local_centroids(centroids_neighbourhood_size)
def find_most_similar_words(self, query: List[str], number_of_results: int) -> List[Tuple[str, float]]:
return self._similarity.find_most_similar_words(query, number_of_results)
|
import numpy as np
import pandas as pd
from typing import Tuple, List
from queryexpander.semantic_similarity import CppSemanticSimilarity
class QueryExpander:
def __init__(self, vocabulary_path: str, vocabulary_length: int, sums_cache_file: str, centroids_file_path: str):
self._words: List[str] = pd.read_csv(
vocabulary_path, sep=' ', quoting=3, header=None, usecols=(0,), na_filter=False).values.squeeze().tolist()
self._vectors: np.ndarray = pd.read_csv(vocabulary_path, sep=' ', quoting=3, header=None,
usecols=range(1, vocabulary_length + 1), na_filter=False,
dtype=np.float32).values
self._similarity = CppSemanticSimilarity(self._words, self._vectors, sums_cache_file, centroids_file_path)
def generate_sums_cache(self):
self._similarity.generate_sums_cache()
def generate_local_centroids(self, centroids_neighbourhood_size: int):
self._similarity.generate_local_centroids(centroids_neighbourhood_size)
def find_most_similar_words(self, query: List[str], number_of_results: int) -> List[Tuple[str, float]]:
return self._similarity.find_most_similar_words(query, number_of_results)
|
Fix C++ accelerator constructor invocation
|
Fix C++ accelerator constructor invocation
|
Python
|
mit
|
konraddysput/BioDocumentAnalysis
|
import numpy as np
import pandas as pd
from typing import Tuple, List
from queryexpander.semantic_similarity import CppSemanticSimilarity
class QueryExpander:
def __init__(self, vocabulary_path: str, vocabulary_length: int, sums_cache_file: str, centroids_file_path: str):
self._words: List[str] = pd.read_csv(
vocabulary_path, sep=' ', quoting=3, header=None, usecols=(0,), na_filter=False).values.squeeze().tolist()
self._vectors: np.ndarray = pd.read_csv(vocabulary_path, sep=' ', quoting=3, header=None,
usecols=range(1, vocabulary_length + 1), na_filter=False,
dtype=np.float32).values
self._similarity = CppSemanticSimilarity(self._words, self._vectors, sums_cache_file)
def generate_sums_cache(self):
self._similarity.generate_sums_cache()
def generate_local_centroids(self, centroids_neighbourhood_size: int):
self._similarity.generate_local_centroids(centroids_neighbourhood_size)
def find_most_similar_words(self, query: List[str], number_of_results: int) -> List[Tuple[str, float]]:
return self._similarity.find_most_similar_words(query, number_of_results)
Fix C++ accelerator constructor invocation
|
import numpy as np
import pandas as pd
from typing import Tuple, List
from queryexpander.semantic_similarity import CppSemanticSimilarity
class QueryExpander:
def __init__(self, vocabulary_path: str, vocabulary_length: int, sums_cache_file: str, centroids_file_path: str):
self._words: List[str] = pd.read_csv(
vocabulary_path, sep=' ', quoting=3, header=None, usecols=(0,), na_filter=False).values.squeeze().tolist()
self._vectors: np.ndarray = pd.read_csv(vocabulary_path, sep=' ', quoting=3, header=None,
usecols=range(1, vocabulary_length + 1), na_filter=False,
dtype=np.float32).values
self._similarity = CppSemanticSimilarity(self._words, self._vectors, sums_cache_file, centroids_file_path)
def generate_sums_cache(self):
self._similarity.generate_sums_cache()
def generate_local_centroids(self, centroids_neighbourhood_size: int):
self._similarity.generate_local_centroids(centroids_neighbourhood_size)
def find_most_similar_words(self, query: List[str], number_of_results: int) -> List[Tuple[str, float]]:
return self._similarity.find_most_similar_words(query, number_of_results)
|
<commit_before>import numpy as np
import pandas as pd
from typing import Tuple, List
from queryexpander.semantic_similarity import CppSemanticSimilarity
class QueryExpander:
def __init__(self, vocabulary_path: str, vocabulary_length: int, sums_cache_file: str, centroids_file_path: str):
self._words: List[str] = pd.read_csv(
vocabulary_path, sep=' ', quoting=3, header=None, usecols=(0,), na_filter=False).values.squeeze().tolist()
self._vectors: np.ndarray = pd.read_csv(vocabulary_path, sep=' ', quoting=3, header=None,
usecols=range(1, vocabulary_length + 1), na_filter=False,
dtype=np.float32).values
self._similarity = CppSemanticSimilarity(self._words, self._vectors, sums_cache_file)
def generate_sums_cache(self):
self._similarity.generate_sums_cache()
def generate_local_centroids(self, centroids_neighbourhood_size: int):
self._similarity.generate_local_centroids(centroids_neighbourhood_size)
def find_most_similar_words(self, query: List[str], number_of_results: int) -> List[Tuple[str, float]]:
return self._similarity.find_most_similar_words(query, number_of_results)
<commit_msg>Fix C++ accelerator constructor invocation<commit_after>
|
import numpy as np
import pandas as pd
from typing import Tuple, List
from queryexpander.semantic_similarity import CppSemanticSimilarity
class QueryExpander:
def __init__(self, vocabulary_path: str, vocabulary_length: int, sums_cache_file: str, centroids_file_path: str):
self._words: List[str] = pd.read_csv(
vocabulary_path, sep=' ', quoting=3, header=None, usecols=(0,), na_filter=False).values.squeeze().tolist()
self._vectors: np.ndarray = pd.read_csv(vocabulary_path, sep=' ', quoting=3, header=None,
usecols=range(1, vocabulary_length + 1), na_filter=False,
dtype=np.float32).values
self._similarity = CppSemanticSimilarity(self._words, self._vectors, sums_cache_file, centroids_file_path)
def generate_sums_cache(self):
self._similarity.generate_sums_cache()
def generate_local_centroids(self, centroids_neighbourhood_size: int):
self._similarity.generate_local_centroids(centroids_neighbourhood_size)
def find_most_similar_words(self, query: List[str], number_of_results: int) -> List[Tuple[str, float]]:
return self._similarity.find_most_similar_words(query, number_of_results)
|
import numpy as np
import pandas as pd
from typing import Tuple, List
from queryexpander.semantic_similarity import CppSemanticSimilarity
class QueryExpander:
def __init__(self, vocabulary_path: str, vocabulary_length: int, sums_cache_file: str, centroids_file_path: str):
self._words: List[str] = pd.read_csv(
vocabulary_path, sep=' ', quoting=3, header=None, usecols=(0,), na_filter=False).values.squeeze().tolist()
self._vectors: np.ndarray = pd.read_csv(vocabulary_path, sep=' ', quoting=3, header=None,
usecols=range(1, vocabulary_length + 1), na_filter=False,
dtype=np.float32).values
self._similarity = CppSemanticSimilarity(self._words, self._vectors, sums_cache_file)
def generate_sums_cache(self):
self._similarity.generate_sums_cache()
def generate_local_centroids(self, centroids_neighbourhood_size: int):
self._similarity.generate_local_centroids(centroids_neighbourhood_size)
def find_most_similar_words(self, query: List[str], number_of_results: int) -> List[Tuple[str, float]]:
return self._similarity.find_most_similar_words(query, number_of_results)
Fix C++ accelerator constructor invocationimport numpy as np
import pandas as pd
from typing import Tuple, List
from queryexpander.semantic_similarity import CppSemanticSimilarity
class QueryExpander:
def __init__(self, vocabulary_path: str, vocabulary_length: int, sums_cache_file: str, centroids_file_path: str):
self._words: List[str] = pd.read_csv(
vocabulary_path, sep=' ', quoting=3, header=None, usecols=(0,), na_filter=False).values.squeeze().tolist()
self._vectors: np.ndarray = pd.read_csv(vocabulary_path, sep=' ', quoting=3, header=None,
usecols=range(1, vocabulary_length + 1), na_filter=False,
dtype=np.float32).values
self._similarity = CppSemanticSimilarity(self._words, self._vectors, sums_cache_file, centroids_file_path)
def generate_sums_cache(self):
self._similarity.generate_sums_cache()
def generate_local_centroids(self, centroids_neighbourhood_size: int):
self._similarity.generate_local_centroids(centroids_neighbourhood_size)
def find_most_similar_words(self, query: List[str], number_of_results: int) -> List[Tuple[str, float]]:
return self._similarity.find_most_similar_words(query, number_of_results)
|
<commit_before>import numpy as np
import pandas as pd
from typing import Tuple, List
from queryexpander.semantic_similarity import CppSemanticSimilarity
class QueryExpander:
def __init__(self, vocabulary_path: str, vocabulary_length: int, sums_cache_file: str, centroids_file_path: str):
self._words: List[str] = pd.read_csv(
vocabulary_path, sep=' ', quoting=3, header=None, usecols=(0,), na_filter=False).values.squeeze().tolist()
self._vectors: np.ndarray = pd.read_csv(vocabulary_path, sep=' ', quoting=3, header=None,
usecols=range(1, vocabulary_length + 1), na_filter=False,
dtype=np.float32).values
self._similarity = CppSemanticSimilarity(self._words, self._vectors, sums_cache_file)
def generate_sums_cache(self):
self._similarity.generate_sums_cache()
def generate_local_centroids(self, centroids_neighbourhood_size: int):
self._similarity.generate_local_centroids(centroids_neighbourhood_size)
def find_most_similar_words(self, query: List[str], number_of_results: int) -> List[Tuple[str, float]]:
return self._similarity.find_most_similar_words(query, number_of_results)
<commit_msg>Fix C++ accelerator constructor invocation<commit_after>import numpy as np
import pandas as pd
from typing import Tuple, List
from queryexpander.semantic_similarity import CppSemanticSimilarity
class QueryExpander:
def __init__(self, vocabulary_path: str, vocabulary_length: int, sums_cache_file: str, centroids_file_path: str):
self._words: List[str] = pd.read_csv(
vocabulary_path, sep=' ', quoting=3, header=None, usecols=(0,), na_filter=False).values.squeeze().tolist()
self._vectors: np.ndarray = pd.read_csv(vocabulary_path, sep=' ', quoting=3, header=None,
usecols=range(1, vocabulary_length + 1), na_filter=False,
dtype=np.float32).values
self._similarity = CppSemanticSimilarity(self._words, self._vectors, sums_cache_file, centroids_file_path)
def generate_sums_cache(self):
self._similarity.generate_sums_cache()
def generate_local_centroids(self, centroids_neighbourhood_size: int):
self._similarity.generate_local_centroids(centroids_neighbourhood_size)
def find_most_similar_words(self, query: List[str], number_of_results: int) -> List[Tuple[str, float]]:
return self._similarity.find_most_similar_words(query, number_of_results)
|
4acb0d36db6777704df7e3bff6c95f38f47ce49a
|
src/django_email_user_model/backends.py
|
src/django_email_user_model/backends.py
|
from django.contrib.auth import get_user_model
class EmailAuthBackend(object):
"""Docstring for EmailAuthBackend """
def authenticate(self, username=None, password=None):
"""todo: Docstring for authenticate
:param username: arg description
:type username: type description
:param password: arg description
:type password: type description
:return:
:rtype:
"""
user_model = get_user_model()
try:
user = user_model.objects.get(email=username)
if user.check_password(password):
return user
except user_model.DoesNotExist:
return None
def get_user(self, user_id):
"""todo: Docstring for get_user
:param user_id: arg description
:type user_id: type description
:return:
:rtype:
"""
user_model = get_user_model()
try:
return user_model.objects.get(pk=user_id)
except user_model.DoesNotExist:
return None
|
from django.contrib.auth import get_user_model
class EmailAuthBackend(object):
"""Docstring for EmailAuthBackend """
def authenticate(self, email=None, password=None, username=None):
"""todo: Docstring for authenticate
:param username: arg description
:type username: type description
:param password: arg description
:type password: type description
:return:
:rtype:
"""
user_model = get_user_model()
try:
user = user_model.objects.get(email=(email or username))
if user.check_password(password):
return user
except user_model.DoesNotExist:
return None
def get_user(self, user_id):
"""todo: Docstring for get_user
:param user_id: arg description
:type user_id: type description
:return:
:rtype:
"""
user_model = get_user_model()
try:
return user_model.objects.get(pk=user_id)
except user_model.DoesNotExist:
return None
|
Update auth backend to support kwargs
|
Update auth backend to support kwargs
|
Python
|
mit
|
jeffbuttars/django-email-user-model
|
from django.contrib.auth import get_user_model
class EmailAuthBackend(object):
"""Docstring for EmailAuthBackend """
def authenticate(self, username=None, password=None):
"""todo: Docstring for authenticate
:param username: arg description
:type username: type description
:param password: arg description
:type password: type description
:return:
:rtype:
"""
user_model = get_user_model()
try:
user = user_model.objects.get(email=username)
if user.check_password(password):
return user
except user_model.DoesNotExist:
return None
def get_user(self, user_id):
"""todo: Docstring for get_user
:param user_id: arg description
:type user_id: type description
:return:
:rtype:
"""
user_model = get_user_model()
try:
return user_model.objects.get(pk=user_id)
except user_model.DoesNotExist:
return None
Update auth backend to support kwargs
|
from django.contrib.auth import get_user_model
class EmailAuthBackend(object):
"""Docstring for EmailAuthBackend """
def authenticate(self, email=None, password=None, username=None):
"""todo: Docstring for authenticate
:param username: arg description
:type username: type description
:param password: arg description
:type password: type description
:return:
:rtype:
"""
user_model = get_user_model()
try:
user = user_model.objects.get(email=(email or username))
if user.check_password(password):
return user
except user_model.DoesNotExist:
return None
def get_user(self, user_id):
"""todo: Docstring for get_user
:param user_id: arg description
:type user_id: type description
:return:
:rtype:
"""
user_model = get_user_model()
try:
return user_model.objects.get(pk=user_id)
except user_model.DoesNotExist:
return None
|
<commit_before>from django.contrib.auth import get_user_model
class EmailAuthBackend(object):
"""Docstring for EmailAuthBackend """
def authenticate(self, username=None, password=None):
"""todo: Docstring for authenticate
:param username: arg description
:type username: type description
:param password: arg description
:type password: type description
:return:
:rtype:
"""
user_model = get_user_model()
try:
user = user_model.objects.get(email=username)
if user.check_password(password):
return user
except user_model.DoesNotExist:
return None
def get_user(self, user_id):
"""todo: Docstring for get_user
:param user_id: arg description
:type user_id: type description
:return:
:rtype:
"""
user_model = get_user_model()
try:
return user_model.objects.get(pk=user_id)
except user_model.DoesNotExist:
return None
<commit_msg>Update auth backend to support kwargs<commit_after>
|
from django.contrib.auth import get_user_model
class EmailAuthBackend(object):
"""Docstring for EmailAuthBackend """
def authenticate(self, email=None, password=None, username=None):
"""todo: Docstring for authenticate
:param username: arg description
:type username: type description
:param password: arg description
:type password: type description
:return:
:rtype:
"""
user_model = get_user_model()
try:
user = user_model.objects.get(email=(email or username))
if user.check_password(password):
return user
except user_model.DoesNotExist:
return None
def get_user(self, user_id):
"""todo: Docstring for get_user
:param user_id: arg description
:type user_id: type description
:return:
:rtype:
"""
user_model = get_user_model()
try:
return user_model.objects.get(pk=user_id)
except user_model.DoesNotExist:
return None
|
from django.contrib.auth import get_user_model
class EmailAuthBackend(object):
"""Docstring for EmailAuthBackend """
def authenticate(self, username=None, password=None):
"""todo: Docstring for authenticate
:param username: arg description
:type username: type description
:param password: arg description
:type password: type description
:return:
:rtype:
"""
user_model = get_user_model()
try:
user = user_model.objects.get(email=username)
if user.check_password(password):
return user
except user_model.DoesNotExist:
return None
def get_user(self, user_id):
"""todo: Docstring for get_user
:param user_id: arg description
:type user_id: type description
:return:
:rtype:
"""
user_model = get_user_model()
try:
return user_model.objects.get(pk=user_id)
except user_model.DoesNotExist:
return None
Update auth backend to support kwargsfrom django.contrib.auth import get_user_model
class EmailAuthBackend(object):
"""Docstring for EmailAuthBackend """
def authenticate(self, email=None, password=None, username=None):
"""todo: Docstring for authenticate
:param username: arg description
:type username: type description
:param password: arg description
:type password: type description
:return:
:rtype:
"""
user_model = get_user_model()
try:
user = user_model.objects.get(email=(email or username))
if user.check_password(password):
return user
except user_model.DoesNotExist:
return None
def get_user(self, user_id):
"""todo: Docstring for get_user
:param user_id: arg description
:type user_id: type description
:return:
:rtype:
"""
user_model = get_user_model()
try:
return user_model.objects.get(pk=user_id)
except user_model.DoesNotExist:
return None
|
<commit_before>from django.contrib.auth import get_user_model
class EmailAuthBackend(object):
"""Docstring for EmailAuthBackend """
def authenticate(self, username=None, password=None):
"""todo: Docstring for authenticate
:param username: arg description
:type username: type description
:param password: arg description
:type password: type description
:return:
:rtype:
"""
user_model = get_user_model()
try:
user = user_model.objects.get(email=username)
if user.check_password(password):
return user
except user_model.DoesNotExist:
return None
def get_user(self, user_id):
"""todo: Docstring for get_user
:param user_id: arg description
:type user_id: type description
:return:
:rtype:
"""
user_model = get_user_model()
try:
return user_model.objects.get(pk=user_id)
except user_model.DoesNotExist:
return None
<commit_msg>Update auth backend to support kwargs<commit_after>from django.contrib.auth import get_user_model
class EmailAuthBackend(object):
"""Docstring for EmailAuthBackend """
def authenticate(self, email=None, password=None, username=None):
"""todo: Docstring for authenticate
:param username: arg description
:type username: type description
:param password: arg description
:type password: type description
:return:
:rtype:
"""
user_model = get_user_model()
try:
user = user_model.objects.get(email=(email or username))
if user.check_password(password):
return user
except user_model.DoesNotExist:
return None
def get_user(self, user_id):
"""todo: Docstring for get_user
:param user_id: arg description
:type user_id: type description
:return:
:rtype:
"""
user_model = get_user_model()
try:
return user_model.objects.get(pk=user_id)
except user_model.DoesNotExist:
return None
|
047483d9897e75f8284c39e8477a285763da7b37
|
heufybot/modules/util/commandhandler.py
|
heufybot/modules/util/commandhandler.py
|
from twisted.plugin import IPlugin
from heufybot.moduleinterface import BotModule, IBotModule
from zope.interface import implements
class CommandHandler(BotModule):
implements(IPlugin, IBotModule)
name = "CommandHandler"
def actions(self):
return [ ("message-channel", 1, self.handleChannelMessage),
("message-user", 1, self.handlePrivateMessage) ]
def handleChannelMessage(self, server, channel, user, messageBody):
message = {
"server": server,
"source": channel.name,
"channel": channel,
"user": user,
"body": messageBody
}
self._handleCommand(message)
def handlePrivateMessage(self, server, user, messageBody):
message = {
"server": server,
"source": user.nick,
"user": user,
"body": messageBody
}
self._handleCommand(message)
def _handleCommand(self, message):
commandPrefix = self.bot.config.serverItemWithDefault(message["server"], "command_prefix", "!")
if not message["body"].startswith(commandPrefix):
return # We don't need to be handling things that aren't bot commands
params = message["body"].split()
message["command"] = params[0][params[0].index(commandPrefix) + len(commandPrefix):]
del params[0]
message["params"] = params
self.bot.moduleHandler.runProcessingAction("botmessage", message)
commandHandler = CommandHandler()
|
from twisted.plugin import IPlugin
from heufybot.moduleinterface import BotModule, IBotModule
from zope.interface import implements
class CommandHandler(BotModule):
implements(IPlugin, IBotModule)
name = "CommandHandler"
def actions(self):
return [ ("message-channel", 1, self.handleChannelMessage),
("message-user", 1, self.handlePrivateMessage) ]
def handleChannelMessage(self, server, channel, user, messageBody):
message = {
"server": server,
"source": channel.name,
"channel": channel,
"user": user,
"body": messageBody
}
self._handleCommand(message)
def handlePrivateMessage(self, server, user, messageBody):
message = {
"server": server,
"source": user.nick,
"user": user,
"body": messageBody
}
self._handleCommand(message)
def _handleCommand(self, message):
commandPrefix = self.bot.config.serverItemWithDefault(message["server"], "command_prefix", "!")
botNick = self.bot.servers[message["server"]].nick.lower()
params = message["body"].split()
if message["body"].startswith(commandPrefix):
message["command"] = params[0][params[0].index(commandPrefix) + len(commandPrefix):]
del params[0]
elif message["body"].lower().startswith(botNick):
message["command"] = params[1]
del params[0:2]
else:
return # We don't need to be handling things that aren't bot commands
message["params"] = params
self.bot.moduleHandler.runProcessingAction("botmessage", message)
commandHandler = CommandHandler()
|
Make the bot respond to its name
|
Make the bot respond to its name
Implements GH-7
|
Python
|
mit
|
Heufneutje/PyHeufyBot,Heufneutje/PyHeufyBot
|
from twisted.plugin import IPlugin
from heufybot.moduleinterface import BotModule, IBotModule
from zope.interface import implements
class CommandHandler(BotModule):
implements(IPlugin, IBotModule)
name = "CommandHandler"
def actions(self):
return [ ("message-channel", 1, self.handleChannelMessage),
("message-user", 1, self.handlePrivateMessage) ]
def handleChannelMessage(self, server, channel, user, messageBody):
message = {
"server": server,
"source": channel.name,
"channel": channel,
"user": user,
"body": messageBody
}
self._handleCommand(message)
def handlePrivateMessage(self, server, user, messageBody):
message = {
"server": server,
"source": user.nick,
"user": user,
"body": messageBody
}
self._handleCommand(message)
def _handleCommand(self, message):
commandPrefix = self.bot.config.serverItemWithDefault(message["server"], "command_prefix", "!")
if not message["body"].startswith(commandPrefix):
return # We don't need to be handling things that aren't bot commands
params = message["body"].split()
message["command"] = params[0][params[0].index(commandPrefix) + len(commandPrefix):]
del params[0]
message["params"] = params
self.bot.moduleHandler.runProcessingAction("botmessage", message)
commandHandler = CommandHandler()
Make the bot respond to its name
Implements GH-7
|
from twisted.plugin import IPlugin
from heufybot.moduleinterface import BotModule, IBotModule
from zope.interface import implements
class CommandHandler(BotModule):
implements(IPlugin, IBotModule)
name = "CommandHandler"
def actions(self):
return [ ("message-channel", 1, self.handleChannelMessage),
("message-user", 1, self.handlePrivateMessage) ]
def handleChannelMessage(self, server, channel, user, messageBody):
message = {
"server": server,
"source": channel.name,
"channel": channel,
"user": user,
"body": messageBody
}
self._handleCommand(message)
def handlePrivateMessage(self, server, user, messageBody):
message = {
"server": server,
"source": user.nick,
"user": user,
"body": messageBody
}
self._handleCommand(message)
def _handleCommand(self, message):
commandPrefix = self.bot.config.serverItemWithDefault(message["server"], "command_prefix", "!")
botNick = self.bot.servers[message["server"]].nick.lower()
params = message["body"].split()
if message["body"].startswith(commandPrefix):
message["command"] = params[0][params[0].index(commandPrefix) + len(commandPrefix):]
del params[0]
elif message["body"].lower().startswith(botNick):
message["command"] = params[1]
del params[0:2]
else:
return # We don't need to be handling things that aren't bot commands
message["params"] = params
self.bot.moduleHandler.runProcessingAction("botmessage", message)
commandHandler = CommandHandler()
|
<commit_before>from twisted.plugin import IPlugin
from heufybot.moduleinterface import BotModule, IBotModule
from zope.interface import implements
class CommandHandler(BotModule):
implements(IPlugin, IBotModule)
name = "CommandHandler"
def actions(self):
return [ ("message-channel", 1, self.handleChannelMessage),
("message-user", 1, self.handlePrivateMessage) ]
def handleChannelMessage(self, server, channel, user, messageBody):
message = {
"server": server,
"source": channel.name,
"channel": channel,
"user": user,
"body": messageBody
}
self._handleCommand(message)
def handlePrivateMessage(self, server, user, messageBody):
message = {
"server": server,
"source": user.nick,
"user": user,
"body": messageBody
}
self._handleCommand(message)
def _handleCommand(self, message):
commandPrefix = self.bot.config.serverItemWithDefault(message["server"], "command_prefix", "!")
if not message["body"].startswith(commandPrefix):
return # We don't need to be handling things that aren't bot commands
params = message["body"].split()
message["command"] = params[0][params[0].index(commandPrefix) + len(commandPrefix):]
del params[0]
message["params"] = params
self.bot.moduleHandler.runProcessingAction("botmessage", message)
commandHandler = CommandHandler()
<commit_msg>Make the bot respond to its name
Implements GH-7<commit_after>
|
from twisted.plugin import IPlugin
from heufybot.moduleinterface import BotModule, IBotModule
from zope.interface import implements
class CommandHandler(BotModule):
implements(IPlugin, IBotModule)
name = "CommandHandler"
def actions(self):
return [ ("message-channel", 1, self.handleChannelMessage),
("message-user", 1, self.handlePrivateMessage) ]
def handleChannelMessage(self, server, channel, user, messageBody):
message = {
"server": server,
"source": channel.name,
"channel": channel,
"user": user,
"body": messageBody
}
self._handleCommand(message)
def handlePrivateMessage(self, server, user, messageBody):
message = {
"server": server,
"source": user.nick,
"user": user,
"body": messageBody
}
self._handleCommand(message)
def _handleCommand(self, message):
commandPrefix = self.bot.config.serverItemWithDefault(message["server"], "command_prefix", "!")
botNick = self.bot.servers[message["server"]].nick.lower()
params = message["body"].split()
if message["body"].startswith(commandPrefix):
message["command"] = params[0][params[0].index(commandPrefix) + len(commandPrefix):]
del params[0]
elif message["body"].lower().startswith(botNick):
message["command"] = params[1]
del params[0:2]
else:
return # We don't need to be handling things that aren't bot commands
message["params"] = params
self.bot.moduleHandler.runProcessingAction("botmessage", message)
commandHandler = CommandHandler()
|
from twisted.plugin import IPlugin
from heufybot.moduleinterface import BotModule, IBotModule
from zope.interface import implements
class CommandHandler(BotModule):
implements(IPlugin, IBotModule)
name = "CommandHandler"
def actions(self):
return [ ("message-channel", 1, self.handleChannelMessage),
("message-user", 1, self.handlePrivateMessage) ]
def handleChannelMessage(self, server, channel, user, messageBody):
message = {
"server": server,
"source": channel.name,
"channel": channel,
"user": user,
"body": messageBody
}
self._handleCommand(message)
def handlePrivateMessage(self, server, user, messageBody):
message = {
"server": server,
"source": user.nick,
"user": user,
"body": messageBody
}
self._handleCommand(message)
def _handleCommand(self, message):
commandPrefix = self.bot.config.serverItemWithDefault(message["server"], "command_prefix", "!")
if not message["body"].startswith(commandPrefix):
return # We don't need to be handling things that aren't bot commands
params = message["body"].split()
message["command"] = params[0][params[0].index(commandPrefix) + len(commandPrefix):]
del params[0]
message["params"] = params
self.bot.moduleHandler.runProcessingAction("botmessage", message)
commandHandler = CommandHandler()
Make the bot respond to its name
Implements GH-7from twisted.plugin import IPlugin
from heufybot.moduleinterface import BotModule, IBotModule
from zope.interface import implements
class CommandHandler(BotModule):
implements(IPlugin, IBotModule)
name = "CommandHandler"
def actions(self):
return [ ("message-channel", 1, self.handleChannelMessage),
("message-user", 1, self.handlePrivateMessage) ]
def handleChannelMessage(self, server, channel, user, messageBody):
message = {
"server": server,
"source": channel.name,
"channel": channel,
"user": user,
"body": messageBody
}
self._handleCommand(message)
def handlePrivateMessage(self, server, user, messageBody):
message = {
"server": server,
"source": user.nick,
"user": user,
"body": messageBody
}
self._handleCommand(message)
def _handleCommand(self, message):
commandPrefix = self.bot.config.serverItemWithDefault(message["server"], "command_prefix", "!")
botNick = self.bot.servers[message["server"]].nick.lower()
params = message["body"].split()
if message["body"].startswith(commandPrefix):
message["command"] = params[0][params[0].index(commandPrefix) + len(commandPrefix):]
del params[0]
elif message["body"].lower().startswith(botNick):
message["command"] = params[1]
del params[0:2]
else:
return # We don't need to be handling things that aren't bot commands
message["params"] = params
self.bot.moduleHandler.runProcessingAction("botmessage", message)
commandHandler = CommandHandler()
|
<commit_before>from twisted.plugin import IPlugin
from heufybot.moduleinterface import BotModule, IBotModule
from zope.interface import implements
class CommandHandler(BotModule):
implements(IPlugin, IBotModule)
name = "CommandHandler"
def actions(self):
return [ ("message-channel", 1, self.handleChannelMessage),
("message-user", 1, self.handlePrivateMessage) ]
def handleChannelMessage(self, server, channel, user, messageBody):
message = {
"server": server,
"source": channel.name,
"channel": channel,
"user": user,
"body": messageBody
}
self._handleCommand(message)
def handlePrivateMessage(self, server, user, messageBody):
message = {
"server": server,
"source": user.nick,
"user": user,
"body": messageBody
}
self._handleCommand(message)
def _handleCommand(self, message):
commandPrefix = self.bot.config.serverItemWithDefault(message["server"], "command_prefix", "!")
if not message["body"].startswith(commandPrefix):
return # We don't need to be handling things that aren't bot commands
params = message["body"].split()
message["command"] = params[0][params[0].index(commandPrefix) + len(commandPrefix):]
del params[0]
message["params"] = params
self.bot.moduleHandler.runProcessingAction("botmessage", message)
commandHandler = CommandHandler()
<commit_msg>Make the bot respond to its name
Implements GH-7<commit_after>from twisted.plugin import IPlugin
from heufybot.moduleinterface import BotModule, IBotModule
from zope.interface import implements
class CommandHandler(BotModule):
implements(IPlugin, IBotModule)
name = "CommandHandler"
def actions(self):
return [ ("message-channel", 1, self.handleChannelMessage),
("message-user", 1, self.handlePrivateMessage) ]
def handleChannelMessage(self, server, channel, user, messageBody):
message = {
"server": server,
"source": channel.name,
"channel": channel,
"user": user,
"body": messageBody
}
self._handleCommand(message)
def handlePrivateMessage(self, server, user, messageBody):
message = {
"server": server,
"source": user.nick,
"user": user,
"body": messageBody
}
self._handleCommand(message)
def _handleCommand(self, message):
commandPrefix = self.bot.config.serverItemWithDefault(message["server"], "command_prefix", "!")
botNick = self.bot.servers[message["server"]].nick.lower()
params = message["body"].split()
if message["body"].startswith(commandPrefix):
message["command"] = params[0][params[0].index(commandPrefix) + len(commandPrefix):]
del params[0]
elif message["body"].lower().startswith(botNick):
message["command"] = params[1]
del params[0:2]
else:
return # We don't need to be handling things that aren't bot commands
message["params"] = params
self.bot.moduleHandler.runProcessingAction("botmessage", message)
commandHandler = CommandHandler()
|
cbbf178a59561e828214ff88e0c73ec0716fa926
|
tests/test_ensure_do_cleanups.py
|
tests/test_ensure_do_cleanups.py
|
from unittesting import DeferrableTestCase
class TestDoCleanups(DeferrableTestCase):
def test_ensure_do_cleanups_works(self):
messages = []
def work(message):
messages.append(message)
self.addCleanup(work, 1)
yield from self.doCleanups()
self.assertEqual(messages, [1])
|
from unittesting import DeferrableTestCase
class TestExplicitDoCleanups(DeferrableTestCase):
def test_manually_calling_do_cleanups_works(self):
messages = []
def work(message):
messages.append(message)
self.addCleanup(work, 1)
yield from self.doCleanups()
self.assertEqual(messages, [1])
cleanup_called = []
class TestImplicitDoCleanupsOnTeardown(DeferrableTestCase):
def test_a_prepare(self):
self.addCleanup(lambda: cleanup_called.append(1))
def test_b_assert(self):
self.assertEqual(cleanup_called, [1])
|
Test implicit `doCleanups` on tearDown
|
Test implicit `doCleanups` on tearDown
|
Python
|
mit
|
randy3k/UnitTesting,randy3k/UnitTesting,randy3k/UnitTesting,randy3k/UnitTesting
|
from unittesting import DeferrableTestCase
class TestDoCleanups(DeferrableTestCase):
def test_ensure_do_cleanups_works(self):
messages = []
def work(message):
messages.append(message)
self.addCleanup(work, 1)
yield from self.doCleanups()
self.assertEqual(messages, [1])
Test implicit `doCleanups` on tearDown
|
from unittesting import DeferrableTestCase
class TestExplicitDoCleanups(DeferrableTestCase):
def test_manually_calling_do_cleanups_works(self):
messages = []
def work(message):
messages.append(message)
self.addCleanup(work, 1)
yield from self.doCleanups()
self.assertEqual(messages, [1])
cleanup_called = []
class TestImplicitDoCleanupsOnTeardown(DeferrableTestCase):
def test_a_prepare(self):
self.addCleanup(lambda: cleanup_called.append(1))
def test_b_assert(self):
self.assertEqual(cleanup_called, [1])
|
<commit_before>from unittesting import DeferrableTestCase
class TestDoCleanups(DeferrableTestCase):
def test_ensure_do_cleanups_works(self):
messages = []
def work(message):
messages.append(message)
self.addCleanup(work, 1)
yield from self.doCleanups()
self.assertEqual(messages, [1])
<commit_msg>Test implicit `doCleanups` on tearDown<commit_after>
|
from unittesting import DeferrableTestCase
class TestExplicitDoCleanups(DeferrableTestCase):
def test_manually_calling_do_cleanups_works(self):
messages = []
def work(message):
messages.append(message)
self.addCleanup(work, 1)
yield from self.doCleanups()
self.assertEqual(messages, [1])
cleanup_called = []
class TestImplicitDoCleanupsOnTeardown(DeferrableTestCase):
def test_a_prepare(self):
self.addCleanup(lambda: cleanup_called.append(1))
def test_b_assert(self):
self.assertEqual(cleanup_called, [1])
|
from unittesting import DeferrableTestCase
class TestDoCleanups(DeferrableTestCase):
def test_ensure_do_cleanups_works(self):
messages = []
def work(message):
messages.append(message)
self.addCleanup(work, 1)
yield from self.doCleanups()
self.assertEqual(messages, [1])
Test implicit `doCleanups` on tearDownfrom unittesting import DeferrableTestCase
class TestExplicitDoCleanups(DeferrableTestCase):
def test_manually_calling_do_cleanups_works(self):
messages = []
def work(message):
messages.append(message)
self.addCleanup(work, 1)
yield from self.doCleanups()
self.assertEqual(messages, [1])
cleanup_called = []
class TestImplicitDoCleanupsOnTeardown(DeferrableTestCase):
def test_a_prepare(self):
self.addCleanup(lambda: cleanup_called.append(1))
def test_b_assert(self):
self.assertEqual(cleanup_called, [1])
|
<commit_before>from unittesting import DeferrableTestCase
class TestDoCleanups(DeferrableTestCase):
def test_ensure_do_cleanups_works(self):
messages = []
def work(message):
messages.append(message)
self.addCleanup(work, 1)
yield from self.doCleanups()
self.assertEqual(messages, [1])
<commit_msg>Test implicit `doCleanups` on tearDown<commit_after>from unittesting import DeferrableTestCase
class TestExplicitDoCleanups(DeferrableTestCase):
def test_manually_calling_do_cleanups_works(self):
messages = []
def work(message):
messages.append(message)
self.addCleanup(work, 1)
yield from self.doCleanups()
self.assertEqual(messages, [1])
cleanup_called = []
class TestImplicitDoCleanupsOnTeardown(DeferrableTestCase):
def test_a_prepare(self):
self.addCleanup(lambda: cleanup_called.append(1))
def test_b_assert(self):
self.assertEqual(cleanup_called, [1])
|
27f5676656e7507883ba365d2639e5f3cb5b0b58
|
snippets/keras_testing.py
|
snippets/keras_testing.py
|
from keras.models import Sequential
from keras.layers import Dense, Dropout
import sys
import numpy as np
def main():
input_filename = sys.argv[1]
training = np.loadtxt('data/%s.csv' % input_filename, delimiter=',')
test = np.loadtxt('data/%s_test.csv' % input_filename, delimiter=',')
y_train = training[:,3:4]
x_train = training[:,0:3]
y_test = test[:,3:4]
x_test = test[:,0:3]
model = Sequential()
model.add(Dense(10, activation='tanh', input_dim=3))
model.add(Dense(10, activation='tanh'))
model.add(Dense(1, activation='linear'))
model.compile(optimizer='sgd', loss='mean_squared_error')
model.fit(x_train, y_train, epochs=100)
print('Test score: ', model.evaluate(x_test, y_test))
y_network = model.predict(x_test)
out = np.concatenate((x_test, y_test, y_network), axis=1)
np.savetxt('results/%s_kera.csv' % input_filename, out, delimiter=',')
if __name__ == "__main__":
main()
|
from keras.models import Sequential
from keras.layers import Dense, Dropout
import sys
import numpy as np


def main():
    input_filename = sys.argv[1]
    num_networks = int(sys.argv[2])

    training = np.loadtxt('data/%s.csv' % input_filename, delimiter=',')
    test = np.loadtxt('data/%s_test.csv' % input_filename, delimiter=',')

    y_train = training[:, 3:4]
    x_train = training[:, 0:3]
    y_test = test[:, 3:4]
    x_test = test[:, 0:3]

    test_score = 0
    result = np.zeros((1,5))

    for _ in range(num_networks):
        model = Sequential()
        model.add(Dense(10, activation='tanh', input_dim=3))
        model.add(Dense(10, activation='tanh'))
        model.add(Dense(1, activation='linear'))

        model.compile(optimizer='sgd', loss='mean_squared_error')
        model.fit(x_train, y_train, epochs=20, shuffle=True)

        y_network = model.predict_on_batch(x_test)
        result = np.concatenate((result, np.concatenate((x_test, y_test, y_network), axis=1)), axis=0)
        test_score += model.evaluate(x_test, y_test)
        print()

    print('Test score: ', test_score / num_networks)

    result = np.delete(result, 0, 0)
    np.savetxt('results/%s_kera.csv' % input_filename, result, delimiter=',')


if __name__ == "__main__":
    main()
|
Tweak parameters and allow runs over multiple networks
|
Tweak parameters and allow runs over multiple networks
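The one-line message leaves the rationale implicit: the new script trains several independently initialised networks and averages their test scores so a single unlucky initialisation does not dominate the result. A hedged, generic sketch of that idea (function and variable names are illustrative, not from the repository):

import numpy as np


def average_score(build_model, x_train, y_train, x_test, y_test, runs=5):
    # illustrative helper: train `runs` fresh models and average the metric
    scores = []
    for _ in range(runs):
        model = build_model()  # new random weights each iteration
        model.fit(x_train, y_train, epochs=20, verbose=0)
        scores.append(model.evaluate(x_test, y_test, verbose=0))
    return float(np.mean(scores))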
|
Python
|
mit
|
farthir/msc-project
|
from keras.models import Sequential
from keras.layers import Dense, Dropout
import sys
import numpy as np
def main():
input_filename = sys.argv[1]
training = np.loadtxt('data/%s.csv' % input_filename, delimiter=',')
test = np.loadtxt('data/%s_test.csv' % input_filename, delimiter=',')
y_train = training[:,3:4]
x_train = training[:,0:3]
y_test = test[:,3:4]
x_test = test[:,0:3]
model = Sequential()
model.add(Dense(10, activation='tanh', input_dim=3))
model.add(Dense(10, activation='tanh'))
model.add(Dense(1, activation='linear'))
model.compile(optimizer='sgd', loss='mean_squared_error')
model.fit(x_train, y_train, epochs=100)
print('Test score: ', model.evaluate(x_test, y_test))
y_network = model.predict(x_test)
out = np.concatenate((x_test, y_test, y_network), axis=1)
np.savetxt('results/%s_kera.csv' % input_filename, out, delimiter=',')
if __name__ == "__main__":
main()
Tweak parameters and allow runs over multiple networks
|
from keras.models import Sequential
from keras.layers import Dense, Dropout
import sys
import numpy as np
def main():
input_filename = sys.argv[1]
num_networks = int(sys.argv[2])
training = np.loadtxt('data/%s.csv' % input_filename, delimiter=',')
test = np.loadtxt('data/%s_test.csv' % input_filename, delimiter=',')
y_train = training[:, 3:4]
x_train = training[:, 0:3]
y_test = test[:, 3:4]
x_test = test[:, 0:3]
test_score = 0
result = np.zeros((1,5))
for _ in range(num_networks):
model = Sequential()
model.add(Dense(10, activation='tanh', input_dim=3))
model.add(Dense(10, activation='tanh'))
model.add(Dense(1, activation='linear'))
model.compile(optimizer='sgd', loss='mean_squared_error')
model.fit(x_train, y_train, epochs=20, shuffle=True)
y_network = model.predict_on_batch(x_test)
result = np.concatenate((result, np.concatenate((x_test, y_test, y_network), axis=1)), axis=0)
test_score += model.evaluate(x_test, y_test)
print()
print('Test score: ', test_score / num_networks)
result = np.delete(result, 0, 0)
np.savetxt('results/%s_kera.csv' % input_filename, result, delimiter=',')
if __name__ == "__main__":
main()
|
<commit_before>from keras.models import Sequential
from keras.layers import Dense, Dropout
import sys
import numpy as np
def main():
input_filename = sys.argv[1]
training = np.loadtxt('data/%s.csv' % input_filename, delimiter=',')
test = np.loadtxt('data/%s_test.csv' % input_filename, delimiter=',')
y_train = training[:,3:4]
x_train = training[:,0:3]
y_test = test[:,3:4]
x_test = test[:,0:3]
model = Sequential()
model.add(Dense(10, activation='tanh', input_dim=3))
model.add(Dense(10, activation='tanh'))
model.add(Dense(1, activation='linear'))
model.compile(optimizer='sgd', loss='mean_squared_error')
model.fit(x_train, y_train, epochs=100)
print('Test score: ', model.evaluate(x_test, y_test))
y_network = model.predict(x_test)
out = np.concatenate((x_test, y_test, y_network), axis=1)
np.savetxt('results/%s_kera.csv' % input_filename, out, delimiter=',')
if __name__ == "__main__":
main()
<commit_msg>Tweak parameters and allow runs over multiple networks<commit_after>
|
from keras.models import Sequential
from keras.layers import Dense, Dropout
import sys
import numpy as np
def main():
input_filename = sys.argv[1]
num_networks = int(sys.argv[2])
training = np.loadtxt('data/%s.csv' % input_filename, delimiter=',')
test = np.loadtxt('data/%s_test.csv' % input_filename, delimiter=',')
y_train = training[:, 3:4]
x_train = training[:, 0:3]
y_test = test[:, 3:4]
x_test = test[:, 0:3]
test_score = 0
result = np.zeros((1,5))
for _ in range(num_networks):
model = Sequential()
model.add(Dense(10, activation='tanh', input_dim=3))
model.add(Dense(10, activation='tanh'))
model.add(Dense(1, activation='linear'))
model.compile(optimizer='sgd', loss='mean_squared_error')
model.fit(x_train, y_train, epochs=20, shuffle=True)
y_network = model.predict_on_batch(x_test)
result = np.concatenate((result, np.concatenate((x_test, y_test, y_network), axis=1)), axis=0)
test_score += model.evaluate(x_test, y_test)
print()
print('Test score: ', test_score / num_networks)
result = np.delete(result, 0, 0)
np.savetxt('results/%s_kera.csv' % input_filename, result, delimiter=',')
if __name__ == "__main__":
main()
|
from keras.models import Sequential
from keras.layers import Dense, Dropout
import sys
import numpy as np
def main():
input_filename = sys.argv[1]
training = np.loadtxt('data/%s.csv' % input_filename, delimiter=',')
test = np.loadtxt('data/%s_test.csv' % input_filename, delimiter=',')
y_train = training[:,3:4]
x_train = training[:,0:3]
y_test = test[:,3:4]
x_test = test[:,0:3]
model = Sequential()
model.add(Dense(10, activation='tanh', input_dim=3))
model.add(Dense(10, activation='tanh'))
model.add(Dense(1, activation='linear'))
model.compile(optimizer='sgd', loss='mean_squared_error')
model.fit(x_train, y_train, epochs=100)
print('Test score: ', model.evaluate(x_test, y_test))
y_network = model.predict(x_test)
out = np.concatenate((x_test, y_test, y_network), axis=1)
np.savetxt('results/%s_kera.csv' % input_filename, out, delimiter=',')
if __name__ == "__main__":
main()
Tweak parameters and allow runs over multiple networksfrom keras.models import Sequential
from keras.layers import Dense, Dropout
import sys
import numpy as np
def main():
input_filename = sys.argv[1]
num_networks = int(sys.argv[2])
training = np.loadtxt('data/%s.csv' % input_filename, delimiter=',')
test = np.loadtxt('data/%s_test.csv' % input_filename, delimiter=',')
y_train = training[:, 3:4]
x_train = training[:, 0:3]
y_test = test[:, 3:4]
x_test = test[:, 0:3]
test_score = 0
result = np.zeros((1,5))
for _ in range(num_networks):
model = Sequential()
model.add(Dense(10, activation='tanh', input_dim=3))
model.add(Dense(10, activation='tanh'))
model.add(Dense(1, activation='linear'))
model.compile(optimizer='sgd', loss='mean_squared_error')
model.fit(x_train, y_train, epochs=20, shuffle=True)
y_network = model.predict_on_batch(x_test)
result = np.concatenate((result, np.concatenate((x_test, y_test, y_network), axis=1)), axis=0)
test_score += model.evaluate(x_test, y_test)
print()
print('Test score: ', test_score / num_networks)
result = np.delete(result, 0, 0)
np.savetxt('results/%s_kera.csv' % input_filename, result, delimiter=',')
if __name__ == "__main__":
main()
|
<commit_before>from keras.models import Sequential
from keras.layers import Dense, Dropout
import sys
import numpy as np
def main():
input_filename = sys.argv[1]
training = np.loadtxt('data/%s.csv' % input_filename, delimiter=',')
test = np.loadtxt('data/%s_test.csv' % input_filename, delimiter=',')
y_train = training[:,3:4]
x_train = training[:,0:3]
y_test = test[:,3:4]
x_test = test[:,0:3]
model = Sequential()
model.add(Dense(10, activation='tanh', input_dim=3))
model.add(Dense(10, activation='tanh'))
model.add(Dense(1, activation='linear'))
model.compile(optimizer='sgd', loss='mean_squared_error')
model.fit(x_train, y_train, epochs=100)
print('Test score: ', model.evaluate(x_test, y_test))
y_network = model.predict(x_test)
out = np.concatenate((x_test, y_test, y_network), axis=1)
np.savetxt('results/%s_kera.csv' % input_filename, out, delimiter=',')
if __name__ == "__main__":
main()
<commit_msg>Tweak parameters and allow runs over multiple networks<commit_after>from keras.models import Sequential
from keras.layers import Dense, Dropout
import sys
import numpy as np
def main():
input_filename = sys.argv[1]
num_networks = int(sys.argv[2])
training = np.loadtxt('data/%s.csv' % input_filename, delimiter=',')
test = np.loadtxt('data/%s_test.csv' % input_filename, delimiter=',')
y_train = training[:, 3:4]
x_train = training[:, 0:3]
y_test = test[:, 3:4]
x_test = test[:, 0:3]
test_score = 0
result = np.zeros((1,5))
for _ in range(num_networks):
model = Sequential()
model.add(Dense(10, activation='tanh', input_dim=3))
model.add(Dense(10, activation='tanh'))
model.add(Dense(1, activation='linear'))
model.compile(optimizer='sgd', loss='mean_squared_error')
model.fit(x_train, y_train, epochs=20, shuffle=True)
y_network = model.predict_on_batch(x_test)
result = np.concatenate((result, np.concatenate((x_test, y_test, y_network), axis=1)), axis=0)
test_score += model.evaluate(x_test, y_test)
print()
print('Test score: ', test_score / num_networks)
result = np.delete(result, 0, 0)
np.savetxt('results/%s_kera.csv' % input_filename, result, delimiter=',')
if __name__ == "__main__":
main()
|
8b1516e638244824b1eafed7dc4abb2dc087ec74
|
nuts/nuts.py
|
nuts/nuts.py
|
#!/usr/bin/env python2
import os
import sys
import argparse
import logging
import datetime

from src.application.Logger import Logger
from src.application.ValidationController import ValidationController
from src.application.TestController import TestController


def main(argv):
    logger = Logger()

    parser = argparse.ArgumentParser()
    parser.add_argument("-i", "--input", help="Start with a Testfile", nargs=1)
    parser.add_argument("-v", "--validate", help="Validates Testfile", nargs=1, )
    args = parser.parse_args()

    if args.input:
        validator = ValidationController(os.getcwd() + "/" + args.input[0])
        if validator.logic():
            tester = TestController(os.getcwd() + "/" + args.input[0])
            tester.logic()
    elif args.validate:
        validator = ValidationController(os.getcwd() + "/" + args.validate[0])
        validator.logic()


if __name__ == "__main__":
    main(sys.argv[1:])
|
#!/usr/bin/env python2
import os
import sys
import argparse
import logging
import datetime
import colorama

from src.application.Logger import Logger
from src.application.ValidationController import ValidationController
from src.application.TestController import TestController


def main(argv):
    colorama.init()
    logger = Logger()

    parser = argparse.ArgumentParser()
    parser.add_argument("-i", "--input", help="Start with a Testfile", nargs=1)
    parser.add_argument("-v", "--validate", help="Validates Testfile", nargs=1, )
    args = parser.parse_args()

    if args.input:
        validator = ValidationController(os.getcwd() + "/" + args.input[0])
        if validator.logic():
            tester = TestController(os.getcwd() + "/" + args.input[0])
            tester.logic()
    elif args.validate:
        validator = ValidationController(os.getcwd() + "/" + args.validate[0])
        validator.logic()


if __name__ == "__main__":
    main(sys.argv[1:])
|
Add colorama for coloring on windows
|
Add colorama for coloring on windows
Add the module colorama that makes ANSI escape character sequences work under MS Windows. The coloring is used to give a better overview of the test results
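As an aside, a minimal sketch of the colorama pattern the message describes (illustrative code, not taken from the Nuts repository): init() wraps stdout on Windows so ANSI colour codes emitted via Fore/Style are rendered as colours instead of being printed as raw escape sequences.

import colorama
from colorama import Fore, Style

colorama.init()  # enables ANSI handling on Windows; harmless on other platforms


def report(test_name, passed):
    # hypothetical reporting helper, not part of the Nuts code base
    colour = Fore.GREEN if passed else Fore.RED
    status = "PASSED" if passed else "FAILED"
    print(colour + "[{0}] {1}".format(status, test_name) + Style.RESET_ALL)


report("connectivity_check", True)
report("bandwidth_check", False)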
|
Python
|
mit
|
HSRNetwork/Nuts
|
#!/usr/bin/env python2
import os
import sys
import argparse
import logging
import datetime
from src.application.Logger import Logger
from src.application.ValidationController import ValidationController
from src.application.TestController import TestController
def main(argv):
logger = Logger()
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--input", help="Start with a Testfile", nargs=1)
parser.add_argument("-v", "--validate", help="Validates Testfile", nargs=1, )
args = parser.parse_args()
if args.input:
validator = ValidationController(os.getcwd() + "/" + args.input[0])
if validator.logic():
tester = TestController(os.getcwd() + "/" + args.input[0])
tester.logic()
elif args.validate:
validator = ValidationController(os.getcwd() + "/" + args.validate[0])
validator.logic()
if __name__ == "__main__":
main(sys.argv[1:])
Add colorama for coloring on windows
Add the module colorama that makes ANSI escape character sequences work under MS Windows. The coloring is used to give a better overview about the testresults
|
#!/usr/bin/env python2
import os
import sys
import argparse
import logging
import datetime
import colorama
from src.application.Logger import Logger
from src.application.ValidationController import ValidationController
from src.application.TestController import TestController
def main(argv):
colorama.init()
logger = Logger()
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--input", help="Start with a Testfile", nargs=1)
parser.add_argument("-v", "--validate", help="Validates Testfile", nargs=1, )
args = parser.parse_args()
if args.input:
validator = ValidationController(os.getcwd() + "/" + args.input[0])
if validator.logic():
tester = TestController(os.getcwd() + "/" + args.input[0])
tester.logic()
elif args.validate:
validator = ValidationController(os.getcwd() + "/" + args.validate[0])
validator.logic()
if __name__ == "__main__":
main(sys.argv[1:])
|
<commit_before>#!/usr/bin/env python2
import os
import sys
import argparse
import logging
import datetime
from src.application.Logger import Logger
from src.application.ValidationController import ValidationController
from src.application.TestController import TestController
def main(argv):
logger = Logger()
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--input", help="Start with a Testfile", nargs=1)
parser.add_argument("-v", "--validate", help="Validates Testfile", nargs=1, )
args = parser.parse_args()
if args.input:
validator = ValidationController(os.getcwd() + "/" + args.input[0])
if validator.logic():
tester = TestController(os.getcwd() + "/" + args.input[0])
tester.logic()
elif args.validate:
validator = ValidationController(os.getcwd() + "/" + args.validate[0])
validator.logic()
if __name__ == "__main__":
main(sys.argv[1:])
<commit_msg>Add colorama for coloring on windows
Add the module colorama that makes ANSI escape character sequences work under MS Windows. The coloring is used to give a better overview about the testresults<commit_after>
|
#!/usr/bin/env python2
import os
import sys
import argparse
import logging
import datetime
import colorama
from src.application.Logger import Logger
from src.application.ValidationController import ValidationController
from src.application.TestController import TestController
def main(argv):
colorama.init()
logger = Logger()
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--input", help="Start with a Testfile", nargs=1)
parser.add_argument("-v", "--validate", help="Validates Testfile", nargs=1, )
args = parser.parse_args()
if args.input:
validator = ValidationController(os.getcwd() + "/" + args.input[0])
if validator.logic():
tester = TestController(os.getcwd() + "/" + args.input[0])
tester.logic()
elif args.validate:
validator = ValidationController(os.getcwd() + "/" + args.validate[0])
validator.logic()
if __name__ == "__main__":
main(sys.argv[1:])
|
#!/usr/bin/env python2
import os
import sys
import argparse
import logging
import datetime
from src.application.Logger import Logger
from src.application.ValidationController import ValidationController
from src.application.TestController import TestController
def main(argv):
logger = Logger()
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--input", help="Start with a Testfile", nargs=1)
parser.add_argument("-v", "--validate", help="Validates Testfile", nargs=1, )
args = parser.parse_args()
if args.input:
validator = ValidationController(os.getcwd() + "/" + args.input[0])
if validator.logic():
tester = TestController(os.getcwd() + "/" + args.input[0])
tester.logic()
elif args.validate:
validator = ValidationController(os.getcwd() + "/" + args.validate[0])
validator.logic()
if __name__ == "__main__":
main(sys.argv[1:])
Add colorama for coloring on windows
Add the module colorama that makes ANSI escape character sequences work under MS Windows. The coloring is used to give a better overview about the testresults#!/usr/bin/env python2
import os
import sys
import argparse
import logging
import datetime
import colorama
from src.application.Logger import Logger
from src.application.ValidationController import ValidationController
from src.application.TestController import TestController
def main(argv):
colorama.init()
logger = Logger()
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--input", help="Start with a Testfile", nargs=1)
parser.add_argument("-v", "--validate", help="Validates Testfile", nargs=1, )
args = parser.parse_args()
if args.input:
validator = ValidationController(os.getcwd() + "/" + args.input[0])
if validator.logic():
tester = TestController(os.getcwd() + "/" + args.input[0])
tester.logic()
elif args.validate:
validator = ValidationController(os.getcwd() + "/" + args.validate[0])
validator.logic()
if __name__ == "__main__":
main(sys.argv[1:])
|
<commit_before>#!/usr/bin/env python2
import os
import sys
import argparse
import logging
import datetime
from src.application.Logger import Logger
from src.application.ValidationController import ValidationController
from src.application.TestController import TestController
def main(argv):
logger = Logger()
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--input", help="Start with a Testfile", nargs=1)
parser.add_argument("-v", "--validate", help="Validates Testfile", nargs=1, )
args = parser.parse_args()
if args.input:
validator = ValidationController(os.getcwd() + "/" + args.input[0])
if validator.logic():
tester = TestController(os.getcwd() + "/" + args.input[0])
tester.logic()
elif args.validate:
validator = ValidationController(os.getcwd() + "/" + args.validate[0])
validator.logic()
if __name__ == "__main__":
main(sys.argv[1:])
<commit_msg>Add colorama for coloring on windows
Add the module colorama that makes ANSI escape character sequences work under MS Windows. The coloring is used to give a better overview about the testresults<commit_after>#!/usr/bin/env python2
import os
import sys
import argparse
import logging
import datetime
import colorama
from src.application.Logger import Logger
from src.application.ValidationController import ValidationController
from src.application.TestController import TestController
def main(argv):
colorama.init()
logger = Logger()
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--input", help="Start with a Testfile", nargs=1)
parser.add_argument("-v", "--validate", help="Validates Testfile", nargs=1, )
args = parser.parse_args()
if args.input:
validator = ValidationController(os.getcwd() + "/" + args.input[0])
if validator.logic():
tester = TestController(os.getcwd() + "/" + args.input[0])
tester.logic()
elif args.validate:
validator = ValidationController(os.getcwd() + "/" + args.validate[0])
validator.logic()
if __name__ == "__main__":
main(sys.argv[1:])
|
37fd9a33d840d309e0b42239e86ceda08b1425c2
|
scripts/list_migrations.py
|
scripts/list_migrations.py
|
#!/usr/bin/env python
# encoding: utf-8
from __future__ import print_function

import sys

from alembic.script import ScriptDirectory


def detect_heads(migrations):
    heads = migrations.get_heads()
    return heads


def version_history(migrations):
    version_history = [
        (m.revision, m.doc) for m in migrations.walk_revisions()
    ]
    version_history.reverse()
    return version_history


def main(migrations_path):
    migrations = ScriptDirectory(migrations_path)

    heads = detect_heads(migrations)
    if len(heads) > 1:
        print("Migrations directory has multiple heads due to branching: {}".format(heads), file=sys.stderr)
        sys.exit(1)

    for version in version_history(migrations):
        print("{:35} {}".format(*version))


if __name__ == '__main__':
    main('migrations/')
|
#!/usr/bin/env python
# encoding: utf-8
from __future__ import print_function

import sys
import warnings

from alembic.script import ScriptDirectory

warnings.simplefilter('error')


def detect_heads(migrations):
    heads = migrations.get_heads()
    return heads


def version_history(migrations):
    version_history = [
        (m.revision, m.doc) for m in migrations.walk_revisions()
    ]
    version_history.reverse()
    return version_history


def main(migrations_path):
    migrations = ScriptDirectory(migrations_path)

    heads = detect_heads(migrations)
    if len(heads) > 1:
        print("Migrations directory has multiple heads due to branching: {}".format(heads), file=sys.stderr)
        sys.exit(1)

    for version in version_history(migrations):
        print("{:35} {}".format(*version))


if __name__ == '__main__':
    main('migrations/')
|
Abort migrations check if a version is present more than once
|
Abort migrations check if a version is present more than once
list_migrations checks whether we have more than one branch in the
list of migration versions. Since we've switched to a new revision
naming convention, pull requests open at the same time are likely
to use the same revision number when adding new migrations (ie if
the current latest migration is '500' two pull requests adding a
new one are both likely to use '510' for their revision id). When
this happens, alembic excludes on of the migrations from the tree
and doesn't consider it to be a multiple branches issue. Instead,
it prints out a warning. In order to make sure the tests fail when
this happens we transform the warning into an exception inside the
list_migrations script. This is similar to running the python
interpreter with `-W error`.
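A small self-contained illustration (standard-library behaviour only, not part of the script) of why installing the 'error' filter makes the duplicate-revision warning fatal: once the filter is active, any warning is raised as an exception rather than printed.

import warnings

warnings.simplefilter('error')  # escalate every warning to an exception

try:
    warnings.warn("Revision 510 is present more than once")  # stand-in for alembic's notice
except UserWarning as exc:
    print("warning escalated to an exception:", exc)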
|
Python
|
mit
|
alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api
|
#!/usr/bin/env python
# encoding: utf-8
from __future__ import print_function
import sys
from alembic.script import ScriptDirectory
def detect_heads(migrations):
heads = migrations.get_heads()
return heads
def version_history(migrations):
version_history = [
(m.revision, m.doc) for m in migrations.walk_revisions()
]
version_history.reverse()
return version_history
def main(migrations_path):
migrations = ScriptDirectory(migrations_path)
heads = detect_heads(migrations)
if len(heads) > 1:
print("Migrations directory has multiple heads due to branching: {}".format(heads), file=sys.stderr)
sys.exit(1)
for version in version_history(migrations):
print("{:35} {}".format(*version))
if __name__ == '__main__':
main('migrations/')
Abort migrations check if a version is present more than once
list_migrations checks whether we have more than one branch in the
list of migration versions. Since we've switched to a new revision
naming convention, pull requests open at the same time are likely
to use the same revision number when adding new migrations (ie if
the current latest migration is '500' two pull requests adding a
new one are both likely to use '510' for their revision id). When
this happens, alembic excludes one of the migrations from the tree
and doesn't consider it to be a multiple branches issue. Instead,
it prints out a warning. In order to make sure the tests fail when
this happens we transform the warning into an exception inside the
list_migrations script. This is similar to running the python
interpreter with `-W error`.
|
#!/usr/bin/env python
# encoding: utf-8
from __future__ import print_function
import sys
import warnings
from alembic.script import ScriptDirectory
warnings.simplefilter('error')
def detect_heads(migrations):
heads = migrations.get_heads()
return heads
def version_history(migrations):
version_history = [
(m.revision, m.doc) for m in migrations.walk_revisions()
]
version_history.reverse()
return version_history
def main(migrations_path):
migrations = ScriptDirectory(migrations_path)
heads = detect_heads(migrations)
if len(heads) > 1:
print("Migrations directory has multiple heads due to branching: {}".format(heads), file=sys.stderr)
sys.exit(1)
for version in version_history(migrations):
print("{:35} {}".format(*version))
if __name__ == '__main__':
main('migrations/')
|
<commit_before>#!/usr/bin/env python
# encoding: utf-8
from __future__ import print_function
import sys
from alembic.script import ScriptDirectory
def detect_heads(migrations):
heads = migrations.get_heads()
return heads
def version_history(migrations):
version_history = [
(m.revision, m.doc) for m in migrations.walk_revisions()
]
version_history.reverse()
return version_history
def main(migrations_path):
migrations = ScriptDirectory(migrations_path)
heads = detect_heads(migrations)
if len(heads) > 1:
print("Migrations directory has multiple heads due to branching: {}".format(heads), file=sys.stderr)
sys.exit(1)
for version in version_history(migrations):
print("{:35} {}".format(*version))
if __name__ == '__main__':
main('migrations/')
<commit_msg>Abort migrations check if a version is present more than once
list_migrations checks whether we have more than one branch in the
list of migration versions. Since we've switched to a new revision
naming convention, pull requests open at the same time are likely
to use the same revision number when adding new migrations (ie if
the current latest migration is '500' two pull requests adding a
new one are both likely to use '510' for their revision id). When
this happens, alembic excludes one of the migrations from the tree
and doesn't consider it to be a multiple branches issue. Instead,
it prints out a warning. In order to make sure the tests fail when
this happens we transform the warning into an exception inside the
list_migrations script. This is similar to running the python
interpreter with `-W error`.<commit_after>
|
#!/usr/bin/env python
# encoding: utf-8
from __future__ import print_function
import sys
import warnings
from alembic.script import ScriptDirectory
warnings.simplefilter('error')
def detect_heads(migrations):
heads = migrations.get_heads()
return heads
def version_history(migrations):
version_history = [
(m.revision, m.doc) for m in migrations.walk_revisions()
]
version_history.reverse()
return version_history
def main(migrations_path):
migrations = ScriptDirectory(migrations_path)
heads = detect_heads(migrations)
if len(heads) > 1:
print("Migrations directory has multiple heads due to branching: {}".format(heads), file=sys.stderr)
sys.exit(1)
for version in version_history(migrations):
print("{:35} {}".format(*version))
if __name__ == '__main__':
main('migrations/')
|
#!/usr/bin/env python
# encoding: utf-8
from __future__ import print_function
import sys
from alembic.script import ScriptDirectory
def detect_heads(migrations):
heads = migrations.get_heads()
return heads
def version_history(migrations):
version_history = [
(m.revision, m.doc) for m in migrations.walk_revisions()
]
version_history.reverse()
return version_history
def main(migrations_path):
migrations = ScriptDirectory(migrations_path)
heads = detect_heads(migrations)
if len(heads) > 1:
print("Migrations directory has multiple heads due to branching: {}".format(heads), file=sys.stderr)
sys.exit(1)
for version in version_history(migrations):
print("{:35} {}".format(*version))
if __name__ == '__main__':
main('migrations/')
Abort migrations check if a version is present more than once
list_migrations checks whether we have more than one branch in the
list of migration versions. Since we've switched to a new revision
naming convention, pull requests open at the same time are likely
to use the same revision number when adding new migrations (ie if
the current latest migration is '500' two pull requests adding a
new one are both likely to use '510' for their revision id). When
this happens, alembic excludes one of the migrations from the tree
and doesn't consider it to be a multiple branches issue. Instead,
it prints out a warning. In order to make sure the tests fail when
this happens we transform the warning into an exception inside the
list_migrations script. This is similar to running the python
interpreter with `-W error`.#!/usr/bin/env python
# encoding: utf-8
from __future__ import print_function
import sys
import warnings
from alembic.script import ScriptDirectory
warnings.simplefilter('error')
def detect_heads(migrations):
heads = migrations.get_heads()
return heads
def version_history(migrations):
version_history = [
(m.revision, m.doc) for m in migrations.walk_revisions()
]
version_history.reverse()
return version_history
def main(migrations_path):
migrations = ScriptDirectory(migrations_path)
heads = detect_heads(migrations)
if len(heads) > 1:
print("Migrations directory has multiple heads due to branching: {}".format(heads), file=sys.stderr)
sys.exit(1)
for version in version_history(migrations):
print("{:35} {}".format(*version))
if __name__ == '__main__':
main('migrations/')
|
<commit_before>#!/usr/bin/env python
# encoding: utf-8
from __future__ import print_function
import sys
from alembic.script import ScriptDirectory
def detect_heads(migrations):
heads = migrations.get_heads()
return heads
def version_history(migrations):
version_history = [
(m.revision, m.doc) for m in migrations.walk_revisions()
]
version_history.reverse()
return version_history
def main(migrations_path):
migrations = ScriptDirectory(migrations_path)
heads = detect_heads(migrations)
if len(heads) > 1:
print("Migrations directory has multiple heads due to branching: {}".format(heads), file=sys.stderr)
sys.exit(1)
for version in version_history(migrations):
print("{:35} {}".format(*version))
if __name__ == '__main__':
main('migrations/')
<commit_msg>Abort migrations check if a version is present more than once
list_migrations checks whether we have more than one branch in the
list of migration versions. Since we've switched to a new revision
naming convention, pull requests open at the same time are likely
to use the same revision number when adding new migrations (ie if
the current latest migration is '500' two pull requests adding a
new one are both likely to use '510' for their revision id). When
this happens, alembic excludes one of the migrations from the tree
and doesn't consider it to be a multiple branches issue. Instead,
it prints out a warning. In order to make sure the tests fail when
this happens we transform the warning into an exception inside the
list_migrations script. This is similar to running the python
interpreter with `-W error`.<commit_after>#!/usr/bin/env python
# encoding: utf-8
from __future__ import print_function
import sys
import warnings
from alembic.script import ScriptDirectory
warnings.simplefilter('error')
def detect_heads(migrations):
heads = migrations.get_heads()
return heads
def version_history(migrations):
version_history = [
(m.revision, m.doc) for m in migrations.walk_revisions()
]
version_history.reverse()
return version_history
def main(migrations_path):
migrations = ScriptDirectory(migrations_path)
heads = detect_heads(migrations)
if len(heads) > 1:
print("Migrations directory has multiple heads due to branching: {}".format(heads), file=sys.stderr)
sys.exit(1)
for version in version_history(migrations):
print("{:35} {}".format(*version))
if __name__ == '__main__':
main('migrations/')
|