column            type           min      max
commit            stringlengths  40       40
old_file          stringlengths  4        118
new_file          stringlengths  4        118
old_contents      stringlengths  0        2.94k
new_contents      stringlengths  1        4.43k
subject           stringlengths  15       444
message           stringlengths  16       3.45k
lang              stringclasses  1 value
license           stringclasses  13 values
repos             stringlengths  5        43.2k
prompt            stringlengths  17       4.58k
response          stringlengths  1        4.43k
prompt_tagged     stringlengths  58       4.62k
response_tagged   stringlengths  1        4.43k
text              stringlengths  132      7.29k
text_tagged       stringlengths  173      7.33k

commit: 74010276715e3570ad6f66144f2c2e31aff8948a
old_file: tests/test_local.py
new_file: tests/test_local.py

old_contents:

import cachelper


class TestMemorize:

    def test_should_cache_return_value(self, mocker):
        func = mocker.Mock()
        func.side_effect = lambda i: i * 2
        func.__name__ = 'double'
        cached = cachelper.memoize()(func)
        assert cached(2) == 4
        assert cached(2) == 4
        assert func.call_count == 1
        assert cached(4) == 8
        assert cached(4) == 8
        assert func.call_count == 2

    def test_can_clear_cache(self, mocker):
        func = mocker.Mock()
        func.side_effect = lambda i: i * 2
        func.__name__ = 'double'
        decorator = cachelper.memoize()
        cached = decorator(func)
        cached(10)
        cached.clear_cachelper_cache()
        cached(10)
        assert func.call_count == 2

new_contents:

import cachelper


class TestMemorize:

    def test_should_cache_return_value(self, mocker):
        func = mocker.Mock()
        func.side_effect = lambda i: i * 2
        func.__name__ = 'double'
        cached = cachelper.memoize()(func)
        assert cached(2) == 4
        assert cached(2) == 4
        assert func.call_count == 1
        assert cached(4) == 8
        assert cached(4) == 8
        assert func.call_count == 2

    def test_can_clear_cache(self, mocker):
        func = mocker.Mock()
        func.side_effect = lambda i: i * 2
        func.__name__ = 'double'
        decorator = cachelper.memoize()
        cached = decorator(func)
        cached(10)
        cached.clear_cachelper_cache()
        cached(10)
        assert func.call_count == 2

    def test_can_decorate_method(self, mocker):
        tracker = mocker.Mock()

        class A(object):

            @cachelper.memoize()
            def calculate(self, x, y):
                tracker()
                return x + y

        a1 = A()
        assert a1.calculate(1, 2) == 3
        assert a1.calculate(1, 2) == 3
        assert tracker.call_count == 1
        a2 = A()
        assert a2.calculate(1, 2) == 3
        assert tracker.call_count == 2

subject: Add test to make sure memoize works for methods
message: Add test to make sure memoize works for methods
lang: Python
license: mit
repos: suzaku/cachelper

commit: 5f3491b583599148dba71dac5279f4ef6eb77c10
old_file: tests/test_suite.py
new_file: tests/test_suite.py

old_contents:

#! /usr/bin/env python
#
# test_suite.py
#
# Copyright (c) 2015-2016 Junpei Kawamoto
#
# This software is released under the MIT License.
#
# http://opensource.org/licenses/mit-license.php
#
""" Test suite. """
from __future__ import absolute_import
import sys
import unittest

from . import downloader_test


def suite():
    """ Return a test suite. """
    loader = unittest.TestLoader()
    res = unittest.TestSuite()
    res.addTest(loader.loadTestsFromModule(downloader_test))
    return res


def main():
    """ The main function.

    Returns:
        exit code.
    """
    try:
        res = unittest.TextTestRunner(verbosity=2).run(suite())
    except KeyboardInterrupt:
        return -1
    else:
        return 0 if res.wasSuccessful() else 1


if __name__ == "__main__":
    sys.exit(main())

new_contents:

#! /usr/bin/env python
#
# test_suite.py
#
# Copyright (c) 2015-2016 Junpei Kawamoto
#
# This software is released under the MIT License.
#
# http://opensource.org/licenses/mit-license.php
#
""" Test suite. """
from __future__ import absolute_import
import sys
import unittest

from . import downloader_test
from . import source_test


def suite():
    """ Return a test suite. """
    loader = unittest.TestLoader()
    res = unittest.TestSuite()
    res.addTest(loader.loadTestsFromModule(downloader_test))
    res.addTest(loader.loadTestsFromModule(source_test))
    return res


def main():
    """ The main function.

    Returns:
        exit code.
    """
    try:
        res = unittest.TextTestRunner(verbosity=2).run(suite())
    except KeyboardInterrupt:
        return -1
    else:
        return 0 if res.wasSuccessful() else 1


if __name__ == "__main__":
    sys.exit(main())

subject: Update test suite generator to import tests in source_test.
message: Update test suite generator to import tests in source_test.
lang: Python
license: mit
repos: jkawamoto/roadie-gcp,jkawamoto/roadie-gcp

commit: b987306d23c6d7ee42fad25e4c1451c865320a49
old_file: examples/example.py
new_file: examples/example.py

old_contents:

# Copyright 2012 Christoph Reiter
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.

import sys
sys.path.append('..')

from pgi.repository import Gtk

if __name__ == '__main__':
    b = Gtk.Window("Helllloooo")
    b.show_all()
    Gtk.main()

new_contents:

# Copyright 2012 Christoph Reiter
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.

import sys
sys.path.insert(0, '..')

from pgi.repository import Gtk

if __name__ == '__main__':
    b = Gtk.Window("Helllloooo")
    b.show_all()
    Gtk.main()

subject: Prepend path so we always get the local copy.
message: Prepend path so we always get the local copy.
lang: Python
license: lgpl-2.1
repos: lazka/pgi,lazka/pgi

commit: a10ffe519c50bd248bd9bfcde648f66e15fb6fd3
old_file: node_bridge.py
new_file: node_bridge.py

old_contents:

import os
import platform
import subprocess

IS_OSX = platform.system() == 'Darwin'
IS_WINDOWS = platform.system() == 'Windows'


def node_bridge(data, bin, args=[]):
    env = None
    startupinfo = None
    if IS_OSX:
        # GUI apps in OS X doesn't contain .bashrc/.zshrc set paths
        env = os.environ.copy()
        env['PATH'] += ':/usr/local/bin'
    if IS_WINDOWS:
        startupinfo = subprocess.STARTUPINFO()
        startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
    try:
        p = subprocess.Popen(['node', bin] + args,
                             stdout=subprocess.PIPE,
                             stdin=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             env=env,
                             startupinfo=startupinfo)
    except OSError:
        raise Exception('Couldn\'t find Node.js. Make sure it\'s in your $PATH by running `node -v` in your command-line.')
    stdout, stderr = p.communicate(input=data.encode('utf-8'))
    stdout = stdout.decode('utf-8')
    stderr = stderr.decode('utf-8')
    if stderr:
        raise Exception('Error: %s' % stderr)
    else:
        return stdout

new_contents:

import os
import platform
import subprocess

IS_OSX = platform.system() == 'Darwin'
IS_WINDOWS = platform.system() == 'Windows'


def node_bridge(data, bin, args=[]):
    env = None
    startupinfo = None
    if IS_OSX:
        # GUI apps in OS X doesn't contain .bashrc/.zshrc set paths
        env = os.environ.copy()
        env['PATH'] += os.path.expanduser('~/n/bin')
        env['PATH'] += ':/usr/local/bin'
    if IS_WINDOWS:
        startupinfo = subprocess.STARTUPINFO()
        startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
    try:
        p = subprocess.Popen(['node', bin] + args,
                             stdout=subprocess.PIPE,
                             stdin=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             env=env,
                             startupinfo=startupinfo)
    except OSError:
        raise Exception('Couldn\'t find Node.js. Make sure it\'s in your $PATH by running `node -v` in your command-line.')
    stdout, stderr = p.communicate(input=data.encode('utf-8'))
    stdout = stdout.decode('utf-8')
    stderr = stderr.decode('utf-8')
    if stderr:
        raise Exception('Error: %s' % stderr)
    else:
        return stdout

subject: Add support for `n` Node.js version manager
message: Add support for `n` Node.js version manager
lang: Python
license: mit
repos: hudochenkov/sublime-postcss-sorting,hudochenkov/sublime-postcss-sorting

commit: 6edd80b47eb5e84ac8d7a711b687c404616c4c6f
old_file: kqueen_ui/server.py
new_file: kqueen_ui/server.py

old_contents:

from flask import Flask
from flask import redirect
from flask import url_for
from kqueen_ui.blueprints.ui.views import ui
from werkzeug.contrib.cache import SimpleCache

import logging
import os

logger = logging.getLogger(__name__)
cache = SimpleCache()
config_file = os.environ.get('KQUEEN_CONFIG_FILE', 'config/dev.py')


def create_app(config_file=config_file):
    app = Flask(__name__, static_folder='./asset/static')
    app.register_blueprint(ui, url_prefix='/ui')

    # load configuration
    if app.config.from_pyfile(config_file):
        logger.info('Loading configuration from {}'.format(config_file))
    else:
        raise Exception('Config file {} could not be loaded.'.format(config_file))

    return app


app = create_app()
app.logger.setLevel(logging.INFO)


@app.route('/')
def root():
    return redirect(url_for('ui.index'), code=302)


def run():
    logger.debug('kqueen_ui starting')
    app.run(port=8000)

new_contents:

from flask import Flask
from flask import redirect
from flask import url_for
from kqueen_ui.blueprints.ui.views import ui
from werkzeug.contrib.cache import SimpleCache

import logging
import os

logger = logging.getLogger(__name__)
cache = SimpleCache()
config_file = os.environ.get('KQUEEN_CONFIG_FILE', 'config/dev.py')


def create_app(config_file=config_file):
    app = Flask(__name__, static_folder='./asset/static')
    app.register_blueprint(ui, url_prefix='/ui')

    # load configuration
    if app.config.from_pyfile(config_file):
        logger.info('Loading configuration from {}'.format(config_file))
    else:
        raise Exception('Config file {} could not be loaded.'.format(config_file))

    # allow override of backend urls from env variables
    kqueen_api_url = os.getenv('KQUEEN_API_URL', app.config['KQUEEN_API_URL'])
    kqueen_auth_url = os.getenv('KQUEEN_AUTH_URL', app.config['KQUEEN_AUTH_URL'])
    app.config.update(
        KQUEEN_API_URL=kqueen_api_url,
        KQUEEN_AUTH_URL=kqueen_auth_url
    )

    return app


app = create_app()
app.logger.setLevel(logging.INFO)


@app.route('/')
def root():
    return redirect(url_for('ui.index'), code=302)


def run():
    logger.debug('kqueen_ui starting')
    app.run(port=8000)

subject: Allow override of backend urls from env variables
message: Allow override of backend urls from env variables
lang: Python
license: mit
repos: atengler/kqueen-ui,atengler/kqueen-ui,atengler/kqueen-ui,atengler/kqueen-ui

commit: 744d4a34fa3f5f514f3f1e525822360dd97e28e2
old_file: astroquery/ogle/tests/test_ogle_remote.py
new_file: astroquery/ogle/tests/test_ogle_remote.py

old_contents:

import pytest

import astropy.units as u
from astropy.coordinates import SkyCoord

from .. import Ogle


@pytest.mark.remote_data
def test_ogle_single():
    co = SkyCoord(0, 3, unit=(u.degree, u.degree), frame='galactic')
    response = Ogle.query_region(coord=co)
    assert len(response) == 1


@pytest.mark.remote_data
def test_ogle_list():
    co = SkyCoord(0, 3, unit=(u.degree, u.degree), frame='galactic')
    co_list = [co, co, co]
    response = Ogle.query_region(coord=co_list)
    assert len(response) == 3
    assert response['RA/Lon'][0] == response['RA/Lon'][1] == response['RA/Lon'][2]


@pytest.mark.remote_data
def test_ogle_list_values():
    co_list = [[0, 0, 0], [3, 3, 3]]
    response = Ogle.query_region(coord=co_list)
    assert len(response) == 3
    assert response['RA/Lon'][0] == response['RA/Lon'][1] == response['RA/Lon'][2]

new_contents:

import pytest

import astropy.units as u
from astropy.coordinates import SkyCoord

from .. import Ogle


@pytest.mark.remote_data
def test_ogle_single():
    co = SkyCoord(0, 3, unit=(u.degree, u.degree), frame='galactic')
    response = Ogle.query_region(coord=co)
    assert len(response) == 1


@pytest.mark.remote_data
def test_ogle_list():
    co = SkyCoord(0, 3, unit=(u.degree, u.degree), frame='galactic')
    co_list = [co, co, co]
    response = Ogle.query_region(coord=co_list)
    assert len(response) == 3
    assert response['RA[hr]'][0] == response['RA[hr]'][1] == response['RA[hr]'][2]


@pytest.mark.remote_data
def test_ogle_list_values():
    co_list = [[0, 0, 0], [3, 3, 3]]
    response = Ogle.query_region(coord=co_list)
    assert len(response) == 3
    assert response['RA[hr]'][0] == response['RA[hr]'][1] == response['RA[hr]'][2]

subject: Fix column name in tests
message: Fix column name in tests
lang: Python
license: bsd-3-clause
repos: imbasimba/astroquery,imbasimba/astroquery

commit: 315aedbfff9e345b1e4a4ffab999741bb9a802da
old_file: oopconcepts.py
new_file: oopconcepts.py

old_contents:

class Person:
    def __init__(self, name):
        self.name = name

    def say_hello(self):
        print("Hello, ", self.name)


p1 = Person("Allan")
p1.say_hello()
p2 = Person("John")
p2.say_hello()

new_contents:

class Classroom:
    def __init__(self):
        self._people = []

    def add_person(self, person):
        self._people.append(person)

    def remove_person(self, person):
        self._people.remove(person)

    def greet(self):
        for person in self._people:
            person.say_hello()


class Person:
    def __init__(self, name):
        self.name = name

    def say_hello(self):
        print("Hello, ", self.name)


room = Classroom()
room.add_person(Person("Scott"))
room.add_person(Person("John"))
room.add_person(Person("Paul"))
room.greet()

subject: Create classroom and some encapsulation
message: Create classroom and some encapsulation
lang: Python
license: mit
repos: cunctat0r/pythonstudy

commit: d4bc8cebf1d629ff1f3de18ca17af4b1fdde4926
old_file: thefuck/system/win32.py
new_file: thefuck/system/win32.py

old_contents:

import sys
import msvcrt
import win_unicode_console

from .. import const


def init_output():
    import colorama
    win_unicode_console.enable()
    colorama.init()


def get_key():
    ch = msvcrt.getch()
    if ch in (b'\x00', b'\xe0'):  # arrow or function key prefix?
        ch = msvcrt.getch()  # second call returns the actual key code
    if ch == b'\x03':
        raise const.KEY_CTRL_C
    if ch == b'H':
        return const.KEY_UP
    if ch == b'P':
        return const.KEY_DOWN
    return ch.decode(sys.stdout.encoding)

new_contents:

import sys
import msvcrt
import win_unicode_console

from .. import const


def init_output():
    import colorama
    win_unicode_console.enable()
    colorama.init()


def get_key():
    ch = msvcrt.getch()
    if ch in (b'\x00', b'\xe0'):  # arrow or function key prefix?
        ch = msvcrt.getch()  # second call returns the actual key code
    if ch == b'\x03':
        return const.KEY_CTRL_C
    if ch == b'H':
        return const.KEY_UP
    if ch == b'P':
        return const.KEY_DOWN
    return ch.decode(sys.stdout.encoding)

subject: Replace raise with return for Ctrl+C in Windows
message: Replace raise with return for Ctrl+C in Windows
  - Replace the raise `const.CtrlC` with `return const.CtrlC` to match the unix implementation and prevent a stacktrace when cancelling a command on Windows
lang: Python
license: mit
repos: scorphus/thefuck,mlk/thefuck,Clpsplug/thefuck,SimenB/thefuck,mlk/thefuck,nvbn/thefuck,scorphus/thefuck,SimenB/thefuck,nvbn/thefuck,Clpsplug/thefuck
3e37a216f532382e9a730a41677859f64b521574
thunder/utils/common.py
thunder/utils/common.py
def check_spark():
    SparkContext = False
    try:
        from pyspark import SparkContext
    finally:
        return SparkContext


def check_path(path, credentials=None):
    """
    Check that specified output path does not already exist

    The ValueError message will suggest calling with overwrite=True;
    this function is expected to be called from the various output methods
    that accept an 'overwrite' keyword argument.
    """
    from thunder.data.readers import get_file_reader
    reader = get_file_reader(path)(credentials=credentials)
    existing = reader.list(path, directories=True)
    if existing:
        raise ValueError("Path %s appears to already exist. Specify a new directory, "
                         "or call " % path + "with overwrite=True to overwrite.")


def connection_with_anon(credentials, anon=True):
    """
    Connect to S3 with automatic handling for anonymous access.

    Parameters
    ----------
    credentials : dict
        AWS access key ('access') and secret access key ('secret')

    anon : boolean, optional, default = True
        Whether to make an anonymous connection if credentials fail to authenticate
    """
    from boto.s3.connection import S3Connection
    from boto.exception import NoAuthHandlerFound

    try:
        conn = S3Connection(aws_access_key_id=credentials['access'],
                            aws_secret_access_key=credentials['secret'])
        return conn
    except NoAuthHandlerFound:
        if anon:
            conn = S3Connection(anon=True)
            return conn
        else:
            raise
def notsupported(mode):
    raise NotImplementedError("Operation not supported for mode '%s'" % mode)


def check_spark():
    SparkContext = False
    try:
        from pyspark import SparkContext
    finally:
        return SparkContext


def check_path(path, credentials=None):
    """
    Check that specified output path does not already exist

    The ValueError message will suggest calling with overwrite=True;
    this function is expected to be called from the various output methods
    that accept an 'overwrite' keyword argument.
    """
    from thunder.data.readers import get_file_reader
    reader = get_file_reader(path)(credentials=credentials)
    existing = reader.list(path, directories=True)
    if existing:
        raise ValueError("Path %s appears to already exist. Specify a new directory, "
                         "or call " % path + "with overwrite=True to overwrite.")


def connection_with_anon(credentials, anon=True):
    """
    Connect to S3 with automatic handling for anonymous access.

    Parameters
    ----------
    credentials : dict
        AWS access key ('access') and secret access key ('secret')

    anon : boolean, optional, default = True
        Whether to make an anonymous connection if credentials fail to authenticate
    """
    from boto.s3.connection import S3Connection
    from boto.exception import NoAuthHandlerFound

    try:
        conn = S3Connection(aws_access_key_id=credentials['access'],
                            aws_secret_access_key=credentials['secret'])
        return conn
    except NoAuthHandlerFound:
        if anon:
            conn = S3Connection(anon=True)
            return conn
        else:
            raise
Add method for raising not supported error
Add method for raising not supported error
Python
apache-2.0
thunder-project/thunder,jwittenbach/thunder,j-friedrich/thunder,j-friedrich/thunder
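For context, a hypothetical caller showing how a helper like notsupported() is meant to be used; the Data class and its mode attribute below are illustrative, not thunder's actual API surface.

def notsupported(mode):
    raise NotImplementedError("Operation not supported for mode '%s'" % mode)


class Data(object):  # illustrative stand-in, not thunder's real class
    def __init__(self, mode):
        self.mode = mode

    def cache(self):
        # only the distributed backend supports caching in this sketch
        if self.mode != 'spark':
            notsupported(self.mode)
        # spark-specific caching would go here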
e30d433153d9ad2f1d931f7f48b0ebbe9ba6763c
modules/new_module/new_module.py
modules/new_module/new_module.py
from models import custom_modules

from . import handlers


def register_module():
    """Registers this module in the registry."""

    global_urls = [
        ('/new-global-url', handlers.NewURLHandler)
        # Global URLs go on mycourse.appspot.com/url
    ]

    course_urls = [
        ('/new-course-url', handlers.NewURLHandler)
    ]  # Course URLs go on mycourse.appspot.com/course-name/url

    global custom_module
    custom_module = custom_modules.Module(
        'New module title (has to be unique)',
        'Implements some functionality',
        global_urls, course_urls)
    return custom_module
import logging

from models import custom_modules

from . import handlers


def register_module():
    """Registers this module in the registry."""

    def on_module_enabled():
        logging.info('Module new_module.py was just enabled')

    def on_module_disabled():
        logging.info('Module new_module.py was just disabled')

    global_urls = [
        ('/new-global-url', handlers.NewURLHandler)
        # Global URLs go on mycourse.appspot.com/url
    ]

    course_urls = [
        ('/new-course-url', handlers.NewURLHandler)
    ]  # Course URLs go on mycourse.appspot.com/course-name/url

    global custom_module
    custom_module = custom_modules.Module(
        'New module title (has to be unique)',
        'Implements some functionality',
        global_urls, course_urls,
        notify_module_disabled=on_module_disabled,
        notify_module_enabled=on_module_enabled)
    return custom_module
Add enable and disable hooks
Add enable and disable hooks
Python
apache-2.0
UniMOOC/gcb-new-module,UniMOOC/gcb-new-module,UniMOOC/gcb-new-module,UniMOOC/gcb-new-module
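A sketch of how a registry might invoke such notify hooks; this Module class is a simplified stand-in for custom_modules.Module, not Course Builder's actual implementation.

class Module(object):  # simplified stand-in for custom_modules.Module
    def __init__(self, name, desc, global_urls, course_urls,
                 notify_module_enabled=None, notify_module_disabled=None):
        self.name = name
        self._on_enabled = notify_module_enabled
        self._on_disabled = notify_module_disabled
        self.enabled = False

    def enable(self):
        self.enabled = True
        if self._on_enabled:
            self._on_enabled()  # runs the module's on_module_enabled hook

    def disable(self):
        self.enabled = False
        if self._on_disabled:
            self._on_disabled()  # runs the module's on_module_disabled hook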
314f387e3a227181926531f5230f21887d35038b
uploader/uploader.py
uploader/uploader.py
import os
import glob
import logging

import dropbox
from dropbox.client import DropboxClient, ErrorResponse

import settings
from settings import DROPBOX_TOKEN_FILE


def load_dropbox_token():
    with open(DROPBOX_TOKEN_FILE, 'r') as f:
        dropbox_token = f.read()
    return dropbox_token


def has_valid_dropbox_token():
    try:
        with open(DROPBOX_TOKEN_FILE, 'r') as f:
            dropbox_token = f.read()
        client = dropbox.client.DropboxClient(dropbox_token)
        client.account_info()
    except (IOError, ErrorResponse):
        return False
    return True


def get_files_to_upload():
    return glob.glob(settings.IMAGES_DIRECTORY + "/*.jpg")


def upload_file(path):
    access_token = load_dropbox_token()
    client = DropboxClient(access_token)
    name = path.split("/")[-1]
    with open(path, 'rb') as data:
        try:
            client.put_file(name, data)
        except Exception as e:
            logging.exception(e)
        else:
            os.remove(path)
import os
import glob
import logging
import subprocess

import dropbox
from dropbox.client import DropboxClient, ErrorResponse

import settings
from settings import DROPBOX_TOKEN_FILE


def load_dropbox_token():
    with open(DROPBOX_TOKEN_FILE, 'r') as f:
        dropbox_token = f.read()
    return dropbox_token


def has_valid_dropbox_token():
    try:
        with open(DROPBOX_TOKEN_FILE, 'r') as f:
            dropbox_token = f.read()
        client = dropbox.client.DropboxClient(dropbox_token)
        client.account_info()
    except (IOError, ErrorResponse):
        return False
    return True


def get_files_to_upload():
    return glob.glob(settings.IMAGES_DIRECTORY + "/*.jpg")


def upload_file(path):
    access_token = load_dropbox_token()
    client = DropboxClient(access_token)
    name = path.split("/")[-1]
    with open(path, 'rb') as data:
        try:
            client.put_file(name, data)
        except Exception as e:
            logging.exception(e)
        else:
            os.remove(path)


def has_network_connection():
    command = ['ping', '-c', '1', '-W', '2', 'www.dropbox.com']
    try:
        subprocess.check_output(command)
        return True
    except Exception:
        return False
Add util to test network connection
Add util to test network connection
Python
mit
projectweekend/Pi-Camera-Time-Lapse,projectweekend/Pi-Camera-Time-Lapse
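A usage sketch for the new helper: gating each upload pass on connectivity pays the ping cost once per cycle instead of failing per file. The scheduler loop below is hypothetical and assumes the helpers defined in uploader.py above.

import time


def run_upload_loop(interval=60):
    while True:
        if has_network_connection():  # one ping per cycle, not per file
            for path in get_files_to_upload():
                upload_file(path)
        time.sleep(interval)  # seconds between passes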
59fd414849907f73d5904f46139127ae3638c9bd
ankieta/petition_custom/forms.py
ankieta/petition_custom/forms.py
from petition.forms import SignatureForm
from crispy_forms.layout import Layout, Submit
from crispy_forms.bootstrap import PrependedText
from crispy_forms.helper import FormHelper
from django.utils.translation import ugettext as _
import swapper

Signature = swapper.load_model("petition", "Signature")


class CustomSignatureForm(SignatureForm):
    def __init__(self, *args, **kwargs):
        super(CustomSignatureForm, self).__init__(*args, **kwargs)
        self.helper = FormHelper(self)
        self.helper.form_method = 'post'
        self.helper.add_input(Submit('submit', _('Sign'),
                              css_class="btn-sign btn-lg btn-block"))
        self.helper.layout = Layout(
            'first_name',
            'second_name',
            PrependedText('email', '@'),
            PrependedText('city', '<i class="fa fa-globe"></i>'),
            PrependedText('telephone', '<i class="fa fa-phone"></i>'),
            'giodo',
            'newsletter',
        )

    class Meta:
        model = Signature
        field = ['first_name', 'second_name', 'email', 'city', 'telephone']
from petition.forms import SignatureForm
from crispy_forms.layout import Layout
from crispy_forms.bootstrap import PrependedText
import swapper

Signature = swapper.load_model("petition", "Signature")


class CustomSignatureForm(SignatureForm):
    def __init__(self, *args, **kwargs):
        super(CustomSignatureForm, self).__init__(*args, **kwargs)
        self.helper.layout = Layout(
            'first_name',
            'second_name',
            PrependedText('email', '@'),
            PrependedText('city', '<i class="fa fa-globe"></i>'),
            PrependedText('telephone', '<i class="fa fa-phone"></i>'),
            'giodo',
            'newsletter',
        )

    class Meta:
        model = Signature
        fields = ['first_name', 'second_name', 'email', 'city', 'newsletter',
                  'telephone']
Fix typo in CustomSignatureForm fields definition
Fix typo in CustomSignatureForm fields definition
Python
bsd-3-clause
ad-m/petycja-faoo,ad-m/petycja-faoo,ad-m/petycja-faoo
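Why the field/fields typo was easy to miss: an unknown attribute on the inner Meta class is just an ordinary class attribute, so Python raises no error, and Django's form machinery simply never finds the fields it looks up. A minimal illustration:

class Meta:  # stand-in for the inner Meta class above
    field = ['first_name']  # misspelled; nothing complains at class creation

# the attribute lookup that ModelForm machinery performs comes back empty
assert getattr(Meta, 'fields', None) is None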
80a9019cb24ea581a9cef0344caaf4cec4a95a94
testproject/chtest/consumers.py
testproject/chtest/consumers.py
from channels.sessions import enforce_ordering


#@enforce_ordering(slight=True)
def ws_connect(message):
    pass


#@enforce_ordering(slight=True)
def ws_message(message):
    "Echoes messages back to the client"
    message.reply_channel.send(message.content)
from channels.sessions import enforce_ordering


#@enforce_ordering(slight=True)
def ws_connect(message):
    pass


#@enforce_ordering(slight=True)
def ws_message(message):
    "Echoes messages back to the client"
    message.reply_channel.send({
        "text": message['text'],
    })
Fix echo endpoint in testproject
Fix echo endpoint in testproject
Python
bsd-3-clause
Krukov/channels,Coread/channels,django/channels,andrewgodwin/channels,linuxlewis/channels,Krukov/channels,raphael-boucher/channels,andrewgodwin/django-channels,raiderrobert/channels,Coread/channels
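The fix reflects the shape a reply channel expects for a WebSocket frame: a dict whose 'text' (or 'bytes') key carries the payload, whereas message.content also holds routing metadata that should not be echoed back. A sketch of an echo consumer handling both frame types, assuming the standard channels content keys:

def ws_echo(message):
    reply = {}
    if message.content.get('text') is not None:
        reply['text'] = message.content['text']      # text frame
    elif message.content.get('bytes') is not None:
        reply['bytes'] = message.content['bytes']    # binary frame
    message.reply_channel.send(reply)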
824c8cd3eb563de60ddf13fac1f7ca1341aa01f1
astral/api/tests/test_streams.py
astral/api/tests/test_streams.py
from tornado.httpclient import HTTPRequest
from nose.tools import eq_, ok_
import json

import faker

from astral.api.tests import BaseTest
from astral.models import Stream
from astral.models.tests.factories import StreamFactory


class StreamsHandlerTest(BaseTest):
    def test_get_streams(self):
        [StreamFactory() for _ in range(3)]
        response = self.fetch('/streams')
        eq_(response.code, 200)
        result = json.loads(response.body)
        ok_('streams' in result)
        for stream in result['streams']:
            ok_(Stream.get_by(name=stream['name']))

    def test_create_stream(self):
        data = {'name': faker.lorem.sentence()}
        eq_(Stream.get_by(name=data['name']), None)
        self.http_client.fetch(HTTPRequest(
            self.get_url('/streams'), 'POST', body=json.dumps(data)),
            self.stop)
        response = self.wait()
        eq_(response.code, 200)
        ok_(Stream.get_by(name=data['name']))
from tornado.httpclient import HTTPRequest
from nose.tools import eq_, ok_
import json

import faker

from astral.api.tests import BaseTest
from astral.models import Stream
from astral.models.tests.factories import StreamFactory


class StreamsHandlerTest(BaseTest):
    def test_get_streams(self):
        [StreamFactory() for _ in range(3)]
        response = self.fetch('/streams')
        eq_(response.code, 200)
        result = json.loads(response.body)
        ok_('streams' in result)
        for stream in result['streams']:
            ok_(Stream.get_by(name=stream['name']))

    def test_create_stream(self):
        data = {'name': faker.lorem.sentence()}
        eq_(Stream.get_by(name=data['name']), None)
        self.http_client.fetch(HTTPRequest(
            self.get_url('/streams'), 'POST', body=json.dumps(data),
            follow_redirects=False), self.stop)
        response = self.wait()
        eq_(response.code, 302)
        ok_(Stream.get_by(name=data['name']))
Update tests for new redirect-after-create stream.
Update tests for new redirect-after-create stream.
Python
mit
peplin/astral
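The follow_redirects=False flag is what makes the 302 observable: Tornado's HTTP client follows redirects by default, so without it the test would assert against the page the redirect lands on. A minimal illustration of the request construction (the URL is a placeholder):

from tornado.httpclient import HTTPRequest

request = HTTPRequest('http://localhost:8888/streams', 'POST', body='{}',
                      follow_redirects=False)  # surface the 302 itself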
e6a251da6d6902d2633afab7c4e9ecaf366f964c
tools/build_modref_templates.py
tools/build_modref_templates.py
#!/usr/bin/env python
"""Script to auto-generate our API docs.
"""
# stdlib imports
import os
import sys

# local imports
from apigen import ApiDocWriter

#*****************************************************************************
if __name__ == '__main__':
    nipypepath = os.path.abspath('..')
    sys.path.insert(1,nipypepath)
    package = 'nipype'
    outdir = os.path.join('api','generated')
    docwriter = ApiDocWriter(package)
    # Packages that should not be included in generated API docs.
    docwriter.package_skip_patterns += ['\.externals$',
                                        '\.utils$',
                                        ]
    # Modules that should not be included in generated API docs.
    docwriter.module_skip_patterns += ['\.version$',
                                       '\.interfaces\.afni$',
                                       '\.pipeline\.alloy$',
                                       '\.pipeline\.s3_node_wrapper$',
                                       ]
    docwriter.write_api_docs(outdir)
    docwriter.write_index(outdir, 'gen', relative_to='api')
    print '%d files written' % len(docwriter.written_modules)
#!/usr/bin/env python
"""Script to auto-generate our API docs.
"""
# stdlib imports
import os
import sys

# local imports
from apigen import ApiDocWriter

#*****************************************************************************
if __name__ == '__main__':
    nipypepath = os.path.abspath('..')
    sys.path.insert(1,nipypepath)
    package = 'nipype'
    outdir = os.path.join('api','generated')
    docwriter = ApiDocWriter(package)
    # Packages that should not be included in generated API docs.
    docwriter.package_skip_patterns += ['\.externals$',
                                        '\.utils$',
                                        '\.interfaces\.gorlin_glue$',
                                        ]
    # Modules that should not be included in generated API docs.
    docwriter.module_skip_patterns += ['\.version$',
                                       '\.interfaces\.afni$',
                                       '\.pipeline\.alloy$',
                                       '\.pipeline\.s3_node_wrapper$',
                                       ]
    docwriter.write_api_docs(outdir)
    docwriter.write_index(outdir, 'gen', relative_to='api')
    print '%d files written' % len(docwriter.written_modules)
Remove gorlin_glue from generated docs for now. It produces about 100 warnings during doc build.
Remove gorlin_glue from generated docs for now. It produces about 100 warnings during doc build. git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@926 ead46cd0-7350-4e37-8683-fc4c6f79bf00
Python
bsd-3-clause
glatard/nipype,grlee77/nipype,glatard/nipype,arokem/nipype,wanderine/nipype,FCP-INDI/nipype,arokem/nipype,mick-d/nipype_source,sgiavasis/nipype,FredLoney/nipype,carolFrohlich/nipype,mick-d/nipype,gerddie/nipype,Leoniela/nipype,wanderine/nipype,gerddie/nipype,dgellis90/nipype,Leoniela/nipype,FCP-INDI/nipype,FredLoney/nipype,carolFrohlich/nipype,sgiavasis/nipype,carlohamalainen/nipype,JohnGriffiths/nipype,glatard/nipype,dmordom/nipype,blakedewey/nipype,mick-d/nipype,glatard/nipype,blakedewey/nipype,JohnGriffiths/nipype,satra/NiPypeold,christianbrodbeck/nipype,mick-d/nipype,mick-d/nipype_source,wanderine/nipype,grlee77/nipype,rameshvs/nipype,rameshvs/nipype,carlohamalainen/nipype,satra/NiPypeold,rameshvs/nipype,pearsonlab/nipype,sgiavasis/nipype,mick-d/nipype_source,pearsonlab/nipype,christianbrodbeck/nipype,rameshvs/nipype,iglpdc/nipype,gerddie/nipype,fprados/nipype,FCP-INDI/nipype,dgellis90/nipype,JohnGriffiths/nipype,FredLoney/nipype,dgellis90/nipype,Leoniela/nipype,fprados/nipype,carolFrohlich/nipype,blakedewey/nipype,JohnGriffiths/nipype,iglpdc/nipype,fprados/nipype,gerddie/nipype,grlee77/nipype,arokem/nipype,grlee77/nipype,mick-d/nipype,wanderine/nipype,carlohamalainen/nipype,dmordom/nipype,dmordom/nipype,arokem/nipype,FCP-INDI/nipype,sgiavasis/nipype,pearsonlab/nipype,iglpdc/nipype,pearsonlab/nipype,dgellis90/nipype,iglpdc/nipype,carolFrohlich/nipype,blakedewey/nipype
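The skip patterns are regular expressions applied to dotted module paths, which is why the dots are escaped and the names end-anchored. A quick self-check of how the new pattern behaves, assuming re.search-style matching (the module names here are just examples):

import re

patterns = [r'\.externals$', r'\.utils$', r'\.interfaces\.gorlin_glue$']
assert any(re.search(p, 'nipype.interfaces.gorlin_glue') for p in patterns)
assert not any(re.search(p, 'nipype.interfaces.afni') for p in patterns)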
#!/usr/bin/env python """Script to auto-generate our API docs. """ # stdlib imports import os import sys # local imports from apigen import ApiDocWriter #***************************************************************************** if __name__ == '__main__': nipypepath = os.path.abspath('..') sys.path.insert(1,nipypepath) package = 'nipype' outdir = os.path.join('api','generated') docwriter = ApiDocWriter(package) # Packages that should not be included in generated API docs. docwriter.package_skip_patterns += ['\.externals$', '\.utils$', ] # Modules that should not be included in generated API docs. docwriter.module_skip_patterns += ['\.version$', '\.interfaces\.afni$', '\.pipeline\.alloy$', '\.pipeline\.s3_node_wrapper$', ] docwriter.write_api_docs(outdir) docwriter.write_index(outdir, 'gen', relative_to='api') print '%d files written' % len(docwriter.written_modules) Remove gorlin_glue from generated docs for now. It produces about 100 warnings during doc build. git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@926 ead46cd0-7350-4e37-8683-fc4c6f79bf00
#!/usr/bin/env python """Script to auto-generate our API docs. """ # stdlib imports import os import sys # local imports from apigen import ApiDocWriter #***************************************************************************** if __name__ == '__main__': nipypepath = os.path.abspath('..') sys.path.insert(1,nipypepath) package = 'nipype' outdir = os.path.join('api','generated') docwriter = ApiDocWriter(package) # Packages that should not be included in generated API docs. docwriter.package_skip_patterns += ['\.externals$', '\.utils$', '\.interfaces\.gorlin_glue$', ] # Modules that should not be included in generated API docs. docwriter.module_skip_patterns += ['\.version$', '\.interfaces\.afni$', '\.pipeline\.alloy$', '\.pipeline\.s3_node_wrapper$', ] docwriter.write_api_docs(outdir) docwriter.write_index(outdir, 'gen', relative_to='api') print '%d files written' % len(docwriter.written_modules)
<commit_before>#!/usr/bin/env python
"""Script to auto-generate our API docs.
"""
# stdlib imports
import os
import sys

# local imports
from apigen import ApiDocWriter

#*****************************************************************************
if __name__ == '__main__':
    nipypepath = os.path.abspath('..')
    sys.path.insert(1,nipypepath)
    package = 'nipype'
    outdir = os.path.join('api','generated')
    docwriter = ApiDocWriter(package)
    # Packages that should not be included in generated API docs.
    docwriter.package_skip_patterns += ['\.externals$',
                                        '\.utils$',
                                        ]
    # Modules that should not be included in generated API docs.
    docwriter.module_skip_patterns += ['\.version$',
                                       '\.interfaces\.afni$',
                                       '\.pipeline\.alloy$',
                                       '\.pipeline\.s3_node_wrapper$',
                                       ]
    docwriter.write_api_docs(outdir)
    docwriter.write_index(outdir, 'gen', relative_to='api')
    print '%d files written' % len(docwriter.written_modules)
<commit_msg>Remove gorlin_glue from generated docs for now. It produces about 100 warnings during doc build.

git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@926 ead46cd0-7350-4e37-8683-fc4c6f79bf00<commit_after>
#!/usr/bin/env python """Script to auto-generate our API docs. """ # stdlib imports import os import sys # local imports from apigen import ApiDocWriter #***************************************************************************** if __name__ == '__main__': nipypepath = os.path.abspath('..') sys.path.insert(1,nipypepath) package = 'nipype' outdir = os.path.join('api','generated') docwriter = ApiDocWriter(package) # Packages that should not be included in generated API docs. docwriter.package_skip_patterns += ['\.externals$', '\.utils$', '\.interfaces\.gorlin_glue$', ] # Modules that should not be included in generated API docs. docwriter.module_skip_patterns += ['\.version$', '\.interfaces\.afni$', '\.pipeline\.alloy$', '\.pipeline\.s3_node_wrapper$', ] docwriter.write_api_docs(outdir) docwriter.write_index(outdir, 'gen', relative_to='api') print '%d files written' % len(docwriter.written_modules)
#!/usr/bin/env python """Script to auto-generate our API docs. """ # stdlib imports import os import sys # local imports from apigen import ApiDocWriter #***************************************************************************** if __name__ == '__main__': nipypepath = os.path.abspath('..') sys.path.insert(1,nipypepath) package = 'nipype' outdir = os.path.join('api','generated') docwriter = ApiDocWriter(package) # Packages that should not be included in generated API docs. docwriter.package_skip_patterns += ['\.externals$', '\.utils$', ] # Modules that should not be included in generated API docs. docwriter.module_skip_patterns += ['\.version$', '\.interfaces\.afni$', '\.pipeline\.alloy$', '\.pipeline\.s3_node_wrapper$', ] docwriter.write_api_docs(outdir) docwriter.write_index(outdir, 'gen', relative_to='api') print '%d files written' % len(docwriter.written_modules) Remove gorlin_glue from generated docs for now. It produces about 100 warnings during doc build. git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@926 ead46cd0-7350-4e37-8683-fc4c6f79bf00#!/usr/bin/env python """Script to auto-generate our API docs. """ # stdlib imports import os import sys # local imports from apigen import ApiDocWriter #***************************************************************************** if __name__ == '__main__': nipypepath = os.path.abspath('..') sys.path.insert(1,nipypepath) package = 'nipype' outdir = os.path.join('api','generated') docwriter = ApiDocWriter(package) # Packages that should not be included in generated API docs. docwriter.package_skip_patterns += ['\.externals$', '\.utils$', '\.interfaces\.gorlin_glue$', ] # Modules that should not be included in generated API docs. docwriter.module_skip_patterns += ['\.version$', '\.interfaces\.afni$', '\.pipeline\.alloy$', '\.pipeline\.s3_node_wrapper$', ] docwriter.write_api_docs(outdir) docwriter.write_index(outdir, 'gen', relative_to='api') print '%d files written' % len(docwriter.written_modules)
<commit_before>#!/usr/bin/env python
"""Script to auto-generate our API docs.
"""
# stdlib imports
import os
import sys

# local imports
from apigen import ApiDocWriter

#*****************************************************************************
if __name__ == '__main__':
    nipypepath = os.path.abspath('..')
    sys.path.insert(1,nipypepath)
    package = 'nipype'
    outdir = os.path.join('api','generated')
    docwriter = ApiDocWriter(package)
    # Packages that should not be included in generated API docs.
    docwriter.package_skip_patterns += ['\.externals$',
                                        '\.utils$',
                                        ]
    # Modules that should not be included in generated API docs.
    docwriter.module_skip_patterns += ['\.version$',
                                       '\.interfaces\.afni$',
                                       '\.pipeline\.alloy$',
                                       '\.pipeline\.s3_node_wrapper$',
                                       ]
    docwriter.write_api_docs(outdir)
    docwriter.write_index(outdir, 'gen', relative_to='api')
    print '%d files written' % len(docwriter.written_modules)
<commit_msg>Remove gorlin_glue from generated docs for now. It produces about 100 warnings during doc build.

git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@926 ead46cd0-7350-4e37-8683-fc4c6f79bf00<commit_after>#!/usr/bin/env python
"""Script to auto-generate our API docs.
"""
# stdlib imports
import os
import sys

# local imports
from apigen import ApiDocWriter

#*****************************************************************************
if __name__ == '__main__':
    nipypepath = os.path.abspath('..')
    sys.path.insert(1,nipypepath)
    package = 'nipype'
    outdir = os.path.join('api','generated')
    docwriter = ApiDocWriter(package)
    # Packages that should not be included in generated API docs.
    docwriter.package_skip_patterns += ['\.externals$',
                                        '\.utils$',
                                        '\.interfaces\.gorlin_glue$',
                                        ]
    # Modules that should not be included in generated API docs.
    docwriter.module_skip_patterns += ['\.version$',
                                       '\.interfaces\.afni$',
                                       '\.pipeline\.alloy$',
                                       '\.pipeline\.s3_node_wrapper$',
                                       ]
    docwriter.write_api_docs(outdir)
    docwriter.write_index(outdir, 'gen', relative_to='api')
    print '%d files written' % len(docwriter.written_modules)
97cfc12433b32997bf7345512326d160ea4e48fa
systemd/install.py
systemd/install.py
""" Install Wabbit Systemd service. """ from glob import glob from shutil import copy from os import chdir from os.path import dirname, realpath from subprocess import call import sys import coils # Load configuration file. CONFIG = sys.argv[1] if len(sys.argv)>=2 else 'wabbit.conf' config = coils.Config(CONFIG) print('Creating service files') call('./python systemd/create.py', shell=True) # Do the work in the directory of this file. this_dir = dirname(realpath(__file__)) chdir(this_dir) dst = '/lib/systemd/system' for fname in glob('{}*.service'.format(config['db_name'])): print('Copying {} to {}'.format(fname, dst)) copy(fname, dst) print('Enabling {}'.format(fname)) call('systemctl enable {}'.format(fname), shell=True)
""" Install Wabbit Systemd service. """ from glob import glob from shutil import copy from os import chdir from os.path import dirname, realpath from subprocess import call import sys import coils # Load configuration file. CONFIG = sys.argv[1] if len(sys.argv)>=2 else 'wabbit.conf' config = coils.Config(CONFIG) # Do the work in the directory of this file. this_dir = dirname(realpath(__file__)) chdir(this_dir) dst = '/lib/systemd/system' for fname in glob('{}*.service'.format(config['db_name'])): print('Copying {} to {}'.format(fname, dst)) copy(fname, dst) print('Enabling {}'.format(fname)) call('systemctl enable {}'.format(fname), shell=True)
Remove creation of service files.
Remove creation of service files.
Python
mit
vmlaker/wabbit,vmlaker/wabbit,vmlaker/wabbit,vmlaker/wabbit
""" Install Wabbit Systemd service. """ from glob import glob from shutil import copy from os import chdir from os.path import dirname, realpath from subprocess import call import sys import coils # Load configuration file. CONFIG = sys.argv[1] if len(sys.argv)>=2 else 'wabbit.conf' config = coils.Config(CONFIG) print('Creating service files') call('./python systemd/create.py', shell=True) # Do the work in the directory of this file. this_dir = dirname(realpath(__file__)) chdir(this_dir) dst = '/lib/systemd/system' for fname in glob('{}*.service'.format(config['db_name'])): print('Copying {} to {}'.format(fname, dst)) copy(fname, dst) print('Enabling {}'.format(fname)) call('systemctl enable {}'.format(fname), shell=True) Remove creation of service files.
""" Install Wabbit Systemd service. """ from glob import glob from shutil import copy from os import chdir from os.path import dirname, realpath from subprocess import call import sys import coils # Load configuration file. CONFIG = sys.argv[1] if len(sys.argv)>=2 else 'wabbit.conf' config = coils.Config(CONFIG) # Do the work in the directory of this file. this_dir = dirname(realpath(__file__)) chdir(this_dir) dst = '/lib/systemd/system' for fname in glob('{}*.service'.format(config['db_name'])): print('Copying {} to {}'.format(fname, dst)) copy(fname, dst) print('Enabling {}'.format(fname)) call('systemctl enable {}'.format(fname), shell=True)
<commit_before>""" Install Wabbit Systemd service. """ from glob import glob from shutil import copy from os import chdir from os.path import dirname, realpath from subprocess import call import sys import coils # Load configuration file. CONFIG = sys.argv[1] if len(sys.argv)>=2 else 'wabbit.conf' config = coils.Config(CONFIG) print('Creating service files') call('./python systemd/create.py', shell=True) # Do the work in the directory of this file. this_dir = dirname(realpath(__file__)) chdir(this_dir) dst = '/lib/systemd/system' for fname in glob('{}*.service'.format(config['db_name'])): print('Copying {} to {}'.format(fname, dst)) copy(fname, dst) print('Enabling {}'.format(fname)) call('systemctl enable {}'.format(fname), shell=True) <commit_msg>Remove creation of service files.<commit_after>
""" Install Wabbit Systemd service. """ from glob import glob from shutil import copy from os import chdir from os.path import dirname, realpath from subprocess import call import sys import coils # Load configuration file. CONFIG = sys.argv[1] if len(sys.argv)>=2 else 'wabbit.conf' config = coils.Config(CONFIG) # Do the work in the directory of this file. this_dir = dirname(realpath(__file__)) chdir(this_dir) dst = '/lib/systemd/system' for fname in glob('{}*.service'.format(config['db_name'])): print('Copying {} to {}'.format(fname, dst)) copy(fname, dst) print('Enabling {}'.format(fname)) call('systemctl enable {}'.format(fname), shell=True)
""" Install Wabbit Systemd service. """ from glob import glob from shutil import copy from os import chdir from os.path import dirname, realpath from subprocess import call import sys import coils # Load configuration file. CONFIG = sys.argv[1] if len(sys.argv)>=2 else 'wabbit.conf' config = coils.Config(CONFIG) print('Creating service files') call('./python systemd/create.py', shell=True) # Do the work in the directory of this file. this_dir = dirname(realpath(__file__)) chdir(this_dir) dst = '/lib/systemd/system' for fname in glob('{}*.service'.format(config['db_name'])): print('Copying {} to {}'.format(fname, dst)) copy(fname, dst) print('Enabling {}'.format(fname)) call('systemctl enable {}'.format(fname), shell=True) Remove creation of service files.""" Install Wabbit Systemd service. """ from glob import glob from shutil import copy from os import chdir from os.path import dirname, realpath from subprocess import call import sys import coils # Load configuration file. CONFIG = sys.argv[1] if len(sys.argv)>=2 else 'wabbit.conf' config = coils.Config(CONFIG) # Do the work in the directory of this file. this_dir = dirname(realpath(__file__)) chdir(this_dir) dst = '/lib/systemd/system' for fname in glob('{}*.service'.format(config['db_name'])): print('Copying {} to {}'.format(fname, dst)) copy(fname, dst) print('Enabling {}'.format(fname)) call('systemctl enable {}'.format(fname), shell=True)
<commit_before>""" Install Wabbit Systemd service. """ from glob import glob from shutil import copy from os import chdir from os.path import dirname, realpath from subprocess import call import sys import coils # Load configuration file. CONFIG = sys.argv[1] if len(sys.argv)>=2 else 'wabbit.conf' config = coils.Config(CONFIG) print('Creating service files') call('./python systemd/create.py', shell=True) # Do the work in the directory of this file. this_dir = dirname(realpath(__file__)) chdir(this_dir) dst = '/lib/systemd/system' for fname in glob('{}*.service'.format(config['db_name'])): print('Copying {} to {}'.format(fname, dst)) copy(fname, dst) print('Enabling {}'.format(fname)) call('systemctl enable {}'.format(fname), shell=True) <commit_msg>Remove creation of service files.<commit_after>""" Install Wabbit Systemd service. """ from glob import glob from shutil import copy from os import chdir from os.path import dirname, realpath from subprocess import call import sys import coils # Load configuration file. CONFIG = sys.argv[1] if len(sys.argv)>=2 else 'wabbit.conf' config = coils.Config(CONFIG) # Do the work in the directory of this file. this_dir = dirname(realpath(__file__)) chdir(this_dir) dst = '/lib/systemd/system' for fname in glob('{}*.service'.format(config['db_name'])): print('Copying {} to {}'.format(fname, dst)) copy(fname, dst) print('Enabling {}'.format(fname)) call('systemctl enable {}'.format(fname), shell=True)
d2032ed28e97b8a23c4eec95fcadfaa80e944f01
conman/pages/tests/test_views.py
conman/pages/tests/test_views.py
from django.test import TestCase

from conman.tests.utils import RequestTestCase

from . import factories
from .. import views


class TestPageDetail(RequestTestCase):
    def test_get_object(self):
        """PageDetail displays the page instance passed in the node kwarg."""
        request = self.create_request()
        page = factories.PageFactory.create()
        view = views.PageDetail(request=request, kwargs={'node': page})

        obj = view.get_object()

        self.assertEqual(obj, page)


class TestPageDetailIntegration(TestCase):
    view = views.PageDetail

    def test_get(self):
        """A page's content is rendered at its url."""
        page = factories.PageFactory.create(content='This is a test')

        response = self.client.get(page.url)

        self.assertIn(page.content, response.rendered_content)
from django.test import TestCase

from conman.tests.utils import RequestTestCase

from . import factories
from .. import views


class TestPageDetail(RequestTestCase):
    def test_get_object(self):
        """PageDetail displays the page instance passed in the node kwarg."""
        request = self.create_request()
        page = factories.PageFactory.create()
        view = views.PageDetail(request=request, kwargs={'node': page})

        obj = view.get_object()

        self.assertEqual(obj, page)


class TestPageDetailIntegration(TestCase):
    def test_get(self):
        """A page's content is rendered at its url."""
        page = factories.PageFactory.create(content='This is a test')

        response = self.client.get(page.url)

        self.assertIn(page.content, response.rendered_content)
Remove unused test class attr
Remove unused test class attr
Python
bsd-2-clause
meshy/django-conman,Ian-Foote/django-conman,meshy/django-conman
from django.test import TestCase

from conman.tests.utils import RequestTestCase

from . import factories
from .. import views


class TestPageDetail(RequestTestCase):
    def test_get_object(self):
        """PageDetail displays the page instance passed in the node kwarg."""
        request = self.create_request()
        page = factories.PageFactory.create()
        view = views.PageDetail(request=request, kwargs={'node': page})

        obj = view.get_object()

        self.assertEqual(obj, page)


class TestPageDetailIntegration(TestCase):
    view = views.PageDetail

    def test_get(self):
        """A page's content is rendered at its url."""
        page = factories.PageFactory.create(content='This is a test')

        response = self.client.get(page.url)

        self.assertIn(page.content, response.rendered_content)

Remove unused test class attr
from django.test import TestCase

from conman.tests.utils import RequestTestCase

from . import factories
from .. import views


class TestPageDetail(RequestTestCase):
    def test_get_object(self):
        """PageDetail displays the page instance passed in the node kwarg."""
        request = self.create_request()
        page = factories.PageFactory.create()
        view = views.PageDetail(request=request, kwargs={'node': page})

        obj = view.get_object()

        self.assertEqual(obj, page)


class TestPageDetailIntegration(TestCase):
    def test_get(self):
        """A page's content is rendered at its url."""
        page = factories.PageFactory.create(content='This is a test')

        response = self.client.get(page.url)

        self.assertIn(page.content, response.rendered_content)
<commit_before>from django.test import TestCase

from conman.tests.utils import RequestTestCase

from . import factories
from .. import views


class TestPageDetail(RequestTestCase):
    def test_get_object(self):
        """PageDetail displays the page instance passed in the node kwarg."""
        request = self.create_request()
        page = factories.PageFactory.create()
        view = views.PageDetail(request=request, kwargs={'node': page})

        obj = view.get_object()

        self.assertEqual(obj, page)


class TestPageDetailIntegration(TestCase):
    view = views.PageDetail

    def test_get(self):
        """A page's content is rendered at its url."""
        page = factories.PageFactory.create(content='This is a test')

        response = self.client.get(page.url)

        self.assertIn(page.content, response.rendered_content)
<commit_msg>Remove unused test class attr<commit_after>
from django.test import TestCase

from conman.tests.utils import RequestTestCase

from . import factories
from .. import views


class TestPageDetail(RequestTestCase):
    def test_get_object(self):
        """PageDetail displays the page instance passed in the node kwarg."""
        request = self.create_request()
        page = factories.PageFactory.create()
        view = views.PageDetail(request=request, kwargs={'node': page})

        obj = view.get_object()

        self.assertEqual(obj, page)


class TestPageDetailIntegration(TestCase):
    def test_get(self):
        """A page's content is rendered at its url."""
        page = factories.PageFactory.create(content='This is a test')

        response = self.client.get(page.url)

        self.assertIn(page.content, response.rendered_content)
from django.test import TestCase

from conman.tests.utils import RequestTestCase

from . import factories
from .. import views


class TestPageDetail(RequestTestCase):
    def test_get_object(self):
        """PageDetail displays the page instance passed in the node kwarg."""
        request = self.create_request()
        page = factories.PageFactory.create()
        view = views.PageDetail(request=request, kwargs={'node': page})

        obj = view.get_object()

        self.assertEqual(obj, page)


class TestPageDetailIntegration(TestCase):
    view = views.PageDetail

    def test_get(self):
        """A page's content is rendered at its url."""
        page = factories.PageFactory.create(content='This is a test')

        response = self.client.get(page.url)

        self.assertIn(page.content, response.rendered_content)

Remove unused test class attr
from django.test import TestCase

from conman.tests.utils import RequestTestCase

from . import factories
from .. import views


class TestPageDetail(RequestTestCase):
    def test_get_object(self):
        """PageDetail displays the page instance passed in the node kwarg."""
        request = self.create_request()
        page = factories.PageFactory.create()
        view = views.PageDetail(request=request, kwargs={'node': page})

        obj = view.get_object()

        self.assertEqual(obj, page)


class TestPageDetailIntegration(TestCase):
    def test_get(self):
        """A page's content is rendered at its url."""
        page = factories.PageFactory.create(content='This is a test')

        response = self.client.get(page.url)

        self.assertIn(page.content, response.rendered_content)
<commit_before>from django.test import TestCase

from conman.tests.utils import RequestTestCase

from . import factories
from .. import views


class TestPageDetail(RequestTestCase):
    def test_get_object(self):
        """PageDetail displays the page instance passed in the node kwarg."""
        request = self.create_request()
        page = factories.PageFactory.create()
        view = views.PageDetail(request=request, kwargs={'node': page})

        obj = view.get_object()

        self.assertEqual(obj, page)


class TestPageDetailIntegration(TestCase):
    view = views.PageDetail

    def test_get(self):
        """A page's content is rendered at its url."""
        page = factories.PageFactory.create(content='This is a test')

        response = self.client.get(page.url)

        self.assertIn(page.content, response.rendered_content)
<commit_msg>Remove unused test class attr<commit_after>from django.test import TestCase

from conman.tests.utils import RequestTestCase

from . import factories
from .. import views


class TestPageDetail(RequestTestCase):
    def test_get_object(self):
        """PageDetail displays the page instance passed in the node kwarg."""
        request = self.create_request()
        page = factories.PageFactory.create()
        view = views.PageDetail(request=request, kwargs={'node': page})

        obj = view.get_object()

        self.assertEqual(obj, page)


class TestPageDetailIntegration(TestCase):
    def test_get(self):
        """A page's content is rendered at its url."""
        page = factories.PageFactory.create(content='This is a test')

        response = self.client.get(page.url)

        self.assertIn(page.content, response.rendered_content)
42d3a71acc586bc92800e7ac21b7838f05cb595c
osbrain/__init__.py
osbrain/__init__.py
import Pyro4

Pyro4.config.SERIALIZERS_ACCEPTED.add('pickle')
Pyro4.config.SERIALIZER = 'pickle'
Pyro4.config.THREADPOOL_SIZE = 16
Pyro4.config.SERVERTYPE = 'multiplex'
Pyro4.config.REQUIRE_EXPOSE = False
# TODO: should we set COMMTIMEOUT as well?
Pyro4.config.DETAILED_TRACEBACK = True

__version__ = '0.2.2'

from .core import BaseAgent, Agent, run_agent
from .nameserver import random_nameserver, run_nameserver
from .proxy import Proxy, NSProxy
from .address import SocketAddress, AgentAddress
from .logging import Logger, run_logger
import Pyro4

Pyro4.config.SERIALIZERS_ACCEPTED.add('pickle')
Pyro4.config.SERIALIZER = 'pickle'
Pyro4.config.THREADPOOL_SIZE = 16
Pyro4.config.SERVERTYPE = 'multiplex'
Pyro4.config.REQUIRE_EXPOSE = False
# TODO: should we set COMMTIMEOUT as well?
Pyro4.config.DETAILED_TRACEBACK = True

__version__ = '0.2.3-devel'

from .core import BaseAgent, Agent, run_agent
from .nameserver import random_nameserver, run_nameserver
from .proxy import Proxy, NSProxy
from .address import SocketAddress, AgentAddress
from .logging import Logger, run_logger
Set osBrain version to 0.2.3-devel
Set osBrain version to 0.2.3-devel
Python
apache-2.0
opensistemas-hub/osbrain
import Pyro4

Pyro4.config.SERIALIZERS_ACCEPTED.add('pickle')
Pyro4.config.SERIALIZER = 'pickle'
Pyro4.config.THREADPOOL_SIZE = 16
Pyro4.config.SERVERTYPE = 'multiplex'
Pyro4.config.REQUIRE_EXPOSE = False
# TODO: should we set COMMTIMEOUT as well?
Pyro4.config.DETAILED_TRACEBACK = True

__version__ = '0.2.2'

from .core import BaseAgent, Agent, run_agent
from .nameserver import random_nameserver, run_nameserver
from .proxy import Proxy, NSProxy
from .address import SocketAddress, AgentAddress
from .logging import Logger, run_logger

Set osBrain version to 0.2.3-devel
import Pyro4

Pyro4.config.SERIALIZERS_ACCEPTED.add('pickle')
Pyro4.config.SERIALIZER = 'pickle'
Pyro4.config.THREADPOOL_SIZE = 16
Pyro4.config.SERVERTYPE = 'multiplex'
Pyro4.config.REQUIRE_EXPOSE = False
# TODO: should we set COMMTIMEOUT as well?
Pyro4.config.DETAILED_TRACEBACK = True

__version__ = '0.2.3-devel'

from .core import BaseAgent, Agent, run_agent
from .nameserver import random_nameserver, run_nameserver
from .proxy import Proxy, NSProxy
from .address import SocketAddress, AgentAddress
from .logging import Logger, run_logger
<commit_before>import Pyro4

Pyro4.config.SERIALIZERS_ACCEPTED.add('pickle')
Pyro4.config.SERIALIZER = 'pickle'
Pyro4.config.THREADPOOL_SIZE = 16
Pyro4.config.SERVERTYPE = 'multiplex'
Pyro4.config.REQUIRE_EXPOSE = False
# TODO: should we set COMMTIMEOUT as well?
Pyro4.config.DETAILED_TRACEBACK = True

__version__ = '0.2.2'

from .core import BaseAgent, Agent, run_agent
from .nameserver import random_nameserver, run_nameserver
from .proxy import Proxy, NSProxy
from .address import SocketAddress, AgentAddress
from .logging import Logger, run_logger
<commit_msg>Set osBrain version to 0.2.3-devel<commit_after>
import Pyro4

Pyro4.config.SERIALIZERS_ACCEPTED.add('pickle')
Pyro4.config.SERIALIZER = 'pickle'
Pyro4.config.THREADPOOL_SIZE = 16
Pyro4.config.SERVERTYPE = 'multiplex'
Pyro4.config.REQUIRE_EXPOSE = False
# TODO: should we set COMMTIMEOUT as well?
Pyro4.config.DETAILED_TRACEBACK = True

__version__ = '0.2.3-devel'

from .core import BaseAgent, Agent, run_agent
from .nameserver import random_nameserver, run_nameserver
from .proxy import Proxy, NSProxy
from .address import SocketAddress, AgentAddress
from .logging import Logger, run_logger
import Pyro4

Pyro4.config.SERIALIZERS_ACCEPTED.add('pickle')
Pyro4.config.SERIALIZER = 'pickle'
Pyro4.config.THREADPOOL_SIZE = 16
Pyro4.config.SERVERTYPE = 'multiplex'
Pyro4.config.REQUIRE_EXPOSE = False
# TODO: should we set COMMTIMEOUT as well?
Pyro4.config.DETAILED_TRACEBACK = True

__version__ = '0.2.2'

from .core import BaseAgent, Agent, run_agent
from .nameserver import random_nameserver, run_nameserver
from .proxy import Proxy, NSProxy
from .address import SocketAddress, AgentAddress
from .logging import Logger, run_logger

Set osBrain version to 0.2.3-devel
import Pyro4

Pyro4.config.SERIALIZERS_ACCEPTED.add('pickle')
Pyro4.config.SERIALIZER = 'pickle'
Pyro4.config.THREADPOOL_SIZE = 16
Pyro4.config.SERVERTYPE = 'multiplex'
Pyro4.config.REQUIRE_EXPOSE = False
# TODO: should we set COMMTIMEOUT as well?
Pyro4.config.DETAILED_TRACEBACK = True

__version__ = '0.2.3-devel'

from .core import BaseAgent, Agent, run_agent
from .nameserver import random_nameserver, run_nameserver
from .proxy import Proxy, NSProxy
from .address import SocketAddress, AgentAddress
from .logging import Logger, run_logger
<commit_before>import Pyro4

Pyro4.config.SERIALIZERS_ACCEPTED.add('pickle')
Pyro4.config.SERIALIZER = 'pickle'
Pyro4.config.THREADPOOL_SIZE = 16
Pyro4.config.SERVERTYPE = 'multiplex'
Pyro4.config.REQUIRE_EXPOSE = False
# TODO: should we set COMMTIMEOUT as well?
Pyro4.config.DETAILED_TRACEBACK = True

__version__ = '0.2.2'

from .core import BaseAgent, Agent, run_agent
from .nameserver import random_nameserver, run_nameserver
from .proxy import Proxy, NSProxy
from .address import SocketAddress, AgentAddress
from .logging import Logger, run_logger
<commit_msg>Set osBrain version to 0.2.3-devel<commit_after>import Pyro4

Pyro4.config.SERIALIZERS_ACCEPTED.add('pickle')
Pyro4.config.SERIALIZER = 'pickle'
Pyro4.config.THREADPOOL_SIZE = 16
Pyro4.config.SERVERTYPE = 'multiplex'
Pyro4.config.REQUIRE_EXPOSE = False
# TODO: should we set COMMTIMEOUT as well?
Pyro4.config.DETAILED_TRACEBACK = True

__version__ = '0.2.3-devel'

from .core import BaseAgent, Agent, run_agent
from .nameserver import random_nameserver, run_nameserver
from .proxy import Proxy, NSProxy
from .address import SocketAddress, AgentAddress
from .logging import Logger, run_logger
b83958697004d7203fed20a3024efe3c653f9535
tiddlywebconfig.py
tiddlywebconfig.py
config = {
    'wikitext.default_renderer': 'twikified',
    'wikitext.type_render_map': {
        'text/x-tiddlywiki': 'twikified'
    }
}
config = {
    'wikitext.default_renderer': 'tiddlywebplugins.twikified',
    'wikitext.type_render_map': {
        'text/x-tiddlywiki': 'tiddlywebplugins.twikified'
    }
}
Use fully qualified module names
Use fully qualified module names

This is so that the tiddlyweb instance used in the tests can find the renderer
Python
bsd-3-clause
TiddlySpace/tiddlywebplugins.twikified
config = {
    'wikitext.default_renderer': 'twikified',
    'wikitext.type_render_map': {
        'text/x-tiddlywiki': 'twikified'
    }
}

Use fully qualified module names

This is so that the tiddlyweb instance used in the tests can find the renderer
config = {
    'wikitext.default_renderer': 'tiddlywebplugins.twikified',
    'wikitext.type_render_map': {
        'text/x-tiddlywiki': 'tiddlywebplugins.twikified'
    }
}
<commit_before>config = {
    'wikitext.default_renderer': 'twikified',
    'wikitext.type_render_map': {
        'text/x-tiddlywiki': 'twikified'
    }
}
<commit_msg>Use fully qualified module names

This is so that the tiddlyweb instance used in the tests can find the renderer<commit_after>
config = {
    'wikitext.default_renderer': 'tiddlywebplugins.twikified',
    'wikitext.type_render_map': {
        'text/x-tiddlywiki': 'tiddlywebplugins.twikified'
    }
}
config = {
    'wikitext.default_renderer': 'twikified',
    'wikitext.type_render_map': {
        'text/x-tiddlywiki': 'twikified'
    }
}

Use fully qualified module names

This is so that the tiddlyweb instance used in the tests can find the renderer
config = {
    'wikitext.default_renderer': 'tiddlywebplugins.twikified',
    'wikitext.type_render_map': {
        'text/x-tiddlywiki': 'tiddlywebplugins.twikified'
    }
}
<commit_before>config = {
    'wikitext.default_renderer': 'twikified',
    'wikitext.type_render_map': {
        'text/x-tiddlywiki': 'twikified'
    }
}
<commit_msg>Use fully qualified module names

This is so that the tiddlyweb instance used in the tests can find the renderer<commit_after>config = {
    'wikitext.default_renderer': 'tiddlywebplugins.twikified',
    'wikitext.type_render_map': {
        'text/x-tiddlywiki': 'tiddlywebplugins.twikified'
    }
}
fab0855e7076d7cfcfe2d65a820ed5099084f543
privileges/views.py
privileges/views.py
import urlparse
from functools import wraps

from django.conf import settings
from django.utils.decorators import available_attrs, method_decorator
from django.contrib.auth import REDIRECT_FIELD_NAME

from privileges.forms import GrantForm
from privileges.models import Grant


def owner_required(view_func):
    @wraps(view_func, assigned=available_attrs(view_func))
    def _wrapped_view(request, *args, **kwargs):
        if request.user.is_authenticated():
            if request.user.username == kwargs["username"] or \
               request.user.is_superuser:
                return view_func(request, *args, **kwargs)
        path = request.build_absolute_uri()
        login_scheme, login_netloc = urlparse.urlparse(settings.LOGIN_URL)[:2]
        current_scheme, current_netloc = urlparse.urlparse(path)[:2]
        if ((not login_scheme or login_scheme == current_scheme) and
                (not login_netloc or login_netloc == current_netloc)):
            path = request.get_full_path()
        from django.contrib.auth.views import redirect_to_login
        return redirect_to_login(path, redirect_field_name=REDIRECT_FIELD_NAME)
    return _wrapped_view


def cbv_decorator(decorator):
    def _decorator(cls):
        cls.dispatch = method_decorator(decorator)(cls.dispatch)
        return cls
    return _decorator
import urlparse
from functools import wraps

from django.conf import settings
from django.utils.decorators import available_attrs, method_decorator
from django.contrib.auth import REDIRECT_FIELD_NAME

from privileges.forms import GrantForm
from privileges.models import Grant


def owner_required(view_func):
    @wraps(view_func, assigned=available_attrs(view_func))
    def _wrapped_view(request, *args, **kwargs):
        if request.user.is_authenticated():
            if request.user.username == kwargs["username"] or \
               request.user.is_superuser:
                return view_func(request, *args, **kwargs)
        path = request.build_absolute_uri()
        login_scheme, login_netloc = urlparse.urlparse(settings.LOGIN_URL)[:2]
        current_scheme, current_netloc = urlparse.urlparse(path)[:2]
        if ((not login_scheme or login_scheme == current_scheme) and
                (not login_netloc or login_netloc == current_netloc)):
            path = request.get_full_path()
        from django.contrib.auth.views import redirect_to_login
        return redirect_to_login(path, redirect_field_name=REDIRECT_FIELD_NAME)
    return _wrapped_view


def cbv_decorator(decorator):
    def _decorator(cls):
        cls.dispatch = method_decorator(decorator)(cls.dispatch)
        return cls
    return _decorator


class UsernameContextMixin(object):
    def get_context_data(self, **kwargs):
        context = super(UsernameContextMixin, self).get_context_data(**kwargs)
        context.update({
            "username": self.kwargs.get("username")
        })
        return context
Add mixin to put the username in context
Add mixin to put the username in context
Python
bsd-3-clause
eldarion/privileges,jacobwegner/privileges,jacobwegner/privileges
import urlparse
from functools import wraps

from django.conf import settings
from django.utils.decorators import available_attrs, method_decorator
from django.contrib.auth import REDIRECT_FIELD_NAME

from privileges.forms import GrantForm
from privileges.models import Grant


def owner_required(view_func):
    @wraps(view_func, assigned=available_attrs(view_func))
    def _wrapped_view(request, *args, **kwargs):
        if request.user.is_authenticated():
            if request.user.username == kwargs["username"] or \
               request.user.is_superuser:
                return view_func(request, *args, **kwargs)
        path = request.build_absolute_uri()
        login_scheme, login_netloc = urlparse.urlparse(settings.LOGIN_URL)[:2]
        current_scheme, current_netloc = urlparse.urlparse(path)[:2]
        if ((not login_scheme or login_scheme == current_scheme) and
                (not login_netloc or login_netloc == current_netloc)):
            path = request.get_full_path()
        from django.contrib.auth.views import redirect_to_login
        return redirect_to_login(path, redirect_field_name=REDIRECT_FIELD_NAME)
    return _wrapped_view


def cbv_decorator(decorator):
    def _decorator(cls):
        cls.dispatch = method_decorator(decorator)(cls.dispatch)
        return cls
    return _decorator

Add mixin to put the username in context
import urlparse
from functools import wraps

from django.conf import settings
from django.utils.decorators import available_attrs, method_decorator
from django.contrib.auth import REDIRECT_FIELD_NAME

from privileges.forms import GrantForm
from privileges.models import Grant


def owner_required(view_func):
    @wraps(view_func, assigned=available_attrs(view_func))
    def _wrapped_view(request, *args, **kwargs):
        if request.user.is_authenticated():
            if request.user.username == kwargs["username"] or \
               request.user.is_superuser:
                return view_func(request, *args, **kwargs)
        path = request.build_absolute_uri()
        login_scheme, login_netloc = urlparse.urlparse(settings.LOGIN_URL)[:2]
        current_scheme, current_netloc = urlparse.urlparse(path)[:2]
        if ((not login_scheme or login_scheme == current_scheme) and
                (not login_netloc or login_netloc == current_netloc)):
            path = request.get_full_path()
        from django.contrib.auth.views import redirect_to_login
        return redirect_to_login(path, redirect_field_name=REDIRECT_FIELD_NAME)
    return _wrapped_view


def cbv_decorator(decorator):
    def _decorator(cls):
        cls.dispatch = method_decorator(decorator)(cls.dispatch)
        return cls
    return _decorator


class UsernameContextMixin(object):
    def get_context_data(self, **kwargs):
        context = super(UsernameContextMixin, self).get_context_data(**kwargs)
        context.update({
            "username": self.kwargs.get("username")
        })
        return context
<commit_before>import urlparse
from functools import wraps

from django.conf import settings
from django.utils.decorators import available_attrs, method_decorator
from django.contrib.auth import REDIRECT_FIELD_NAME

from privileges.forms import GrantForm
from privileges.models import Grant


def owner_required(view_func):
    @wraps(view_func, assigned=available_attrs(view_func))
    def _wrapped_view(request, *args, **kwargs):
        if request.user.is_authenticated():
            if request.user.username == kwargs["username"] or \
               request.user.is_superuser:
                return view_func(request, *args, **kwargs)
        path = request.build_absolute_uri()
        login_scheme, login_netloc = urlparse.urlparse(settings.LOGIN_URL)[:2]
        current_scheme, current_netloc = urlparse.urlparse(path)[:2]
        if ((not login_scheme or login_scheme == current_scheme) and
                (not login_netloc or login_netloc == current_netloc)):
            path = request.get_full_path()
        from django.contrib.auth.views import redirect_to_login
        return redirect_to_login(path, redirect_field_name=REDIRECT_FIELD_NAME)
    return _wrapped_view


def cbv_decorator(decorator):
    def _decorator(cls):
        cls.dispatch = method_decorator(decorator)(cls.dispatch)
        return cls
    return _decorator
<commit_msg>Add mixin to put the username in context<commit_after>
import urlparse
from functools import wraps

from django.conf import settings
from django.utils.decorators import available_attrs, method_decorator
from django.contrib.auth import REDIRECT_FIELD_NAME

from privileges.forms import GrantForm
from privileges.models import Grant


def owner_required(view_func):
    @wraps(view_func, assigned=available_attrs(view_func))
    def _wrapped_view(request, *args, **kwargs):
        if request.user.is_authenticated():
            if request.user.username == kwargs["username"] or \
               request.user.is_superuser:
                return view_func(request, *args, **kwargs)
        path = request.build_absolute_uri()
        login_scheme, login_netloc = urlparse.urlparse(settings.LOGIN_URL)[:2]
        current_scheme, current_netloc = urlparse.urlparse(path)[:2]
        if ((not login_scheme or login_scheme == current_scheme) and
                (not login_netloc or login_netloc == current_netloc)):
            path = request.get_full_path()
        from django.contrib.auth.views import redirect_to_login
        return redirect_to_login(path, redirect_field_name=REDIRECT_FIELD_NAME)
    return _wrapped_view


def cbv_decorator(decorator):
    def _decorator(cls):
        cls.dispatch = method_decorator(decorator)(cls.dispatch)
        return cls
    return _decorator


class UsernameContextMixin(object):
    def get_context_data(self, **kwargs):
        context = super(UsernameContextMixin, self).get_context_data(**kwargs)
        context.update({
            "username": self.kwargs.get("username")
        })
        return context
import urlparse
from functools import wraps

from django.conf import settings
from django.utils.decorators import available_attrs, method_decorator
from django.contrib.auth import REDIRECT_FIELD_NAME

from privileges.forms import GrantForm
from privileges.models import Grant


def owner_required(view_func):
    @wraps(view_func, assigned=available_attrs(view_func))
    def _wrapped_view(request, *args, **kwargs):
        if request.user.is_authenticated():
            if request.user.username == kwargs["username"] or \
               request.user.is_superuser:
                return view_func(request, *args, **kwargs)
        path = request.build_absolute_uri()
        login_scheme, login_netloc = urlparse.urlparse(settings.LOGIN_URL)[:2]
        current_scheme, current_netloc = urlparse.urlparse(path)[:2]
        if ((not login_scheme or login_scheme == current_scheme) and
                (not login_netloc or login_netloc == current_netloc)):
            path = request.get_full_path()
        from django.contrib.auth.views import redirect_to_login
        return redirect_to_login(path, redirect_field_name=REDIRECT_FIELD_NAME)
    return _wrapped_view


def cbv_decorator(decorator):
    def _decorator(cls):
        cls.dispatch = method_decorator(decorator)(cls.dispatch)
        return cls
    return _decorator

Add mixin to put the username in context
import urlparse
from functools import wraps

from django.conf import settings
from django.utils.decorators import available_attrs, method_decorator
from django.contrib.auth import REDIRECT_FIELD_NAME

from privileges.forms import GrantForm
from privileges.models import Grant


def owner_required(view_func):
    @wraps(view_func, assigned=available_attrs(view_func))
    def _wrapped_view(request, *args, **kwargs):
        if request.user.is_authenticated():
            if request.user.username == kwargs["username"] or \
               request.user.is_superuser:
                return view_func(request, *args, **kwargs)
        path = request.build_absolute_uri()
        login_scheme, login_netloc = urlparse.urlparse(settings.LOGIN_URL)[:2]
        current_scheme, current_netloc = urlparse.urlparse(path)[:2]
        if ((not login_scheme or login_scheme == current_scheme) and
                (not login_netloc or login_netloc == current_netloc)):
            path = request.get_full_path()
        from django.contrib.auth.views import redirect_to_login
        return redirect_to_login(path, redirect_field_name=REDIRECT_FIELD_NAME)
    return _wrapped_view


def cbv_decorator(decorator):
    def _decorator(cls):
        cls.dispatch = method_decorator(decorator)(cls.dispatch)
        return cls
    return _decorator


class UsernameContextMixin(object):
    def get_context_data(self, **kwargs):
        context = super(UsernameContextMixin, self).get_context_data(**kwargs)
        context.update({
            "username": self.kwargs.get("username")
        })
        return context
<commit_before>import urlparse
from functools import wraps

from django.conf import settings
from django.utils.decorators import available_attrs, method_decorator
from django.contrib.auth import REDIRECT_FIELD_NAME

from privileges.forms import GrantForm
from privileges.models import Grant


def owner_required(view_func):
    @wraps(view_func, assigned=available_attrs(view_func))
    def _wrapped_view(request, *args, **kwargs):
        if request.user.is_authenticated():
            if request.user.username == kwargs["username"] or \
               request.user.is_superuser:
                return view_func(request, *args, **kwargs)
        path = request.build_absolute_uri()
        login_scheme, login_netloc = urlparse.urlparse(settings.LOGIN_URL)[:2]
        current_scheme, current_netloc = urlparse.urlparse(path)[:2]
        if ((not login_scheme or login_scheme == current_scheme) and
                (not login_netloc or login_netloc == current_netloc)):
            path = request.get_full_path()
        from django.contrib.auth.views import redirect_to_login
        return redirect_to_login(path, redirect_field_name=REDIRECT_FIELD_NAME)
    return _wrapped_view


def cbv_decorator(decorator):
    def _decorator(cls):
        cls.dispatch = method_decorator(decorator)(cls.dispatch)
        return cls
    return _decorator
<commit_msg>Add mixin to put the username in context<commit_after>import urlparse
from functools import wraps

from django.conf import settings
from django.utils.decorators import available_attrs, method_decorator
from django.contrib.auth import REDIRECT_FIELD_NAME

from privileges.forms import GrantForm
from privileges.models import Grant


def owner_required(view_func):
    @wraps(view_func, assigned=available_attrs(view_func))
    def _wrapped_view(request, *args, **kwargs):
        if request.user.is_authenticated():
            if request.user.username == kwargs["username"] or \
               request.user.is_superuser:
                return view_func(request, *args, **kwargs)
        path = request.build_absolute_uri()
        login_scheme, login_netloc = urlparse.urlparse(settings.LOGIN_URL)[:2]
        current_scheme, current_netloc = urlparse.urlparse(path)[:2]
        if ((not login_scheme or login_scheme == current_scheme) and
                (not login_netloc or login_netloc == current_netloc)):
            path = request.get_full_path()
        from django.contrib.auth.views import redirect_to_login
        return redirect_to_login(path, redirect_field_name=REDIRECT_FIELD_NAME)
    return _wrapped_view


def cbv_decorator(decorator):
    def _decorator(cls):
        cls.dispatch = method_decorator(decorator)(cls.dispatch)
        return cls
    return _decorator


class UsernameContextMixin(object):
    def get_context_data(self, **kwargs):
        context = super(UsernameContextMixin, self).get_context_data(**kwargs)
        context.update({
            "username": self.kwargs.get("username")
        })
        return context
e30c65f6dc35c53e5f0caaace36ba3fa0a928efa
testing/settings.py
testing/settings.py
# -*- encoding: utf-8 -*-

import os, sys
sys.path.insert(0, '..')

PROJECT_ROOT = os.path.dirname(__file__)

DEBUG = True
TEMPLATE_DEBUG = DEBUG

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'test'
    }
}

TIME_ZONE = 'America/Chicago'
LANGUAGE_CODE = 'en-us'
ADMIN_MEDIA_PREFIX = '/static/admin/'
STATICFILES_DIRS = ()
SECRET_KEY = 'di!n($kqa3)nd%ikad#kcjpkd^uw*h%*kj=*pm7$vbo6ir7h=l'

INSTALLED_APPS = (
    'djmail',
    'djcelery',
    'testing',
)

import djcelery
djcelery.setup_loader()

CELERY_ALWAYS_EAGER = True
# -*- encoding: utf-8 -*-

import os, sys
sys.path.insert(0, '..')

PROJECT_ROOT = os.path.dirname(__file__)

DEBUG = True
TEMPLATE_DEBUG = DEBUG

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'test'
    }
}

MIDDLEWARE_CLASSES = ()

TIME_ZONE = 'America/Chicago'
LANGUAGE_CODE = 'en-us'
ADMIN_MEDIA_PREFIX = '/static/admin/'
STATICFILES_DIRS = ()
SECRET_KEY = 'di!n($kqa3)nd%ikad#kcjpkd^uw*h%*kj=*pm7$vbo6ir7h=l'

INSTALLED_APPS = (
    'djmail',
    'djcelery',
    'testing',
)

import djcelery
djcelery.setup_loader()

CELERY_ALWAYS_EAGER = True
Fix warning on running tests adding missing setting.
Fix warning on running tests adding missing setting.
Python
bsd-3-clause
CloudNcodeInc/djmail,CloudNcodeInc/djmail,CloudNcodeInc/djmail
# -*- encoding: utf-8 -*-

import os, sys
sys.path.insert(0, '..')

PROJECT_ROOT = os.path.dirname(__file__)

DEBUG = True
TEMPLATE_DEBUG = DEBUG

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'test'
    }
}

TIME_ZONE = 'America/Chicago'
LANGUAGE_CODE = 'en-us'
ADMIN_MEDIA_PREFIX = '/static/admin/'
STATICFILES_DIRS = ()
SECRET_KEY = 'di!n($kqa3)nd%ikad#kcjpkd^uw*h%*kj=*pm7$vbo6ir7h=l'

INSTALLED_APPS = (
    'djmail',
    'djcelery',
    'testing',
)

import djcelery
djcelery.setup_loader()

CELERY_ALWAYS_EAGER = True

Fix warning on running tests adding missing setting.
# -*- encoding: utf-8 -*-

import os, sys
sys.path.insert(0, '..')

PROJECT_ROOT = os.path.dirname(__file__)

DEBUG = True
TEMPLATE_DEBUG = DEBUG

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'test'
    }
}

MIDDLEWARE_CLASSES = ()

TIME_ZONE = 'America/Chicago'
LANGUAGE_CODE = 'en-us'
ADMIN_MEDIA_PREFIX = '/static/admin/'
STATICFILES_DIRS = ()
SECRET_KEY = 'di!n($kqa3)nd%ikad#kcjpkd^uw*h%*kj=*pm7$vbo6ir7h=l'

INSTALLED_APPS = (
    'djmail',
    'djcelery',
    'testing',
)

import djcelery
djcelery.setup_loader()

CELERY_ALWAYS_EAGER = True
<commit_before># -*- encoding: utf-8 -*-

import os, sys
sys.path.insert(0, '..')

PROJECT_ROOT = os.path.dirname(__file__)

DEBUG = True
TEMPLATE_DEBUG = DEBUG

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'test'
    }
}

TIME_ZONE = 'America/Chicago'
LANGUAGE_CODE = 'en-us'
ADMIN_MEDIA_PREFIX = '/static/admin/'
STATICFILES_DIRS = ()
SECRET_KEY = 'di!n($kqa3)nd%ikad#kcjpkd^uw*h%*kj=*pm7$vbo6ir7h=l'

INSTALLED_APPS = (
    'djmail',
    'djcelery',
    'testing',
)

import djcelery
djcelery.setup_loader()

CELERY_ALWAYS_EAGER = True
<commit_msg>Fix warning on running tests adding missing setting.<commit_after>
# -*- encoding: utf-8 -*-

import os, sys
sys.path.insert(0, '..')

PROJECT_ROOT = os.path.dirname(__file__)

DEBUG = True
TEMPLATE_DEBUG = DEBUG

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'test'
    }
}

MIDDLEWARE_CLASSES = ()

TIME_ZONE = 'America/Chicago'
LANGUAGE_CODE = 'en-us'
ADMIN_MEDIA_PREFIX = '/static/admin/'
STATICFILES_DIRS = ()
SECRET_KEY = 'di!n($kqa3)nd%ikad#kcjpkd^uw*h%*kj=*pm7$vbo6ir7h=l'

INSTALLED_APPS = (
    'djmail',
    'djcelery',
    'testing',
)

import djcelery
djcelery.setup_loader()

CELERY_ALWAYS_EAGER = True
# -*- encoding: utf-8 -*-

import os, sys
sys.path.insert(0, '..')

PROJECT_ROOT = os.path.dirname(__file__)

DEBUG = True
TEMPLATE_DEBUG = DEBUG

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'test'
    }
}

TIME_ZONE = 'America/Chicago'
LANGUAGE_CODE = 'en-us'
ADMIN_MEDIA_PREFIX = '/static/admin/'
STATICFILES_DIRS = ()
SECRET_KEY = 'di!n($kqa3)nd%ikad#kcjpkd^uw*h%*kj=*pm7$vbo6ir7h=l'

INSTALLED_APPS = (
    'djmail',
    'djcelery',
    'testing',
)

import djcelery
djcelery.setup_loader()

CELERY_ALWAYS_EAGER = True

Fix warning on running tests adding missing setting.
# -*- encoding: utf-8 -*-

import os, sys
sys.path.insert(0, '..')

PROJECT_ROOT = os.path.dirname(__file__)

DEBUG = True
TEMPLATE_DEBUG = DEBUG

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'test'
    }
}

MIDDLEWARE_CLASSES = ()

TIME_ZONE = 'America/Chicago'
LANGUAGE_CODE = 'en-us'
ADMIN_MEDIA_PREFIX = '/static/admin/'
STATICFILES_DIRS = ()
SECRET_KEY = 'di!n($kqa3)nd%ikad#kcjpkd^uw*h%*kj=*pm7$vbo6ir7h=l'

INSTALLED_APPS = (
    'djmail',
    'djcelery',
    'testing',
)

import djcelery
djcelery.setup_loader()

CELERY_ALWAYS_EAGER = True
<commit_before># -*- encoding: utf-8 -*-

import os, sys
sys.path.insert(0, '..')

PROJECT_ROOT = os.path.dirname(__file__)

DEBUG = True
TEMPLATE_DEBUG = DEBUG

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'test'
    }
}

TIME_ZONE = 'America/Chicago'
LANGUAGE_CODE = 'en-us'
ADMIN_MEDIA_PREFIX = '/static/admin/'
STATICFILES_DIRS = ()
SECRET_KEY = 'di!n($kqa3)nd%ikad#kcjpkd^uw*h%*kj=*pm7$vbo6ir7h=l'

INSTALLED_APPS = (
    'djmail',
    'djcelery',
    'testing',
)

import djcelery
djcelery.setup_loader()

CELERY_ALWAYS_EAGER = True
<commit_msg>Fix warning on running tests adding missing setting.<commit_after># -*- encoding: utf-8 -*-

import os, sys
sys.path.insert(0, '..')

PROJECT_ROOT = os.path.dirname(__file__)

DEBUG = True
TEMPLATE_DEBUG = DEBUG

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'test'
    }
}

MIDDLEWARE_CLASSES = ()

TIME_ZONE = 'America/Chicago'
LANGUAGE_CODE = 'en-us'
ADMIN_MEDIA_PREFIX = '/static/admin/'
STATICFILES_DIRS = ()
SECRET_KEY = 'di!n($kqa3)nd%ikad#kcjpkd^uw*h%*kj=*pm7$vbo6ir7h=l'

INSTALLED_APPS = (
    'djmail',
    'djcelery',
    'testing',
)

import djcelery
djcelery.setup_loader()

CELERY_ALWAYS_EAGER = True
3db3d9b827f3acaef2223fec75bc41a531a4f1ee
takeyourmeds/reminders/checks.py
takeyourmeds/reminders/checks.py
import os

from django.core import checks
from django.contrib.staticfiles.finders import find

from .apps import RemindersConfig


@checks.register()
def voice_reminders_exist(app_configs, **kwargs):
    for x, _ in RemindersConfig.voice_reminders:
        if not find(os.path.join('mp3', x)):
            yield checks.Error(
                "Reminder recording missing: %r" % x,
                id='takeyourmeds.reminders.E001',
            )
Check that the MP3 files actually exist.
Check that the MP3 files actually exist.

Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@chris-lamb.co.uk>
Python
mit
takeyourmeds/takeyourmeds-web,takeyourmeds/takeyourmeds-web,takeyourmeds/takeyourmeds-web,takeyourmeds/takeyourmeds-web
Check that the MP3 files actually exist.

Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@chris-lamb.co.uk>
import os

from django.core import checks
from django.contrib.staticfiles.finders import find

from .apps import RemindersConfig


@checks.register()
def voice_reminders_exist(app_configs, **kwargs):
    for x, _ in RemindersConfig.voice_reminders:
        if not find(os.path.join('mp3', x)):
            yield checks.Error(
                "Reminder recording missing: %r" % x,
                id='takeyourmeds.reminders.E001',
            )
<commit_before><commit_msg>Check that the MP3 files actually exist.

Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@chris-lamb.co.uk><commit_after>
import os

from django.core import checks
from django.contrib.staticfiles.finders import find

from .apps import RemindersConfig


@checks.register()
def voice_reminders_exist(app_configs, **kwargs):
    for x, _ in RemindersConfig.voice_reminders:
        if not find(os.path.join('mp3', x)):
            yield checks.Error(
                "Reminder recording missing: %r" % x,
                id='takeyourmeds.reminders.E001',
            )
Check that the MP3 files actually exist.

Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@chris-lamb.co.uk>
import os

from django.core import checks
from django.contrib.staticfiles.finders import find

from .apps import RemindersConfig


@checks.register()
def voice_reminders_exist(app_configs, **kwargs):
    for x, _ in RemindersConfig.voice_reminders:
        if not find(os.path.join('mp3', x)):
            yield checks.Error(
                "Reminder recording missing: %r" % x,
                id='takeyourmeds.reminders.E001',
            )
<commit_before><commit_msg>Check that the MP3 files actually exist.

Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@chris-lamb.co.uk><commit_after>import os

from django.core import checks
from django.contrib.staticfiles.finders import find

from .apps import RemindersConfig


@checks.register()
def voice_reminders_exist(app_configs, **kwargs):
    for x, _ in RemindersConfig.voice_reminders:
        if not find(os.path.join('mp3', x)):
            yield checks.Error(
                "Reminder recording missing: %r" % x,
                id='takeyourmeds.reminders.E001',
            )
f3eb94bbe10160a4337c5eb9241166f60b9724a8
pyvideo/settings.py
pyvideo/settings.py
# Django settings for pyvideo project.
from richard.settings import *

ALLOWED_HOSTS = ['pyvideo.ru']

TIME_ZONE = 'Europe/Moscow'
LANGUAGE_CODE = 'ru'

SECRET_KEY = 'this_is_not_production_so_who_cares'

ROOT_URLCONF = 'pyvideo.urls'

WSGI_APPLICATION = 'pyvideo.wsgi.application'

TEMPLATE_DIRS = (
    os.path.join(ROOT, 'templates'),
)

INSTALLED_APPS = INSTALLED_APPS + (
    'raven.contrib.django.raven_compat',
)
# Django settings for pyvideo project.
from richard.settings import *

ALLOWED_HOSTS = ['pyvideo.ru', 'pyvideoru.herokuapp.com']

TIME_ZONE = 'Europe/Moscow'
LANGUAGE_CODE = 'ru'

SECRET_KEY = 'this_is_not_production_so_who_cares'

ROOT_URLCONF = 'pyvideo.urls'

WSGI_APPLICATION = 'pyvideo.wsgi.application'

TEMPLATE_DIRS = (
    os.path.join(ROOT, 'templates'),
)

INSTALLED_APPS = INSTALLED_APPS + (
    'raven.contrib.django.raven_compat',
)
Add heroku host to ALLOWED_HOSTS
Add heroku host to ALLOWED_HOSTS
Python
bsd-3-clause
WarmongeR1/pyvideo.ru,WarmongeR1/pyvideo.ru,WarmongeR1/pyvideo.ru,coagulant/pyvideo.ru,coagulant/pyvideo.ru,coagulant/pyvideo.ru
# Django settings for pyvideo project.
from richard.settings import *

ALLOWED_HOSTS = ['pyvideo.ru']

TIME_ZONE = 'Europe/Moscow'
LANGUAGE_CODE = 'ru'

SECRET_KEY = 'this_is_not_production_so_who_cares'

ROOT_URLCONF = 'pyvideo.urls'

WSGI_APPLICATION = 'pyvideo.wsgi.application'

TEMPLATE_DIRS = (
    os.path.join(ROOT, 'templates'),
)

INSTALLED_APPS = INSTALLED_APPS + (
    'raven.contrib.django.raven_compat',
)

Add heroku host to ALLOWED_HOSTS
# Django settings for pyvideo project.
from richard.settings import *

ALLOWED_HOSTS = ['pyvideo.ru', 'pyvideoru.herokuapp.com']

TIME_ZONE = 'Europe/Moscow'
LANGUAGE_CODE = 'ru'

SECRET_KEY = 'this_is_not_production_so_who_cares'

ROOT_URLCONF = 'pyvideo.urls'

WSGI_APPLICATION = 'pyvideo.wsgi.application'

TEMPLATE_DIRS = (
    os.path.join(ROOT, 'templates'),
)

INSTALLED_APPS = INSTALLED_APPS + (
    'raven.contrib.django.raven_compat',
)
<commit_before># Django settings for pyvideo project.
from richard.settings import *

ALLOWED_HOSTS = ['pyvideo.ru']

TIME_ZONE = 'Europe/Moscow'
LANGUAGE_CODE = 'ru'

SECRET_KEY = 'this_is_not_production_so_who_cares'

ROOT_URLCONF = 'pyvideo.urls'

WSGI_APPLICATION = 'pyvideo.wsgi.application'

TEMPLATE_DIRS = (
    os.path.join(ROOT, 'templates'),
)

INSTALLED_APPS = INSTALLED_APPS + (
    'raven.contrib.django.raven_compat',
)
<commit_msg>Add heroku host to ALLOWED_HOSTS<commit_after>
# Django settings for pyvideo project.
from richard.settings import *

ALLOWED_HOSTS = ['pyvideo.ru', 'pyvideoru.herokuapp.com']

TIME_ZONE = 'Europe/Moscow'
LANGUAGE_CODE = 'ru'

SECRET_KEY = 'this_is_not_production_so_who_cares'

ROOT_URLCONF = 'pyvideo.urls'

WSGI_APPLICATION = 'pyvideo.wsgi.application'

TEMPLATE_DIRS = (
    os.path.join(ROOT, 'templates'),
)

INSTALLED_APPS = INSTALLED_APPS + (
    'raven.contrib.django.raven_compat',
)
# Django settings for pyvideo project.
from richard.settings import *

ALLOWED_HOSTS = ['pyvideo.ru']

TIME_ZONE = 'Europe/Moscow'
LANGUAGE_CODE = 'ru'

SECRET_KEY = 'this_is_not_production_so_who_cares'

ROOT_URLCONF = 'pyvideo.urls'

WSGI_APPLICATION = 'pyvideo.wsgi.application'

TEMPLATE_DIRS = (
    os.path.join(ROOT, 'templates'),
)

INSTALLED_APPS = INSTALLED_APPS + (
    'raven.contrib.django.raven_compat',
)

Add heroku host to ALLOWED_HOSTS
# Django settings for pyvideo project.
from richard.settings import *

ALLOWED_HOSTS = ['pyvideo.ru', 'pyvideoru.herokuapp.com']

TIME_ZONE = 'Europe/Moscow'
LANGUAGE_CODE = 'ru'

SECRET_KEY = 'this_is_not_production_so_who_cares'

ROOT_URLCONF = 'pyvideo.urls'

WSGI_APPLICATION = 'pyvideo.wsgi.application'

TEMPLATE_DIRS = (
    os.path.join(ROOT, 'templates'),
)

INSTALLED_APPS = INSTALLED_APPS + (
    'raven.contrib.django.raven_compat',
)
<commit_before># Django settings for pyvideo project. from richard.settings import * ALLOWED_HOSTS = ['pyvideo.ru'] TIME_ZONE = 'Europe/Moscow' LANGUAGE_CODE = 'ru' SECRET_KEY = 'this_is_not_production_so_who_cares' ROOT_URLCONF = 'pyvideo.urls' WSGI_APPLICATION = 'pyvideo.wsgi.application' TEMPLATE_DIRS = ( os.path.join(ROOT, 'templates'), ) INSTALLED_APPS = INSTALLED_APPS + ( 'raven.contrib.django.raven_compat', ) <commit_msg>Add heroku host to ALLOWED_HOSTS<commit_after># Django settings for pyvideo project. from richard.settings import * ALLOWED_HOSTS = ['pyvideo.ru', 'pyvideoru.herokuapp.com'] TIME_ZONE = 'Europe/Moscow' LANGUAGE_CODE = 'ru' SECRET_KEY = 'this_is_not_production_so_who_cares' ROOT_URLCONF = 'pyvideo.urls' WSGI_APPLICATION = 'pyvideo.wsgi.application' TEMPLATE_DIRS = ( os.path.join(ROOT, 'templates'), ) INSTALLED_APPS = INSTALLED_APPS + ( 'raven.contrib.django.raven_compat', )
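For readers unfamiliar with the setting edited in the record above: Django rejects any request whose Host header does not match an entry in ALLOWED_HOSTS, which is why the Heroku hostname has to be listed alongside the apex domain. A minimal sketch of that check using Django's own helper (the import path matches recent Django releases; treat the sketch as illustrative, not part of the commit):

from django.http.request import validate_host

allowed = ['pyvideo.ru', 'pyvideoru.herokuapp.com']
assert validate_host('pyvideo.ru', allowed)
assert validate_host('pyvideoru.herokuapp.com', allowed)
assert not validate_host('evil.example.com', allowed)
# A leading dot, e.g. '.herokuapp.com', would also match any subdomain.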
722e975e8819b59d9d2f53627a5d37550ea09c55
tests/test_clean.py
tests/test_clean.py
from mergepurge import clean import pandas as pd import numpy as np t_data = pd.Series({'name': 'Timothy Testerosa III'}) t_parsed = (np.nan, 'Timothy', 'Testerosa', 'Timothy Testerosa') # FIXME - load a csv file with a name column and the 4 correctly parsed name parts as 4 other cols # Then, iterate over the names def test_clean_contact_name(): assert clean.parse_contact_name(t_data, ['name'], False) == t_parsed
from mergepurge import clean import pandas as pd import numpy as np complete = pd.read_csv('complete_parsed.tsv', sep='\t', encoding='utf-8', dtype={'aa_streetnum': str, 'aa_zip': str, 'zipcode': str}) COMP_LOC_COLS = ['address', 'city', 'state', 'zipcode'] COMP_CONTACT_COLS = ['first', 'last'] COMP_COMPANY_COLS = ['company'] BUILT_COLS = [col for col in complete.columns if col.startswith('aa_')] partial = pd.read_csv('./incomplete.tsv', sep='\t', encoding='utf-8', dtype={'aa_streetnum': str, 'aa_zip': str, 'zipcode': str}) def test_clean_contact_name(): # Re-parse approx 100 records for _, test_record in complete.iterrows(): known = (test_record.get('aa_title', np.nan), test_record.get('aa_firstname', np.nan), test_record.get('aa_lastname', np.nan), test_record.get('aa_fullname', np.nan)) parsed = clean.parse_contact_name(test_record, COMP_CONTACT_COLS, strict=False) assert parsed == known def test_parse_location_cols(): for _, test_record in complete.iterrows(): known = (test_record.get('aa_streetnum', np.nan), test_record.get('aa_street', np.nan), test_record.get('aa_city', np.nan), test_record.get('aa_state', np.nan), test_record.get('aa_zip', np.nan), test_record.get('aa_fulladdy', np.nan)) parsed = clean.parse_location_cols(test_record, COMP_LOC_COLS, strict=False) assert parsed == known def test_parse_business_name(): for _, test_record in complete.iterrows(): known = test_record.get('aa_company', np.nan) parsed = clean.parse_business_name(test_record, COMP_COMPANY_COLS, strict=False) assert parsed == known def test_build_matching_cols(): known = complete[BUILT_COLS].head(10).copy() built = clean.build_matching_cols(complete.head(10).copy(), COMP_LOC_COLS, COMP_CONTACT_COLS, COMP_COMPANY_COLS) assert all(built[BUILT_COLS] == known)
Add tests for most functions in clean module
Add tests for most functions in clean module Iterate over the complete and parsed test data and confirm we can still produce the expected output for most functions in clean.py.
Python
mit
mikecunha/mergepurge
from mergepurge import clean import pandas as pd import numpy as np t_data = pd.Series({'name': 'Timothy Testerosa III'}) t_parsed = (np.nan, 'Timothy', 'Testerosa', 'Timothy Testerosa') # FIXME - load a csv file with a name column and the 4 correctly parsed name parts as 4 other cols # Then, iterate over the names def test_clean_contact_name(): assert clean.parse_contact_name(t_data, ['name'], False) == t_parsed Add tests for most functions in clean module Iterate over the complete and parsed test data and confirm we can still produce the expected output for most functions in clean.py.
from mergepurge import clean import pandas as pd import numpy as np complete = pd.read_csv('complete_parsed.tsv', sep='\t', encoding='utf-8', dtype={'aa_streetnum': str, 'aa_zip': str, 'zipcode': str}) COMP_LOC_COLS = ['address', 'city', 'state', 'zipcode'] COMP_CONTACT_COLS = ['first', 'last'] COMP_COMPANY_COLS = ['company'] BUILT_COLS = [col for col in complete.columns if col.startswith('aa_')] partial = pd.read_csv('./incomplete.tsv', sep='\t', encoding='utf-8', dtype={'aa_streetnum': str, 'aa_zip': str, 'zipcode': str}) def test_clean_contact_name(): # Re-parse approx 100 records for _, test_record in complete.iterrows(): known = (test_record.get('aa_title', np.nan), test_record.get('aa_firstname', np.nan), test_record.get('aa_lastname', np.nan), test_record.get('aa_fullname', np.nan)) parsed = clean.parse_contact_name(test_record, COMP_CONTACT_COLS, strict=False) assert parsed == known def test_parse_location_cols(): for _, test_record in complete.iterrows(): known = (test_record.get('aa_streetnum', np.nan), test_record.get('aa_street', np.nan), test_record.get('aa_city', np.nan), test_record.get('aa_state', np.nan), test_record.get('aa_zip', np.nan), test_record.get('aa_fulladdy', np.nan)) parsed = clean.parse_location_cols(test_record, COMP_LOC_COLS, strict=False) assert parsed == known def test_parse_business_name(): for _, test_record in complete.iterrows(): known = test_record.get('aa_company', np.nan) parsed = clean.parse_business_name(test_record, COMP_COMPANY_COLS, strict=False) assert parsed == known def test_build_matching_cols(): known = complete[BUILT_COLS].head(10).copy() built = clean.build_matching_cols(complete.head(10).copy(), COMP_LOC_COLS, COMP_CONTACT_COLS, COMP_COMPANY_COLS) assert all(built[BUILT_COLS] == known)
<commit_before>from mergepurge import clean import pandas as pd import numpy as np t_data = pd.Series({'name': 'Timothy Testerosa III'}) t_parsed = (np.nan, 'Timothy', 'Testerosa', 'Timothy Testerosa') # FIXME - load a csv file with a name column and the 4 correctly parsed name parts as 4 other cols # Then, iterate over the names def test_clean_contact_name(): assert clean.parse_contact_name(t_data, ['name'], False) == t_parsed <commit_msg>Add tests for most functions in clean module Iterate over the complete and parsed test data and confirm we can still produce the expected output for most functions in clean.py.<commit_after>
from mergepurge import clean import pandas as pd import numpy as np complete = pd.read_csv('complete_parsed.tsv', sep='\t', encoding='utf-8', dtype={'aa_streetnum': str, 'aa_zip': str, 'zipcode': str}) COMP_LOC_COLS = ['address', 'city', 'state', 'zipcode'] COMP_CONTACT_COLS = ['first', 'last'] COMP_COMPANY_COLS = ['company'] BUILT_COLS = [col for col in complete.columns if col.startswith('aa_')] partial = pd.read_csv('./incomplete.tsv', sep='\t', encoding='utf-8', dtype={'aa_streetnum': str, 'aa_zip': str, 'zipcode': str}) def test_clean_contact_name(): # Re-parse approx 100 records for _, test_record in complete.iterrows(): known = (test_record.get('aa_title', np.nan), test_record.get('aa_firstname', np.nan), test_record.get('aa_lastname', np.nan), test_record.get('aa_fullname', np.nan)) parsed = clean.parse_contact_name(test_record, COMP_CONTACT_COLS, strict=False) assert parsed == known def test_parse_location_cols(): for _, test_record in complete.iterrows(): known = (test_record.get('aa_streetnum', np.nan), test_record.get('aa_street', np.nan), test_record.get('aa_city', np.nan), test_record.get('aa_state', np.nan), test_record.get('aa_zip', np.nan), test_record.get('aa_fulladdy', np.nan)) parsed = clean.parse_location_cols(test_record, COMP_LOC_COLS, strict=False) assert parsed == known def test_parse_business_name(): for _, test_record in complete.iterrows(): known = test_record.get('aa_company', np.nan) parsed = clean.parse_business_name(test_record, COMP_COMPANY_COLS, strict=False) assert parsed == known def test_build_matching_cols(): known = complete[BUILT_COLS].head(10).copy() built = clean.build_matching_cols(complete.head(10).copy(), COMP_LOC_COLS, COMP_CONTACT_COLS, COMP_COMPANY_COLS) assert all(built[BUILT_COLS] == known)
from mergepurge import clean import pandas as pd import numpy as np t_data = pd.Series({'name': 'Timothy Testerosa III'}) t_parsed = (np.nan, 'Timothy', 'Testerosa', 'Timothy Testerosa') # FIXME - load a csv file with a name column and the 4 correctly parsed name parts as 4 other cols # Then, iterate over the names def test_clean_contact_name(): assert clean.parse_contact_name(t_data, ['name'], False) == t_parsed Add tests for most functions in clean module Iterate over the complete and parsed test data and confirm we can still produce the expected output for most functions in clean.py.from mergepurge import clean import pandas as pd import numpy as np complete = pd.read_csv('complete_parsed.tsv', sep='\t', encoding='utf-8', dtype={'aa_streetnum': str, 'aa_zip': str, 'zipcode': str}) COMP_LOC_COLS = ['address', 'city', 'state', 'zipcode'] COMP_CONTACT_COLS = ['first', 'last'] COMP_COMPANY_COLS = ['company'] BUILT_COLS = [col for col in complete.columns if col.startswith('aa_')] partial = pd.read_csv('./incomplete.tsv', sep='\t', encoding='utf-8', dtype={'aa_streetnum': str, 'aa_zip': str, 'zipcode': str}) def test_clean_contact_name(): # Re-parse approx 100 records for _, test_record in complete.iterrows(): known = (test_record.get('aa_title', np.nan), test_record.get('aa_firstname', np.nan), test_record.get('aa_lastname', np.nan), test_record.get('aa_fullname', np.nan)) parsed = clean.parse_contact_name(test_record, COMP_CONTACT_COLS, strict=False) assert parsed == known def test_parse_location_cols(): for _, test_record in complete.iterrows(): known = (test_record.get('aa_streetnum', np.nan), test_record.get('aa_street', np.nan), test_record.get('aa_city', np.nan), test_record.get('aa_state', np.nan), test_record.get('aa_zip', np.nan), test_record.get('aa_fulladdy', np.nan)) parsed = clean.parse_location_cols(test_record, COMP_LOC_COLS, strict=False) assert parsed == known def test_parse_business_name(): for _, test_record in complete.iterrows(): known = test_record.get('aa_company', np.nan) parsed = clean.parse_business_name(test_record, COMP_COMPANY_COLS, strict=False) assert parsed == known def test_build_matching_cols(): known = complete[BUILT_COLS].head(10).copy() built = clean.build_matching_cols(complete.head(10).copy(), COMP_LOC_COLS, COMP_CONTACT_COLS, COMP_COMPANY_COLS) assert all(built[BUILT_COLS] == known)
<commit_before>from mergepurge import clean import pandas as pd import numpy as np t_data = pd.Series({'name': 'Timothy Testerosa III'}) t_parsed = (np.nan, 'Timothy', 'Testerosa', 'Timothy Testerosa') # FIXME - load a csv file with a name column and the 4 correctly parsed name parts as 4 other cols # Then, iterate over the names def test_clean_contact_name(): assert clean.parse_contact_name(t_data, ['name'], False) == t_parsed <commit_msg>Add tests for most functions in clean module Iterate over the complete and parsed test data and confirm we can still produce the expected output for most functions in clean.py.<commit_after>from mergepurge import clean import pandas as pd import numpy as np complete = pd.read_csv('complete_parsed.tsv', sep='\t', encoding='utf-8', dtype={'aa_streetnum': str, 'aa_zip': str, 'zipcode': str}) COMP_LOC_COLS = ['address', 'city', 'state', 'zipcode'] COMP_CONTACT_COLS = ['first', 'last'] COMP_COMPANY_COLS = ['company'] BUILT_COLS = [col for col in complete.columns if col.startswith('aa_')] partial = pd.read_csv('./incomplete.tsv', sep='\t', encoding='utf-8', dtype={'aa_streetnum': str, 'aa_zip': str, 'zipcode': str}) def test_clean_contact_name(): # Re-parse approx 100 records for _, test_record in complete.iterrows(): known = (test_record.get('aa_title', np.nan), test_record.get('aa_firstname', np.nan), test_record.get('aa_lastname', np.nan), test_record.get('aa_fullname', np.nan)) parsed = clean.parse_contact_name(test_record, COMP_CONTACT_COLS, strict=False) assert parsed == known def test_parse_location_cols(): for _, test_record in complete.iterrows(): known = (test_record.get('aa_streetnum', np.nan), test_record.get('aa_street', np.nan), test_record.get('aa_city', np.nan), test_record.get('aa_state', np.nan), test_record.get('aa_zip', np.nan), test_record.get('aa_fulladdy', np.nan)) parsed = clean.parse_location_cols(test_record, COMP_LOC_COLS, strict=False) assert parsed == known def test_parse_business_name(): for _, test_record in complete.iterrows(): known = test_record.get('aa_company', np.nan) parsed = clean.parse_business_name(test_record, COMP_COMPANY_COLS, strict=False) assert parsed == known def test_build_matching_cols(): known = complete[BUILT_COLS].head(10).copy() built = clean.build_matching_cols(complete.head(10).copy(), COMP_LOC_COLS, COMP_CONTACT_COLS, COMP_COMPANY_COLS) assert all(built[BUILT_COLS] == known)
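A subtlety worth noting about the tests in the record above: the expected tuples contain np.nan, and NaN compares unequal to itself under IEEE-754, yet `parsed == known` still passes. CPython's sequence comparison checks object identity before falling back to ==, and np.nan is a single shared float object, so tuples holding it compare equal. A standalone observation, not part of the commit:

import numpy as np

assert np.nan != np.nan                 # IEEE-754: NaN never equals itself
assert (np.nan, 'x') == (np.nan, 'x')   # identity short-circuit inside tuples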
1aed26838d1616b3686b74697e01bb4da5e47b79
sqlobject/tests/test_identity.py
sqlobject/tests/test_identity.py
from sqlobject import IntCol, SQLObject from sqlobject.tests.dbtest import getConnection, setupClass ######################################## # Identity (MS SQL) ######################################## class SOTestIdentity(SQLObject): n = IntCol() def test_identity(): # (re)create table SOTestIdentity.dropTable(connection=getConnection(), ifExists=True) setupClass(SOTestIdentity) # insert without giving identity SOTestIdentity(n=100) # i1 # verify result i1get = SOTestIdentity.get(1) assert(i1get.n == 100) # insert while giving identity SOTestIdentity(id=2, n=200) # i2 # verify result i2get = SOTestIdentity.get(2) assert(i2get.n == 200)
from sqlobject import IntCol, SQLObject from sqlobject.tests.dbtest import getConnection, setupClass ######################################## # Identity (MS SQL) ######################################## class SOTestIdentity(SQLObject): n = IntCol() def test_identity(): # (re)create table SOTestIdentity.dropTable(connection=getConnection(), ifExists=True) setupClass(SOTestIdentity) # insert without giving identity SOTestIdentity(n=100) # i1 # verify result i1get = SOTestIdentity.get(1) assert (i1get.n == 100) # insert while giving identity SOTestIdentity(id=2, n=200) # i2 # verify result i2get = SOTestIdentity.get(2) assert (i2get.n == 200)
Fix `flake8` E275 missing whitespace after keyword
Style: Fix `flake8` E275 missing whitespace after keyword
Python
lgpl-2.1
sqlobject/sqlobject,sqlobject/sqlobject
from sqlobject import IntCol, SQLObject from sqlobject.tests.dbtest import getConnection, setupClass ######################################## # Identity (MS SQL) ######################################## class SOTestIdentity(SQLObject): n = IntCol() def test_identity(): # (re)create table SOTestIdentity.dropTable(connection=getConnection(), ifExists=True) setupClass(SOTestIdentity) # insert without giving identity SOTestIdentity(n=100) # i1 # verify result i1get = SOTestIdentity.get(1) assert(i1get.n == 100) # insert while giving identity SOTestIdentity(id=2, n=200) # i2 # verify result i2get = SOTestIdentity.get(2) assert(i2get.n == 200) Style: Fix `flake8` E275 missing whitespace after keyword
from sqlobject import IntCol, SQLObject from sqlobject.tests.dbtest import getConnection, setupClass ######################################## # Identity (MS SQL) ######################################## class SOTestIdentity(SQLObject): n = IntCol() def test_identity(): # (re)create table SOTestIdentity.dropTable(connection=getConnection(), ifExists=True) setupClass(SOTestIdentity) # insert without giving identity SOTestIdentity(n=100) # i1 # verify result i1get = SOTestIdentity.get(1) assert (i1get.n == 100) # insert while giving identity SOTestIdentity(id=2, n=200) # i2 # verify result i2get = SOTestIdentity.get(2) assert (i2get.n == 200)
<commit_before>from sqlobject import IntCol, SQLObject from sqlobject.tests.dbtest import getConnection, setupClass ######################################## # Identity (MS SQL) ######################################## class SOTestIdentity(SQLObject): n = IntCol() def test_identity(): # (re)create table SOTestIdentity.dropTable(connection=getConnection(), ifExists=True) setupClass(SOTestIdentity) # insert without giving identity SOTestIdentity(n=100) # i1 # verify result i1get = SOTestIdentity.get(1) assert(i1get.n == 100) # insert while giving identity SOTestIdentity(id=2, n=200) # i2 # verify result i2get = SOTestIdentity.get(2) assert(i2get.n == 200) <commit_msg>Style: Fix `flake8` E275 missing whitespace after keyword<commit_after>
from sqlobject import IntCol, SQLObject from sqlobject.tests.dbtest import getConnection, setupClass ######################################## # Identity (MS SQL) ######################################## class SOTestIdentity(SQLObject): n = IntCol() def test_identity(): # (re)create table SOTestIdentity.dropTable(connection=getConnection(), ifExists=True) setupClass(SOTestIdentity) # insert without giving identity SOTestIdentity(n=100) # i1 # verify result i1get = SOTestIdentity.get(1) assert (i1get.n == 100) # insert while giving identity SOTestIdentity(id=2, n=200) # i2 # verify result i2get = SOTestIdentity.get(2) assert (i2get.n == 200)
from sqlobject import IntCol, SQLObject from sqlobject.tests.dbtest import getConnection, setupClass ######################################## # Identity (MS SQL) ######################################## class SOTestIdentity(SQLObject): n = IntCol() def test_identity(): # (re)create table SOTestIdentity.dropTable(connection=getConnection(), ifExists=True) setupClass(SOTestIdentity) # insert without giving identity SOTestIdentity(n=100) # i1 # verify result i1get = SOTestIdentity.get(1) assert(i1get.n == 100) # insert while giving identity SOTestIdentity(id=2, n=200) # i2 # verify result i2get = SOTestIdentity.get(2) assert(i2get.n == 200) Style: Fix `flake8` E275 missing whitespace after keywordfrom sqlobject import IntCol, SQLObject from sqlobject.tests.dbtest import getConnection, setupClass ######################################## # Identity (MS SQL) ######################################## class SOTestIdentity(SQLObject): n = IntCol() def test_identity(): # (re)create table SOTestIdentity.dropTable(connection=getConnection(), ifExists=True) setupClass(SOTestIdentity) # insert without giving identity SOTestIdentity(n=100) # i1 # verify result i1get = SOTestIdentity.get(1) assert (i1get.n == 100) # insert while giving identity SOTestIdentity(id=2, n=200) # i2 # verify result i2get = SOTestIdentity.get(2) assert (i2get.n == 200)
<commit_before>from sqlobject import IntCol, SQLObject from sqlobject.tests.dbtest import getConnection, setupClass ######################################## # Identity (MS SQL) ######################################## class SOTestIdentity(SQLObject): n = IntCol() def test_identity(): # (re)create table SOTestIdentity.dropTable(connection=getConnection(), ifExists=True) setupClass(SOTestIdentity) # insert without giving identity SOTestIdentity(n=100) # i1 # verify result i1get = SOTestIdentity.get(1) assert(i1get.n == 100) # insert while giving identity SOTestIdentity(id=2, n=200) # i2 # verify result i2get = SOTestIdentity.get(2) assert(i2get.n == 200) <commit_msg>Style: Fix `flake8` E275 missing whitespace after keyword<commit_after>from sqlobject import IntCol, SQLObject from sqlobject.tests.dbtest import getConnection, setupClass ######################################## # Identity (MS SQL) ######################################## class SOTestIdentity(SQLObject): n = IntCol() def test_identity(): # (re)create table SOTestIdentity.dropTable(connection=getConnection(), ifExists=True) setupClass(SOTestIdentity) # insert without giving identity SOTestIdentity(n=100) # i1 # verify result i1get = SOTestIdentity.get(1) assert (i1get.n == 100) # insert while giving identity SOTestIdentity(id=2, n=200) # i2 # verify result i2get = SOTestIdentity.get(2) assert (i2get.n == 200)
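For context on the rule fixed above: pycodestyle's E275 fires when a keyword such as assert is followed immediately by an opening parenthesis, which makes the statement look like a function call. A minimal pair, illustrative only:

x = 1
assert(x == 1)    # E275: missing whitespace after keyword
assert (x == 1)   # what this commit changes the tests to
assert x == 1     # also fine, and avoids the call-like appearance entirely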
59b59e75f87942dfd54f8542b04e4185a871cf4b
utils/messaging.py
utils/messaging.py
""" Contains utilities regarding messages """ def paginate(string, pref='```\n', aff='```', max_length=2000, sep='\n'): 'Chop a string into even chunks of max_length around the given separator' max_size = max_length - len(pref) - len(aff) str_length = len(string) if str_length <= max_size: return [pref + string + aff] else: split = string.rfind(sep, 0, max_size) + 1 if split: return ([pref + string[:split] + aff] + paginate(string[split:], pref, aff, max_length, sep)) else: return ([pref + string[:max_size] + aff] + paginate(string[max_size:], pref, aff, max_length, sep)) async def notify_owner(bot, messages): 'Send message to the private channel of the owner' channel = await bot.get_user_info(bot.config.get('owner_id')) for message in messages: await bot.send_message(channel, message)
""" Contains utilities regarding messages """ def paginate(string, pref='```\n', aff='```', max_length=2000, sep='\n'): 'Chop a string into even chunks of max_length around the given separator' max_size = max_length - len(pref) - len(aff) str_length = len(string) if str_length <= max_size: return [pref + string + aff] else: split = string.rfind(sep, 0, max_size) + 1 if split: return ([pref + string[:split] + aff] + paginate(string[split:], pref, aff, max_length, sep)) else: return ([pref + string[:max_size] + aff] + paginate(string[max_size:], pref, aff, max_length, sep)) async def notify_owner(bot, messages): 'Send message to the private channel of the owner' channel = await bot.get_user_info(bot.config.get('owner_id')) for message in messages: await bot.send_message(channel, message) async def message_input(ctx, prompt, timeout=60): message = await ctx.bot.say(prompt) password = await ctx.bot.wait_for_message( timeout=timeout, author=ctx.message.author, channel=ctx.message.channel) if not password: await ctx.bot.edit_message( message, new_content='Timed out, cancelling.') return password
Add util function for accepting input by PM
Add util function for accepting input by PM
Python
mit
randomic/antinub-gregbot
""" Contains utilities regarding messages """ def paginate(string, pref='```\n', aff='```', max_length=2000, sep='\n'): 'Chop a string into even chunks of max_length around the given separator' max_size = max_length - len(pref) - len(aff) str_length = len(string) if str_length <= max_size: return [pref + string + aff] else: split = string.rfind(sep, 0, max_size) + 1 if split: return ([pref + string[:split] + aff] + paginate(string[split:], pref, aff, max_length, sep)) else: return ([pref + string[:max_size] + aff] + paginate(string[max_size:], pref, aff, max_length, sep)) async def notify_owner(bot, messages): 'Send message to the private channel of the owner' channel = await bot.get_user_info(bot.config.get('owner_id')) for message in messages: await bot.send_message(channel, message) Add util function for accepting input by PM
""" Contains utilities regarding messages """ def paginate(string, pref='```\n', aff='```', max_length=2000, sep='\n'): 'Chop a string into even chunks of max_length around the given separator' max_size = max_length - len(pref) - len(aff) str_length = len(string) if str_length <= max_size: return [pref + string + aff] else: split = string.rfind(sep, 0, max_size) + 1 if split: return ([pref + string[:split] + aff] + paginate(string[split:], pref, aff, max_length, sep)) else: return ([pref + string[:max_size] + aff] + paginate(string[max_size:], pref, aff, max_length, sep)) async def notify_owner(bot, messages): 'Send message to the private channel of the owner' channel = await bot.get_user_info(bot.config.get('owner_id')) for message in messages: await bot.send_message(channel, message) async def message_input(ctx, prompt, timeout=60): message = await ctx.bot.say(prompt) password = await ctx.bot.wait_for_message( timeout=timeout, author=ctx.message.author, channel=ctx.message.channel) if not password: await ctx.bot.edit_message( message, new_content='Timed out, cancelling.') return password
<commit_before>""" Contains utilities regarding messages """ def paginate(string, pref='```\n', aff='```', max_length=2000, sep='\n'): 'Chop a string into even chunks of max_length around the given separator' max_size = max_length - len(pref) - len(aff) str_length = len(string) if str_length <= max_size: return [pref + string + aff] else: split = string.rfind(sep, 0, max_size) + 1 if split: return ([pref + string[:split] + aff] + paginate(string[split:], pref, aff, max_length, sep)) else: return ([pref + string[:max_size] + aff] + paginate(string[max_size:], pref, aff, max_length, sep)) async def notify_owner(bot, messages): 'Send message to the private channel of the owner' channel = await bot.get_user_info(bot.config.get('owner_id')) for message in messages: await bot.send_message(channel, message) <commit_msg>Add util function for accepting input by PM<commit_after>
""" Contains utilities regarding messages """ def paginate(string, pref='```\n', aff='```', max_length=2000, sep='\n'): 'Chop a string into even chunks of max_length around the given separator' max_size = max_length - len(pref) - len(aff) str_length = len(string) if str_length <= max_size: return [pref + string + aff] else: split = string.rfind(sep, 0, max_size) + 1 if split: return ([pref + string[:split] + aff] + paginate(string[split:], pref, aff, max_length, sep)) else: return ([pref + string[:max_size] + aff] + paginate(string[max_size:], pref, aff, max_length, sep)) async def notify_owner(bot, messages): 'Send message to the private channel of the owner' channel = await bot.get_user_info(bot.config.get('owner_id')) for message in messages: await bot.send_message(channel, message) async def message_input(ctx, prompt, timeout=60): message = await ctx.bot.say(prompt) password = await ctx.bot.wait_for_message( timeout=timeout, author=ctx.message.author, channel=ctx.message.channel) if not password: await ctx.bot.edit_message( message, new_content='Timed out, cancelling.') return password
""" Contains utilities regarding messages """ def paginate(string, pref='```\n', aff='```', max_length=2000, sep='\n'): 'Chop a string into even chunks of max_length around the given separator' max_size = max_length - len(pref) - len(aff) str_length = len(string) if str_length <= max_size: return [pref + string + aff] else: split = string.rfind(sep, 0, max_size) + 1 if split: return ([pref + string[:split] + aff] + paginate(string[split:], pref, aff, max_length, sep)) else: return ([pref + string[:max_size] + aff] + paginate(string[max_size:], pref, aff, max_length, sep)) async def notify_owner(bot, messages): 'Send message to the private channel of the owner' channel = await bot.get_user_info(bot.config.get('owner_id')) for message in messages: await bot.send_message(channel, message) Add util function for accepting input by PM""" Contains utilities regarding messages """ def paginate(string, pref='```\n', aff='```', max_length=2000, sep='\n'): 'Chop a string into even chunks of max_length around the given separator' max_size = max_length - len(pref) - len(aff) str_length = len(string) if str_length <= max_size: return [pref + string + aff] else: split = string.rfind(sep, 0, max_size) + 1 if split: return ([pref + string[:split] + aff] + paginate(string[split:], pref, aff, max_length, sep)) else: return ([pref + string[:max_size] + aff] + paginate(string[max_size:], pref, aff, max_length, sep)) async def notify_owner(bot, messages): 'Send message to the private channel of the owner' channel = await bot.get_user_info(bot.config.get('owner_id')) for message in messages: await bot.send_message(channel, message) async def message_input(ctx, prompt, timeout=60): message = await ctx.bot.say(prompt) password = await ctx.bot.wait_for_message( timeout=timeout, author=ctx.message.author, channel=ctx.message.channel) if not password: await ctx.bot.edit_message( message, new_content='Timed out, cancelling.') return password
<commit_before>""" Contains utilities regarding messages """ def paginate(string, pref='```\n', aff='```', max_length=2000, sep='\n'): 'Chop a string into even chunks of max_length around the given separator' max_size = max_length - len(pref) - len(aff) str_length = len(string) if str_length <= max_size: return [pref + string + aff] else: split = string.rfind(sep, 0, max_size) + 1 if split: return ([pref + string[:split] + aff] + paginate(string[split:], pref, aff, max_length, sep)) else: return ([pref + string[:max_size] + aff] + paginate(string[max_size:], pref, aff, max_length, sep)) async def notify_owner(bot, messages): 'Send message to the private channel of the owner' channel = await bot.get_user_info(bot.config.get('owner_id')) for message in messages: await bot.send_message(channel, message) <commit_msg>Add util function for accepting input by PM<commit_after>""" Contains utilities regarding messages """ def paginate(string, pref='```\n', aff='```', max_length=2000, sep='\n'): 'Chop a string into even chunks of max_length around the given separator' max_size = max_length - len(pref) - len(aff) str_length = len(string) if str_length <= max_size: return [pref + string + aff] else: split = string.rfind(sep, 0, max_size) + 1 if split: return ([pref + string[:split] + aff] + paginate(string[split:], pref, aff, max_length, sep)) else: return ([pref + string[:max_size] + aff] + paginate(string[max_size:], pref, aff, max_length, sep)) async def notify_owner(bot, messages): 'Send message to the private channel of the owner' channel = await bot.get_user_info(bot.config.get('owner_id')) for message in messages: await bot.send_message(channel, message) async def message_input(ctx, prompt, timeout=60): message = await ctx.bot.say(prompt) password = await ctx.bot.wait_for_message( timeout=timeout, author=ctx.message.author, channel=ctx.message.channel) if not password: await ctx.bot.edit_message( message, new_content='Timed out, cancelling.') return password
0e75e31b9e038ca6e0b399ff3b684afcd271c090
prefixlist/prefixlist.py
prefixlist/prefixlist.py
class PrefixList: """ The PrefixList holds the data received from routing registries and the validation results of this data. """ def __init__(self, name): self.name = name self.members = [] def __iter__(self): for member in self.members: yield member def add_member(self, member): memb = { "asn": member.asn, "permit": None, "inet": [], "inet6": [] } for prefix in member.inet: p = { "prefix": prefix, "permit": None } memb["inet"].append(p) for prefix in member.inet6: p = { "prefix": prefix, "permit": None } memb["inet6"].append(p) self.members.append(memb) @classmethod def from_asset(cls, asset): obj = PrefixList(asset.name) for member in asset: obj.add_member(member) return obj def debug(self): for member in self.members: print("AS{}: {}".format(member["asn"], member["permit"])) for i in member["inet"]: print("--{}: {}".format(i["prefix"], i["permit"])) for i in member["inet6"]: print("--{}: {}".format(i["prefix"], i["permit"]))
class PrefixList: """ The PrefixList holds the data received from routing registries and the validation results of this data. """ def __init__(self, name): self.name = name self.members = {} def __iter__(self): for asn in self.members: yield self.members[asn] def add_member(self, member): if member.asn in self.members: return memb = { "asn": member.asn, "permit": None, "inet": [], "inet6": [] } for prefix in member.inet: p = { "prefix": prefix, "permit": None } memb["inet"].append(p) for prefix in member.inet6: p = { "prefix": prefix, "permit": None } memb["inet6"].append(p) self.members[member.asn] = memb @classmethod def from_asset(cls, asset): obj = PrefixList(asset.name) for member in asset: obj.add_member(member) return obj def debug(self): for asn in self.members: member = self.members[asn] print("AS{}: {}".format(asn, member["permit"])) for i in member["inet"]: print("--{}: {}".format(i["prefix"], i["permit"])) for i in member["inet6"]: print("--{}: {}".format(i["prefix"], i["permit"]))
Store data set as dict instead of list
Store data set as dict instead of list
Python
bsd-2-clause
emjemj/pre-fixlist
class PrefixList: """ The PrefixList holds the data received from routing registries and the validation results of this data. """ def __init__(self, name): self.name = name self.members = [] def __iter__(self): for member in self.members: yield member def add_member(self, member): memb = { "asn": member.asn, "permit": None, "inet": [], "inet6": [] } for prefix in member.inet: p = { "prefix": prefix, "permit": None } memb["inet"].append(p) for prefix in member.inet6: p = { "prefix": prefix, "permit": None } memb["inet6"].append(p) self.members.append(memb) @classmethod def from_asset(cls, asset): obj = PrefixList(asset.name) for member in asset: obj.add_member(member) return obj def debug(self): for member in self.members: print("AS{}: {}".format(member["asn"], member["permit"])) for i in member["inet"]: print("--{}: {}".format(i["prefix"], i["permit"])) for i in member["inet6"]: print("--{}: {}".format(i["prefix"], i["permit"])) Store data set as dict instead of list
class PrefixList: """ The PrefixList holds the data received from routing registries and the validation results of this data. """ def __init__(self, name): self.name = name self.members = {} def __iter__(self): for asn in self.members: yield self.members[asn] def add_member(self, member): if member.asn in self.members: return memb = { "asn": member.asn, "permit": None, "inet": [], "inet6": [] } for prefix in member.inet: p = { "prefix": prefix, "permit": None } memb["inet"].append(p) for prefix in member.inet6: p = { "prefix": prefix, "permit": None } memb["inet6"].append(p) self.members[member.asn] = memb @classmethod def from_asset(cls, asset): obj = PrefixList(asset.name) for member in asset: obj.add_member(member) return obj def debug(self): for asn in self.members: member = self.members[asn] print("AS{}: {}".format(asn, member["permit"])) for i in member["inet"]: print("--{}: {}".format(i["prefix"], i["permit"])) for i in member["inet6"]: print("--{}: {}".format(i["prefix"], i["permit"]))
<commit_before>class PrefixList: """ The PrefixList holds the data received from routing registries and the validation results of this data. """ def __init__(self, name): self.name = name self.members = [] def __iter__(self): for member in self.members: yield member def add_member(self, member): memb = { "asn": member.asn, "permit": None, "inet": [], "inet6": [] } for prefix in member.inet: p = { "prefix": prefix, "permit": None } memb["inet"].append(p) for prefix in member.inet6: p = { "prefix": prefix, "permit": None } memb["inet6"].append(p) self.members.append(memb) @classmethod def from_asset(cls, asset): obj = PrefixList(asset.name) for member in asset: obj.add_member(member) return obj def debug(self): for member in self.members: print("AS{}: {}".format(member["asn"], member["permit"])) for i in member["inet"]: print("--{}: {}".format(i["prefix"], i["permit"])) for i in member["inet6"]: print("--{}: {}".format(i["prefix"], i["permit"])) <commit_msg>Store data set as dict instead of list<commit_after>
class PrefixList: """ The PrefixList holds the data received from routing registries and the validation results of this data. """ def __init__(self, name): self.name = name self.members = {} def __iter__(self): for asn in self.members: yield self.members[asn] def add_member(self, member): if member.asn in self.members: return memb = { "asn": member.asn, "permit": None, "inet": [], "inet6": [] } for prefix in member.inet: p = { "prefix": prefix, "permit": None } memb["inet"].append(p) for prefix in member.inet6: p = { "prefix": prefix, "permit": None } memb["inet6"].append(p) self.members[member.asn] = memb @classmethod def from_asset(cls, asset): obj = PrefixList(asset.name) for member in asset: obj.add_member(member) return obj def debug(self): for asn in self.members: member = self.members[asn] print("AS{}: {}".format(asn, member["permit"])) for i in member["inet"]: print("--{}: {}".format(i["prefix"], i["permit"])) for i in member["inet6"]: print("--{}: {}".format(i["prefix"], i["permit"]))
class PrefixList: """ The PrefixList holds the data received from routing registries and the validation results of this data. """ def __init__(self, name): self.name = name self.members = [] def __iter__(self): for member in self.members: yield member def add_member(self, member): memb = { "asn": member.asn, "permit": None, "inet": [], "inet6": [] } for prefix in member.inet: p = { "prefix": prefix, "permit": None } memb["inet"].append(p) for prefix in member.inet6: p = { "prefix": prefix, "permit": None } memb["inet6"].append(p) self.members.append(memb) @classmethod def from_asset(cls, asset): obj = PrefixList(asset.name) for member in asset: obj.add_member(member) return obj def debug(self): for member in self.members: print("AS{}: {}".format(member["asn"], member["permit"])) for i in member["inet"]: print("--{}: {}".format(i["prefix"], i["permit"])) for i in member["inet6"]: print("--{}: {}".format(i["prefix"], i["permit"])) Store data set as dict instead of listclass PrefixList: """ The PrefixList holds the data received from routing registries and the validation results of this data. """ def __init__(self, name): self.name = name self.members = {} def __iter__(self): for asn in self.members: yield self.members[asn] def add_member(self, member): if member.asn in self.members: return memb = { "asn": member.asn, "permit": None, "inet": [], "inet6": [] } for prefix in member.inet: p = { "prefix": prefix, "permit": None } memb["inet"].append(p) for prefix in member.inet6: p = { "prefix": prefix, "permit": None } memb["inet6"].append(p) self.members[member.asn] = memb @classmethod def from_asset(cls, asset): obj = PrefixList(asset.name) for member in asset: obj.add_member(member) return obj def debug(self): for asn in self.members: member = self.members[asn] print("AS{}: {}".format(asn, member["permit"])) for i in member["inet"]: print("--{}: {}".format(i["prefix"], i["permit"])) for i in member["inet6"]: print("--{}: {}".format(i["prefix"], i["permit"]))
<commit_before>class PrefixList: """ The PrefixList holds the data received from routing registries and the validation results of this data. """ def __init__(self, name): self.name = name self.members = [] def __iter__(self): for member in self.members: yield member def add_member(self, member): memb = { "asn": member.asn, "permit": None, "inet": [], "inet6": [] } for prefix in member.inet: p = { "prefix": prefix, "permit": None } memb["inet"].append(p) for prefix in member.inet6: p = { "prefix": prefix, "permit": None } memb["inet6"].append(p) self.members.append(memb) @classmethod def from_asset(cls, asset): obj = PrefixList(asset.name) for member in asset: obj.add_member(member) return obj def debug(self): for member in self.members: print("AS{}: {}".format(member["asn"], member["permit"])) for i in member["inet"]: print("--{}: {}".format(i["prefix"], i["permit"])) for i in member["inet6"]: print("--{}: {}".format(i["prefix"], i["permit"])) <commit_msg>Store data set as dict instead of list<commit_after>class PrefixList: """ The PrefixList holds the data received from routing registries and the validation results of this data. """ def __init__(self, name): self.name = name self.members = {} def __iter__(self): for asn in self.members: yield self.members[asn] def add_member(self, member): if member.asn in self.members: return memb = { "asn": member.asn, "permit": None, "inet": [], "inet6": [] } for prefix in member.inet: p = { "prefix": prefix, "permit": None } memb["inet"].append(p) for prefix in member.inet6: p = { "prefix": prefix, "permit": None } memb["inet6"].append(p) self.members[member.asn] = memb @classmethod def from_asset(cls, asset): obj = PrefixList(asset.name) for member in asset: obj.add_member(member) return obj def debug(self): for asn in self.members: member = self.members[asn] print("AS{}: {}".format(asn, member["permit"])) for i in member["inet"]: print("--{}: {}".format(i["prefix"], i["permit"])) for i in member["inet6"]: print("--{}: {}".format(i["prefix"], i["permit"]))
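The dict keyed by ASN does two jobs the list could not: duplicate members collapse via the early return in add_member, and per-ASN lookup becomes O(1). A toy demonstration using a made-up member object in place of a real AS-set entry (documentation ASN and prefix chosen deliberately):

class FakeMember(object):
    def __init__(self, asn):
        self.asn = asn
        self.inet = ['192.0.2.0/24']
        self.inet6 = []

pl = PrefixList('AS-EXAMPLE')
pl.add_member(FakeMember(64496))
pl.add_member(FakeMember(64496))   # ignored: ASN already present
assert len(pl.members) == 1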
0947643977b989ca924bcf932a5153472e362108
plata/utils.py
plata/utils.py
from django.utils import simplejson class JSONFieldDescriptor(object): def __init__(self, field): self.field = field def __get__(self, obj, objtype): cache_field = '_cached_jsonfield_%s' % self.field if not hasattr(obj, cache_field): try: setattr(obj, cache_field, simplejson.loads(getattr(obj, self.field))) except (TypeError, ValueError): setattr(obj, cache_field, {}) return getattr(obj, cache_field) def __set__(self, obj, value): setattr(obj, '_cached_jsonfield_%s' % self.field, value) setattr(obj, self.field, simplejson.dumps(value))
from django.core.serializers.json import DjangoJSONEncoder from django.utils import simplejson class JSONFieldDescriptor(object): def __init__(self, field): self.field = field def __get__(self, obj, objtype): cache_field = '_cached_jsonfield_%s' % self.field if not hasattr(obj, cache_field): try: setattr(obj, cache_field, simplejson.loads(getattr(obj, self.field))) except (TypeError, ValueError): setattr(obj, cache_field, {}) return getattr(obj, cache_field) def __set__(self, obj, value): setattr(obj, '_cached_jsonfield_%s' % self.field, value) setattr(obj, self.field, simplejson.dumps(value, cls=DjangoJSONEncoder))
Use DjangoJSONEncoder, it knows how to handle dates and decimals
JSONFieldDescriptor: Use DjangoJSONEncoder, it knows how to handle dates and decimals
Python
bsd-3-clause
armicron/plata,stefanklug/plata,allink/plata,armicron/plata,armicron/plata
from django.utils import simplejson class JSONFieldDescriptor(object): def __init__(self, field): self.field = field def __get__(self, obj, objtype): cache_field = '_cached_jsonfield_%s' % self.field if not hasattr(obj, cache_field): try: setattr(obj, cache_field, simplejson.loads(getattr(obj, self.field))) except (TypeError, ValueError): setattr(obj, cache_field, {}) return getattr(obj, cache_field) def __set__(self, obj, value): setattr(obj, '_cached_jsonfield_%s' % self.field, value) setattr(obj, self.field, simplejson.dumps(value)) JSONFieldDescriptor: Use DjangoJSONEncoder, it knows how to handle dates and decimals
from django.core.serializers.json import DjangoJSONEncoder from django.utils import simplejson class JSONFieldDescriptor(object): def __init__(self, field): self.field = field def __get__(self, obj, objtype): cache_field = '_cached_jsonfield_%s' % self.field if not hasattr(obj, cache_field): try: setattr(obj, cache_field, simplejson.loads(getattr(obj, self.field))) except (TypeError, ValueError): setattr(obj, cache_field, {}) return getattr(obj, cache_field) def __set__(self, obj, value): setattr(obj, '_cached_jsonfield_%s' % self.field, value) setattr(obj, self.field, simplejson.dumps(value, cls=DjangoJSONEncoder))
<commit_before>from django.utils import simplejson class JSONFieldDescriptor(object): def __init__(self, field): self.field = field def __get__(self, obj, objtype): cache_field = '_cached_jsonfield_%s' % self.field if not hasattr(obj, cache_field): try: setattr(obj, cache_field, simplejson.loads(getattr(obj, self.field))) except (TypeError, ValueError): setattr(obj, cache_field, {}) return getattr(obj, cache_field) def __set__(self, obj, value): setattr(obj, '_cached_jsonfield_%s' % self.field, value) setattr(obj, self.field, simplejson.dumps(value)) <commit_msg>JSONFieldDescriptor: Use DjangoJSONEncoder, it knows how to handle dates and decimals<commit_after>
from django.core.serializers.json import DjangoJSONEncoder from django.utils import simplejson class JSONFieldDescriptor(object): def __init__(self, field): self.field = field def __get__(self, obj, objtype): cache_field = '_cached_jsonfield_%s' % self.field if not hasattr(obj, cache_field): try: setattr(obj, cache_field, simplejson.loads(getattr(obj, self.field))) except (TypeError, ValueError): setattr(obj, cache_field, {}) return getattr(obj, cache_field) def __set__(self, obj, value): setattr(obj, '_cached_jsonfield_%s' % self.field, value) setattr(obj, self.field, simplejson.dumps(value, cls=DjangoJSONEncoder))
from django.utils import simplejson class JSONFieldDescriptor(object): def __init__(self, field): self.field = field def __get__(self, obj, objtype): cache_field = '_cached_jsonfield_%s' % self.field if not hasattr(obj, cache_field): try: setattr(obj, cache_field, simplejson.loads(getattr(obj, self.field))) except (TypeError, ValueError): setattr(obj, cache_field, {}) return getattr(obj, cache_field) def __set__(self, obj, value): setattr(obj, '_cached_jsonfield_%s' % self.field, value) setattr(obj, self.field, simplejson.dumps(value)) JSONFieldDescriptor: Use DjangoJSONEncoder, it knows how to handle dates and decimalsfrom django.core.serializers.json import DjangoJSONEncoder from django.utils import simplejson class JSONFieldDescriptor(object): def __init__(self, field): self.field = field def __get__(self, obj, objtype): cache_field = '_cached_jsonfield_%s' % self.field if not hasattr(obj, cache_field): try: setattr(obj, cache_field, simplejson.loads(getattr(obj, self.field))) except (TypeError, ValueError): setattr(obj, cache_field, {}) return getattr(obj, cache_field) def __set__(self, obj, value): setattr(obj, '_cached_jsonfield_%s' % self.field, value) setattr(obj, self.field, simplejson.dumps(value, cls=DjangoJSONEncoder))
<commit_before>from django.utils import simplejson class JSONFieldDescriptor(object): def __init__(self, field): self.field = field def __get__(self, obj, objtype): cache_field = '_cached_jsonfield_%s' % self.field if not hasattr(obj, cache_field): try: setattr(obj, cache_field, simplejson.loads(getattr(obj, self.field))) except (TypeError, ValueError): setattr(obj, cache_field, {}) return getattr(obj, cache_field) def __set__(self, obj, value): setattr(obj, '_cached_jsonfield_%s' % self.field, value) setattr(obj, self.field, simplejson.dumps(value)) <commit_msg>JSONFieldDescriptor: Use DjangoJSONEncoder, it knows how to handle dates and decimals<commit_after>from django.core.serializers.json import DjangoJSONEncoder from django.utils import simplejson class JSONFieldDescriptor(object): def __init__(self, field): self.field = field def __get__(self, obj, objtype): cache_field = '_cached_jsonfield_%s' % self.field if not hasattr(obj, cache_field): try: setattr(obj, cache_field, simplejson.loads(getattr(obj, self.field))) except (TypeError, ValueError): setattr(obj, cache_field, {}) return getattr(obj, cache_field) def __set__(self, obj, value): setattr(obj, '_cached_jsonfield_%s' % self.field, value) setattr(obj, self.field, simplejson.dumps(value, cls=DjangoJSONEncoder))
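The rationale in the commit message above is easy to verify: the stdlib encoder raises TypeError on date and Decimal values, while DjangoJSONEncoder serializes both as strings. A quick sketch with invented values:

import datetime
import decimal
import json

from django.core.serializers.json import DjangoJSONEncoder

data = {'updated': datetime.date(2014, 1, 1), 'price': decimal.Decimal('9.99')}
print(json.dumps(data, cls=DjangoJSONEncoder))
# -> {"updated": "2014-01-01", "price": "9.99"}
# json.dumps(data) without the encoder raises TypeError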
d81c64f68aa47581aa8207f858aec8af1bb805d9
wallace/sources.py
wallace/sources.py
from .models import Node, Info from sqlalchemy import ForeignKey, Column, String import random class Source(Node): __tablename__ = "source" __mapper_args__ = {"polymorphic_identity": "generic_source"} uuid = Column(String(32), ForeignKey("node.uuid"), primary_key=True) def create_information(self, what=None, to_whom=None): """Generate new information.""" raise NotImplementedError( "You need to overwrite the default create_information.") def transmit(self, what=None, to_whom=None): self.create_information(what=what, to_whom=to_whom) super(Source, self).transmit(to_whom=to_whom, what=what) class RandomBinaryStringSource(Source): """An agent whose genome and memome are random binary strings. The source only transmits; it does not update. """ __mapper_args__ = {"polymorphic_identity": "random_binary_string_source"} def create_information(self, what=None, to_whom=None): Info( origin=self, origin_uuid=self.uuid, contents=self._binary_string()) def _binary_string(self): return "".join([str(random.randint(0, 1)) for i in range(2)])
from .models import Node, Info from sqlalchemy import ForeignKey, Column, String import random class Source(Node): __tablename__ = "source" __mapper_args__ = {"polymorphic_identity": "generic_source"} uuid = Column(String(32), ForeignKey("node.uuid"), primary_key=True) def create_information(self): """Generate new information.""" raise NotImplementedError( "You need to overwrite the default create_information.") def transmit(self, what=None, to_whom=None): info = self.create_information() super(Source, self).transmit(to_whom=to_whom, what=info) class RandomBinaryStringSource(Source): """An agent whose genome and memome are random binary strings. The source only transmits; it does not update. """ __mapper_args__ = {"polymorphic_identity": "random_binary_string_source"} def create_information(self): info = Info( origin=self, origin_uuid=self.uuid, contents=self._binary_string()) return info def _binary_string(self): return "".join([str(random.randint(0, 1)) for i in range(2)])
Fix bug that arose through grammar tweaking
Fix bug that arose through grammar tweaking
Python
mit
jcpeterson/Dallinger,berkeley-cocosci/Wallace,jcpeterson/Dallinger,jcpeterson/Dallinger,suchow/Wallace,suchow/Wallace,Dallinger/Dallinger,Dallinger/Dallinger,jcpeterson/Dallinger,jcpeterson/Dallinger,Dallinger/Dallinger,Dallinger/Dallinger,suchow/Wallace,berkeley-cocosci/Wallace,Dallinger/Dallinger,berkeley-cocosci/Wallace
from .models import Node, Info from sqlalchemy import ForeignKey, Column, String import random class Source(Node): __tablename__ = "source" __mapper_args__ = {"polymorphic_identity": "generic_source"} uuid = Column(String(32), ForeignKey("node.uuid"), primary_key=True) def create_information(self, what=None, to_whom=None): """Generate new information.""" raise NotImplementedError( "You need to overwrite the default create_information.") def transmit(self, what=None, to_whom=None): self.create_information(what=what, to_whom=to_whom) super(Source, self).transmit(to_whom=to_whom, what=what) class RandomBinaryStringSource(Source): """An agent whose genome and memome are random binary strings. The source only transmits; it does not update. """ __mapper_args__ = {"polymorphic_identity": "random_binary_string_source"} def create_information(self, what=None, to_whom=None): Info( origin=self, origin_uuid=self.uuid, contents=self._binary_string()) def _binary_string(self): return "".join([str(random.randint(0, 1)) for i in range(2)]) Fix bug that arose through grammar tweaking
from .models import Node, Info from sqlalchemy import ForeignKey, Column, String import random class Source(Node): __tablename__ = "source" __mapper_args__ = {"polymorphic_identity": "generic_source"} uuid = Column(String(32), ForeignKey("node.uuid"), primary_key=True) def create_information(self): """Generate new information.""" raise NotImplementedError( "You need to overwrite the default create_information.") def transmit(self, what=None, to_whom=None): info = self.create_information() super(Source, self).transmit(to_whom=to_whom, what=info) class RandomBinaryStringSource(Source): """An agent whose genome and memome are random binary strings. The source only transmits; it does not update. """ __mapper_args__ = {"polymorphic_identity": "random_binary_string_source"} def create_information(self): info = Info( origin=self, origin_uuid=self.uuid, contents=self._binary_string()) return info def _binary_string(self): return "".join([str(random.randint(0, 1)) for i in range(2)])
<commit_before>from .models import Node, Info from sqlalchemy import ForeignKey, Column, String import random class Source(Node): __tablename__ = "source" __mapper_args__ = {"polymorphic_identity": "generic_source"} uuid = Column(String(32), ForeignKey("node.uuid"), primary_key=True) def create_information(self, what=None, to_whom=None): """Generate new information.""" raise NotImplementedError( "You need to overwrite the default create_information.") def transmit(self, what=None, to_whom=None): self.create_information(what=what, to_whom=to_whom) super(Source, self).transmit(to_whom=to_whom, what=what) class RandomBinaryStringSource(Source): """An agent whose genome and memome are random binary strings. The source only transmits; it does not update. """ __mapper_args__ = {"polymorphic_identity": "random_binary_string_source"} def create_information(self, what=None, to_whom=None): Info( origin=self, origin_uuid=self.uuid, contents=self._binary_string()) def _binary_string(self): return "".join([str(random.randint(0, 1)) for i in range(2)]) <commit_msg>Fix bug that arose through grammar tweaking<commit_after>
from .models import Node, Info from sqlalchemy import ForeignKey, Column, String import random class Source(Node): __tablename__ = "source" __mapper_args__ = {"polymorphic_identity": "generic_source"} uuid = Column(String(32), ForeignKey("node.uuid"), primary_key=True) def create_information(self): """Generate new information.""" raise NotImplementedError( "You need to overwrite the default create_information.") def transmit(self, what=None, to_whom=None): info = self.create_information() super(Source, self).transmit(to_whom=to_whom, what=info) class RandomBinaryStringSource(Source): """An agent whose genome and memome are random binary strings. The source only transmits; it does not update. """ __mapper_args__ = {"polymorphic_identity": "random_binary_string_source"} def create_information(self): info = Info( origin=self, origin_uuid=self.uuid, contents=self._binary_string()) return info def _binary_string(self): return "".join([str(random.randint(0, 1)) for i in range(2)])
from .models import Node, Info from sqlalchemy import ForeignKey, Column, String import random class Source(Node): __tablename__ = "source" __mapper_args__ = {"polymorphic_identity": "generic_source"} uuid = Column(String(32), ForeignKey("node.uuid"), primary_key=True) def create_information(self, what=None, to_whom=None): """Generate new information.""" raise NotImplementedError( "You need to overwrite the default create_information.") def transmit(self, what=None, to_whom=None): self.create_information(what=what, to_whom=to_whom) super(Source, self).transmit(to_whom=to_whom, what=what) class RandomBinaryStringSource(Source): """An agent whose genome and memome are random binary strings. The source only transmits; it does not update. """ __mapper_args__ = {"polymorphic_identity": "random_binary_string_source"} def create_information(self, what=None, to_whom=None): Info( origin=self, origin_uuid=self.uuid, contents=self._binary_string()) def _binary_string(self): return "".join([str(random.randint(0, 1)) for i in range(2)]) Fix bug that arose through grammar tweakingfrom .models import Node, Info from sqlalchemy import ForeignKey, Column, String import random class Source(Node): __tablename__ = "source" __mapper_args__ = {"polymorphic_identity": "generic_source"} uuid = Column(String(32), ForeignKey("node.uuid"), primary_key=True) def create_information(self): """Generate new information.""" raise NotImplementedError( "You need to overwrite the default create_information.") def transmit(self, what=None, to_whom=None): info = self.create_information() super(Source, self).transmit(to_whom=to_whom, what=info) class RandomBinaryStringSource(Source): """An agent whose genome and memome are random binary strings. The source only transmits; it does not update. """ __mapper_args__ = {"polymorphic_identity": "random_binary_string_source"} def create_information(self): info = Info( origin=self, origin_uuid=self.uuid, contents=self._binary_string()) return info def _binary_string(self): return "".join([str(random.randint(0, 1)) for i in range(2)])
<commit_before>from .models import Node, Info from sqlalchemy import ForeignKey, Column, String import random class Source(Node): __tablename__ = "source" __mapper_args__ = {"polymorphic_identity": "generic_source"} uuid = Column(String(32), ForeignKey("node.uuid"), primary_key=True) def create_information(self, what=None, to_whom=None): """Generate new information.""" raise NotImplementedError( "You need to overwrite the default create_information.") def transmit(self, what=None, to_whom=None): self.create_information(what=what, to_whom=to_whom) super(Source, self).transmit(to_whom=to_whom, what=what) class RandomBinaryStringSource(Source): """An agent whose genome and memome are random binary strings. The source only transmits; it does not update. """ __mapper_args__ = {"polymorphic_identity": "random_binary_string_source"} def create_information(self, what=None, to_whom=None): Info( origin=self, origin_uuid=self.uuid, contents=self._binary_string()) def _binary_string(self): return "".join([str(random.randint(0, 1)) for i in range(2)]) <commit_msg>Fix bug that arose through grammar tweaking<commit_after>from .models import Node, Info from sqlalchemy import ForeignKey, Column, String import random class Source(Node): __tablename__ = "source" __mapper_args__ = {"polymorphic_identity": "generic_source"} uuid = Column(String(32), ForeignKey("node.uuid"), primary_key=True) def create_information(self): """Generate new information.""" raise NotImplementedError( "You need to overwrite the default create_information.") def transmit(self, what=None, to_whom=None): info = self.create_information() super(Source, self).transmit(to_whom=to_whom, what=info) class RandomBinaryStringSource(Source): """An agent whose genome and memome are random binary strings. The source only transmits; it does not update. """ __mapper_args__ = {"polymorphic_identity": "random_binary_string_source"} def create_information(self): info = Info( origin=self, origin_uuid=self.uuid, contents=self._binary_string()) return info def _binary_string(self): return "".join([str(random.randint(0, 1)) for i in range(2)])
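The bug fixed above was a plumbing error: create_information built an Info but discarded it, and transmit forwarded the caller's `what` (None by default), so nothing meaningful was ever sent. Returning the Info and threading it into the parent transmit closes that gap. The payload itself is just a short random bit string; a standalone sketch of that piece, with a length parameter added here purely for illustration:

import random

def binary_string(length=2):
    # Mirrors RandomBinaryStringSource._binary_string: `length` random bits.
    return ''.join(str(random.randint(0, 1)) for _ in range(length))

print(binary_string())   # one of '00', '01', '10', '11'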
8f0502d618a35b2b63ee280caee91c508482dbf4
services/api/app.py
services/api/app.py
import json import itertools import logging __author__ = 'patrickwalsh' from flask import Flask from redis import Redis app = Flask(__name__) logger = logging.getLogger(__name__) redis = Redis() IID_INDEX = 'index' @app.route('/intersections') def get_all_intersections(): try: nodes = redis.smembers(IID_INDEX) feed = itertools.imap(redis.hgetall, nodes) dehydrated = itertools.imap(dehydrate, feed) return json.dumps(dict( objects=list(dehydrated) )) except Exception as e: logger.exception(e) def dehydrate(node): keys = ['sid', 'id', 'updated_at', 'lat', 'lon', 'osm_id', 'min_cong', 'max_cong', 'avg_cong'] data = {key: node.get(key, None) for key in keys} return data if __name__ == '__main__': app.run()
import json import itertools import logging __author__ = 'patrickwalsh' from flask import Flask from redis import Redis app = Flask(__name__) logger = logging.getLogger(__name__) redis = Redis() IID_INDEX = 'index' @app.route('/intersections') def get_all_intersections(): try: # nodes = redis.smembers(IID_INDEX) # all nodes are namespaced with iid nodes = redis.keys('iid:*') feed = itertools.imap(redis.hgetall, nodes) dehydrated = itertools.imap(dehydrate, feed) return json.dumps(dict( objects=list(dehydrated) )) except Exception as e: logger.exception(e) def dehydrate(node): keys = ['sid', 'id', 'updated_at', 'lat', 'lon', 'osm_id', 'min_cong', 'max_cong', 'avg_cong'] data = {key: node.get(key, None) for key in keys} return data if __name__ == '__main__': app.run()
Update fetching nodes to manually get all keys with iid prefix instead of using an index
Update fetching nodes to manually get all keys with iid prefix instead of using an index
Python
mit
pnw/Chch-openhack,pnw/Chch-openhack
import json import itertools import logging __author__ = 'patrickwalsh' from flask import Flask from redis import Redis app = Flask(__name__) logger = logging.getLogger(__name__) redis = Redis() IID_INDEX = 'index' @app.route('/intersections') def get_all_intersections(): try: nodes = redis.smembers(IID_INDEX) feed = itertools.imap(redis.hgetall, nodes) dehydrated = itertools.imap(dehydrate, feed) return json.dumps(dict( objects=list(dehydrated) )) except Exception as e: logger.exception(e) def dehydrate(node): keys = ['sid', 'id', 'updated_at', 'lat', 'lon', 'osm_id', 'min_cong', 'max_cong', 'avg_cong'] data = {key: node.get(key, None) for key in keys} return data if __name__ == '__main__': app.run()Update fetching nodes to manually get all keys with iid prefix instead of using an index
import json import itertools import logging __author__ = 'patrickwalsh' from flask import Flask from redis import Redis app = Flask(__name__) logger = logging.getLogger(__name__) redis = Redis() IID_INDEX = 'index' @app.route('/intersections') def get_all_intersections(): try: # nodes = redis.smembers(IID_INDEX) # all nodes are namespaced with iid nodes = redis.keys('iid:*') feed = itertools.imap(redis.hgetall, nodes) dehydrated = itertools.imap(dehydrate, feed) return json.dumps(dict( objects=list(dehydrated) )) except Exception as e: logger.exception(e) def dehydrate(node): keys = ['sid', 'id', 'updated_at', 'lat', 'lon', 'osm_id', 'min_cong', 'max_cong', 'avg_cong'] data = {key: node.get(key, None) for key in keys} return data if __name__ == '__main__': app.run()
<commit_before>import json import itertools import logging __author__ = 'patrickwalsh' from flask import Flask from redis import Redis app = Flask(__name__) logger = logging.getLogger(__name__) redis = Redis() IID_INDEX = 'index' @app.route('/intersections') def get_all_intersections(): try: nodes = redis.smembers(IID_INDEX) feed = itertools.imap(redis.hgetall, nodes) dehydrated = itertools.imap(dehydrate, feed) return json.dumps(dict( objects=list(dehydrated) )) except Exception as e: logger.exception(e) def dehydrate(node): keys = ['sid', 'id', 'updated_at', 'lat', 'lon', 'osm_id', 'min_cong', 'max_cong', 'avg_cong'] data = {key: node.get(key, None) for key in keys} return data if __name__ == '__main__': app.run()<commit_msg>Update fetching nodes to manually get all keys with iid prefix instead of using an index<commit_after>
import json import itertools import logging __author__ = 'patrickwalsh' from flask import Flask from redis import Redis app = Flask(__name__) logger = logging.getLogger(__name__) redis = Redis() IID_INDEX = 'index' @app.route('/intersections') def get_all_intersections(): try: # nodes = redis.smembers(IID_INDEX) # all nodes are namespaced with iid nodes = redis.keys('iid:*') feed = itertools.imap(redis.hgetall, nodes) dehydrated = itertools.imap(dehydrate, feed) return json.dumps(dict( objects=list(dehydrated) )) except Exception as e: logger.exception(e) def dehydrate(node): keys = ['sid', 'id', 'updated_at', 'lat', 'lon', 'osm_id', 'min_cong', 'max_cong', 'avg_cong'] data = {key: node.get(key, None) for key in keys} return data if __name__ == '__main__': app.run()
import json import itertools import logging __author__ = 'patrickwalsh' from flask import Flask from redis import Redis app = Flask(__name__) logger = logging.getLogger(__name__) redis = Redis() IID_INDEX = 'index' @app.route('/intersections') def get_all_intersections(): try: nodes = redis.smembers(IID_INDEX) feed = itertools.imap(redis.hgetall, nodes) dehydrated = itertools.imap(dehydrate, feed) return json.dumps(dict( objects=list(dehydrated) )) except Exception as e: logger.exception(e) def dehydrate(node): keys = ['sid', 'id', 'updated_at', 'lat', 'lon', 'osm_id', 'min_cong', 'max_cong', 'avg_cong'] data = {key: node.get(key, None) for key in keys} return data if __name__ == '__main__': app.run()Update fetching nodes to manually get all keys with iid prefix instead of using an indeximport json import itertools import logging __author__ = 'patrickwalsh' from flask import Flask from redis import Redis app = Flask(__name__) logger = logging.getLogger(__name__) redis = Redis() IID_INDEX = 'index' @app.route('/intersections') def get_all_intersections(): try: # nodes = redis.smembers(IID_INDEX) # all nodes are namespaced with iid nodes = redis.keys('iid:*') feed = itertools.imap(redis.hgetall, nodes) dehydrated = itertools.imap(dehydrate, feed) return json.dumps(dict( objects=list(dehydrated) )) except Exception as e: logger.exception(e) def dehydrate(node): keys = ['sid', 'id', 'updated_at', 'lat', 'lon', 'osm_id', 'min_cong', 'max_cong', 'avg_cong'] data = {key: node.get(key, None) for key in keys} return data if __name__ == '__main__': app.run()
<commit_before>import json import itertools import logging __author__ = 'patrickwalsh' from flask import Flask from redis import Redis app = Flask(__name__) logger = logging.getLogger(__name__) redis = Redis() IID_INDEX = 'index' @app.route('/intersections') def get_all_intersections(): try: nodes = redis.smembers(IID_INDEX) feed = itertools.imap(redis.hgetall, nodes) dehydrated = itertools.imap(dehydrate, feed) return json.dumps(dict( objects=list(dehydrated) )) except Exception as e: logger.exception(e) def dehydrate(node): keys = ['sid', 'id', 'updated_at', 'lat', 'lon', 'osm_id', 'min_cong', 'max_cong', 'avg_cong'] data = {key: node.get(key, None) for key in keys} return data if __name__ == '__main__': app.run()<commit_msg>Update fetching nodes to manually get all keys with iid prefix instead of using an index<commit_after>import json import itertools import logging __author__ = 'patrickwalsh' from flask import Flask from redis import Redis app = Flask(__name__) logger = logging.getLogger(__name__) redis = Redis() IID_INDEX = 'index' @app.route('/intersections') def get_all_intersections(): try: # nodes = redis.smembers(IID_INDEX) # all nodes are namespaced with iid nodes = redis.keys('iid:*') feed = itertools.imap(redis.hgetall, nodes) dehydrated = itertools.imap(dehydrate, feed) return json.dumps(dict( objects=list(dehydrated) )) except Exception as e: logger.exception(e) def dehydrate(node): keys = ['sid', 'id', 'updated_at', 'lat', 'lon', 'osm_id', 'min_cong', 'max_cong', 'avg_cong'] data = {key: node.get(key, None) for key in keys} return data if __name__ == '__main__': app.run()
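A note on the pattern in this record: redis.keys('iid:*') issues the blocking KEYS command, which walks the entire keyspace in a single call and can stall the server once the dataset grows. redis-py exposes the incremental SCAN command for the same job; a drop-in sketch against the same client object, assuming nothing else in the view changes:

    # scan_iter walks the keyspace in small, non-blocking increments
    nodes = redis.scan_iter(match='iid:*')
    feed = itertools.imap(redis.hgetall, nodes)

Note also that itertools.imap exists only on Python 2; on Python 3 the built-in map is already lazy.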
10ef76977e724cff86361db07a7fcb844d8376e7
scrapi/util.py
scrapi/util.py
from datetime import datetime import pytz def timestamp(): return pytz.utc.localize(datetime.utcnow()).isoformat().decode('utf-8') def copy_to_unicode(element): """ used to transform the lxml version of unicode to a standard version of unicode that can be pickalable - necessary for linting """ if isinstance(element, dict): for key, val in element.items(): element[key] = copy_to_unicode(val) elif isinstance(element, list): for idx, item in enumerate(element): element[idx] = copy_to_unicode(item) else: try: # A dirty way to convert to unicode in python 2 + 3.3+ element = u''.join(element) except TypeError: pass return element def stamp_from_raw(raw_doc, **kwargs): kwargs['normalizeFinished'] = timestamp() stamps = raw_doc['timestamps'] stamps.update(kwargs) return stamps def format_date_with_slashes(date): return date.strftime('%m/%d/%Y')
from datetime import datetime import pytz def timestamp(): return pytz.utc.localize(datetime.utcnow()).isoformat().decode('utf-8') def copy_to_unicode(element): """ used to transform the lxml version of unicode to a standard version of unicode that can be pickalable - necessary for linting """ if isinstance(element, dict): for key, val in element.items(): element[key] = copy_to_unicode(val) elif isinstance(element, list): for idx, item in enumerate(element): element[idx] = copy_to_unicode(item) else: try: # A dirty way to convert to unicode in python 2 + 3.3+ element = u''.join(element) except TypeError: pass return element def stamp_from_raw(raw_doc, **kwargs): kwargs['normalizeFinished'] = timestamp() stamps = raw_doc['timestamps'] stamps.update(kwargs) return stamps def format_date_with_slashes(date): return date.strftime('%m/%d/%Y') def create_rename_iterable(documents, source, target, dry): return [(doc, source, target, dry) for doc in documents]
Add scrapi create rename iterable if we want to move to chunks in the future
Add scrapi create rename iterable if we want to move to chunks in the future
Python
apache-2.0
mehanig/scrapi,fabianvf/scrapi,fabianvf/scrapi,jeffreyliu3230/scrapi,erinspace/scrapi,felliott/scrapi,alexgarciac/scrapi,CenterForOpenScience/scrapi,felliott/scrapi,CenterForOpenScience/scrapi,icereval/scrapi,mehanig/scrapi,erinspace/scrapi,ostwald/scrapi
from datetime import datetime

import pytz


def timestamp():
    return pytz.utc.localize(datetime.utcnow()).isoformat().decode('utf-8')


def copy_to_unicode(element):
    """ used to transform the lxml version of unicode to a
    standard version of unicode that can be pickalable -
    necessary for linting
    """
    if isinstance(element, dict):
        for key, val in element.items():
            element[key] = copy_to_unicode(val)
    elif isinstance(element, list):
        for idx, item in enumerate(element):
            element[idx] = copy_to_unicode(item)
    else:
        try:
            # A dirty way to convert to unicode in python 2 + 3.3+
            element = u''.join(element)
        except TypeError:
            pass
    return element


def stamp_from_raw(raw_doc, **kwargs):
    kwargs['normalizeFinished'] = timestamp()
    stamps = raw_doc['timestamps']
    stamps.update(kwargs)
    return stamps


def format_date_with_slashes(date):
    return date.strftime('%m/%d/%Y')
Add scrapi create rename iterable if we want to move to chunks in the future
from datetime import datetime import pytz def timestamp(): return pytz.utc.localize(datetime.utcnow()).isoformat().decode('utf-8') def copy_to_unicode(element): """ used to transform the lxml version of unicode to a standard version of unicode that can be pickalable - necessary for linting """ if isinstance(element, dict): for key, val in element.items(): element[key] = copy_to_unicode(val) elif isinstance(element, list): for idx, item in enumerate(element): element[idx] = copy_to_unicode(item) else: try: # A dirty way to convert to unicode in python 2 + 3.3+ element = u''.join(element) except TypeError: pass return element def stamp_from_raw(raw_doc, **kwargs): kwargs['normalizeFinished'] = timestamp() stamps = raw_doc['timestamps'] stamps.update(kwargs) return stamps def format_date_with_slashes(date): return date.strftime('%m/%d/%Y') def create_rename_iterable(documents, source, target, dry): return [(doc, source, target, dry) for doc in documents]
<commit_before>from datetime import datetime

import pytz


def timestamp():
    return pytz.utc.localize(datetime.utcnow()).isoformat().decode('utf-8')


def copy_to_unicode(element):
    """ used to transform the lxml version of unicode to a
    standard version of unicode that can be pickalable -
    necessary for linting
    """
    if isinstance(element, dict):
        for key, val in element.items():
            element[key] = copy_to_unicode(val)
    elif isinstance(element, list):
        for idx, item in enumerate(element):
            element[idx] = copy_to_unicode(item)
    else:
        try:
            # A dirty way to convert to unicode in python 2 + 3.3+
            element = u''.join(element)
        except TypeError:
            pass
    return element


def stamp_from_raw(raw_doc, **kwargs):
    kwargs['normalizeFinished'] = timestamp()
    stamps = raw_doc['timestamps']
    stamps.update(kwargs)
    return stamps


def format_date_with_slashes(date):
    return date.strftime('%m/%d/%Y')
<commit_msg>Add scrapi create rename iterable if we want to move to chunks in the future<commit_after>
from datetime import datetime import pytz def timestamp(): return pytz.utc.localize(datetime.utcnow()).isoformat().decode('utf-8') def copy_to_unicode(element): """ used to transform the lxml version of unicode to a standard version of unicode that can be pickalable - necessary for linting """ if isinstance(element, dict): for key, val in element.items(): element[key] = copy_to_unicode(val) elif isinstance(element, list): for idx, item in enumerate(element): element[idx] = copy_to_unicode(item) else: try: # A dirty way to convert to unicode in python 2 + 3.3+ element = u''.join(element) except TypeError: pass return element def stamp_from_raw(raw_doc, **kwargs): kwargs['normalizeFinished'] = timestamp() stamps = raw_doc['timestamps'] stamps.update(kwargs) return stamps def format_date_with_slashes(date): return date.strftime('%m/%d/%Y') def create_rename_iterable(documents, source, target, dry): return [(doc, source, target, dry) for doc in documents]
from datetime import datetime

import pytz


def timestamp():
    return pytz.utc.localize(datetime.utcnow()).isoformat().decode('utf-8')


def copy_to_unicode(element):
    """ used to transform the lxml version of unicode to a
    standard version of unicode that can be pickalable -
    necessary for linting
    """
    if isinstance(element, dict):
        for key, val in element.items():
            element[key] = copy_to_unicode(val)
    elif isinstance(element, list):
        for idx, item in enumerate(element):
            element[idx] = copy_to_unicode(item)
    else:
        try:
            # A dirty way to convert to unicode in python 2 + 3.3+
            element = u''.join(element)
        except TypeError:
            pass
    return element


def stamp_from_raw(raw_doc, **kwargs):
    kwargs['normalizeFinished'] = timestamp()
    stamps = raw_doc['timestamps']
    stamps.update(kwargs)
    return stamps


def format_date_with_slashes(date):
    return date.strftime('%m/%d/%Y')
Add scrapi create rename iterable if we want to move to chunks in the futurefrom datetime import datetime

import pytz


def timestamp():
    return pytz.utc.localize(datetime.utcnow()).isoformat().decode('utf-8')


def copy_to_unicode(element):
    """ used to transform the lxml version of unicode to a
    standard version of unicode that can be pickalable -
    necessary for linting
    """
    if isinstance(element, dict):
        for key, val in element.items():
            element[key] = copy_to_unicode(val)
    elif isinstance(element, list):
        for idx, item in enumerate(element):
            element[idx] = copy_to_unicode(item)
    else:
        try:
            # A dirty way to convert to unicode in python 2 + 3.3+
            element = u''.join(element)
        except TypeError:
            pass
    return element


def stamp_from_raw(raw_doc, **kwargs):
    kwargs['normalizeFinished'] = timestamp()
    stamps = raw_doc['timestamps']
    stamps.update(kwargs)
    return stamps


def format_date_with_slashes(date):
    return date.strftime('%m/%d/%Y')


def create_rename_iterable(documents, source, target, dry):
    return [(doc, source, target, dry) for doc in documents]
<commit_before>from datetime import datetime

import pytz


def timestamp():
    return pytz.utc.localize(datetime.utcnow()).isoformat().decode('utf-8')


def copy_to_unicode(element):
    """ used to transform the lxml version of unicode to a
    standard version of unicode that can be pickalable -
    necessary for linting
    """
    if isinstance(element, dict):
        for key, val in element.items():
            element[key] = copy_to_unicode(val)
    elif isinstance(element, list):
        for idx, item in enumerate(element):
            element[idx] = copy_to_unicode(item)
    else:
        try:
            # A dirty way to convert to unicode in python 2 + 3.3+
            element = u''.join(element)
        except TypeError:
            pass
    return element


def stamp_from_raw(raw_doc, **kwargs):
    kwargs['normalizeFinished'] = timestamp()
    stamps = raw_doc['timestamps']
    stamps.update(kwargs)
    return stamps


def format_date_with_slashes(date):
    return date.strftime('%m/%d/%Y')
<commit_msg>Add scrapi create rename iterable if we want to move to chunks in the future<commit_after>from datetime import datetime

import pytz


def timestamp():
    return pytz.utc.localize(datetime.utcnow()).isoformat().decode('utf-8')


def copy_to_unicode(element):
    """ used to transform the lxml version of unicode to a
    standard version of unicode that can be pickalable -
    necessary for linting
    """
    if isinstance(element, dict):
        for key, val in element.items():
            element[key] = copy_to_unicode(val)
    elif isinstance(element, list):
        for idx, item in enumerate(element):
            element[idx] = copy_to_unicode(item)
    else:
        try:
            # A dirty way to convert to unicode in python 2 + 3.3+
            element = u''.join(element)
        except TypeError:
            pass
    return element


def stamp_from_raw(raw_doc, **kwargs):
    kwargs['normalizeFinished'] = timestamp()
    stamps = raw_doc['timestamps']
    stamps.update(kwargs)
    return stamps


def format_date_with_slashes(date):
    return date.strftime('%m/%d/%Y')


def create_rename_iterable(documents, source, target, dry):
    return [(doc, source, target, dry) for doc in documents]
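The helper added in this record materializes one argument tuple per document up front, and the commit message anticipates moving to chunked processing later. A sketch of how the same iterable could be consumed in fixed-size batches (chunked and process_batch are hypothetical names, not part of scrapi):

from itertools import islice

def chunked(iterable, size):
    it = iter(iterable)
    while True:
        chunk = list(islice(it, size))
        if not chunk:
            return
        yield chunk

for batch in chunked(create_rename_iterable(documents, source, target, dry), 500):
    process_batch(batch)  # hypothetical consumer, e.g. one task per batch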
bab5ed65fb9530b9cd3e9bfabc1e2632da31d106
knowledge_repo/app/auth_providers/ldap.py
knowledge_repo/app/auth_providers/ldap.py
from ..auth_provider import KnowledgeAuthProvider from ..models import User from flask import ( redirect, render_template, request, url_for, ) from ldap3 import Server, Connection, ALL from knowledge_repo.constants import AUTH_LOGIN_FORM, LDAP, USERNAME class LdapAuthProvider(KnowledgeAuthProvider): _registry_keys = [LDAP] def init(self): if not self.app.config.get('LDAP_SERVER'): raise RuntimeError( 'Use of LDAP authentication requires specification ' 'of the LDAP_SERVER configuration variable.') self.server = Server(self.app.config['LDAP_SERVER'], get_info=ALL) def prompt(self): return render_template(AUTH_LOGIN_FORM, skip_password=False) def authorize(self): user = self.get_user() if user is None: raise RuntimeError('No such user or invalid credentials') if self.validate(user) is False: return render_template(AUTH_LOGIN_FORM, error_message='Uh-oh, it looks like something in ' 'your credentials was wrong...') self._perform_login(user) return redirect(url_for('index.render_feed')) def validate(self, user): userdn = self.app.config['LDAP_USERDN_SCHEMA'].format(user_id=user.identifier) password = request.form['password'] conn = Connection(self.server, user=userdn, password=password) return conn.bind() def get_user(self): return User(identifier=request.form[USERNAME])
from ..auth_provider import KnowledgeAuthProvider from ..models import User from flask import ( redirect, render_template, request, url_for, ) from ldap3 import Connection, Server, ALL from knowledge_repo.constants import AUTH_LOGIN_FORM, LDAP, USERNAME class LdapAuthProvider(KnowledgeAuthProvider): _registry_keys = [LDAP] def init(self): if not self.app.config.get('LDAP_SERVER'): raise RuntimeError( 'Use of LDAP authentication requires specification ' 'of the LDAP_SERVER configuration variable.') self.server = Server(self.app.config['LDAP_SERVER'], get_info=ALL) def prompt(self): return render_template(AUTH_LOGIN_FORM, skip_password=False) def authorize(self): user = self.get_user() if user is None: raise RuntimeError('No such user or invalid credentials') if self.validate(user) is False: return render_template( AUTH_LOGIN_FORM, error_message='Uh-oh, it looks like something in ' 'your credentials was wrong...') self._perform_login(user) return redirect(url_for('index.render_feed')) def validate(self, user): userdn = self.app.config['LDAP_USERDN_SCHEMA'].format(user_id=user.identifier) password = request.form['password'] conn = Connection(self.server, user=userdn, password=password) return conn.bind() def get_user(self): return User(identifier=request.form[USERNAME])
Fix a lint indent issue
Fix a lint indent issue
Python
apache-2.0
airbnb/knowledge-repo,airbnb/knowledge-repo,airbnb/knowledge-repo,airbnb/knowledge-repo,airbnb/knowledge-repo
from ..auth_provider import KnowledgeAuthProvider from ..models import User from flask import ( redirect, render_template, request, url_for, ) from ldap3 import Server, Connection, ALL from knowledge_repo.constants import AUTH_LOGIN_FORM, LDAP, USERNAME class LdapAuthProvider(KnowledgeAuthProvider): _registry_keys = [LDAP] def init(self): if not self.app.config.get('LDAP_SERVER'): raise RuntimeError( 'Use of LDAP authentication requires specification ' 'of the LDAP_SERVER configuration variable.') self.server = Server(self.app.config['LDAP_SERVER'], get_info=ALL) def prompt(self): return render_template(AUTH_LOGIN_FORM, skip_password=False) def authorize(self): user = self.get_user() if user is None: raise RuntimeError('No such user or invalid credentials') if self.validate(user) is False: return render_template(AUTH_LOGIN_FORM, error_message='Uh-oh, it looks like something in ' 'your credentials was wrong...') self._perform_login(user) return redirect(url_for('index.render_feed')) def validate(self, user): userdn = self.app.config['LDAP_USERDN_SCHEMA'].format(user_id=user.identifier) password = request.form['password'] conn = Connection(self.server, user=userdn, password=password) return conn.bind() def get_user(self): return User(identifier=request.form[USERNAME]) Fix a lint indent issue
from ..auth_provider import KnowledgeAuthProvider from ..models import User from flask import ( redirect, render_template, request, url_for, ) from ldap3 import Connection, Server, ALL from knowledge_repo.constants import AUTH_LOGIN_FORM, LDAP, USERNAME class LdapAuthProvider(KnowledgeAuthProvider): _registry_keys = [LDAP] def init(self): if not self.app.config.get('LDAP_SERVER'): raise RuntimeError( 'Use of LDAP authentication requires specification ' 'of the LDAP_SERVER configuration variable.') self.server = Server(self.app.config['LDAP_SERVER'], get_info=ALL) def prompt(self): return render_template(AUTH_LOGIN_FORM, skip_password=False) def authorize(self): user = self.get_user() if user is None: raise RuntimeError('No such user or invalid credentials') if self.validate(user) is False: return render_template( AUTH_LOGIN_FORM, error_message='Uh-oh, it looks like something in ' 'your credentials was wrong...') self._perform_login(user) return redirect(url_for('index.render_feed')) def validate(self, user): userdn = self.app.config['LDAP_USERDN_SCHEMA'].format(user_id=user.identifier) password = request.form['password'] conn = Connection(self.server, user=userdn, password=password) return conn.bind() def get_user(self): return User(identifier=request.form[USERNAME])
<commit_before>from ..auth_provider import KnowledgeAuthProvider from ..models import User from flask import ( redirect, render_template, request, url_for, ) from ldap3 import Server, Connection, ALL from knowledge_repo.constants import AUTH_LOGIN_FORM, LDAP, USERNAME class LdapAuthProvider(KnowledgeAuthProvider): _registry_keys = [LDAP] def init(self): if not self.app.config.get('LDAP_SERVER'): raise RuntimeError( 'Use of LDAP authentication requires specification ' 'of the LDAP_SERVER configuration variable.') self.server = Server(self.app.config['LDAP_SERVER'], get_info=ALL) def prompt(self): return render_template(AUTH_LOGIN_FORM, skip_password=False) def authorize(self): user = self.get_user() if user is None: raise RuntimeError('No such user or invalid credentials') if self.validate(user) is False: return render_template(AUTH_LOGIN_FORM, error_message='Uh-oh, it looks like something in ' 'your credentials was wrong...') self._perform_login(user) return redirect(url_for('index.render_feed')) def validate(self, user): userdn = self.app.config['LDAP_USERDN_SCHEMA'].format(user_id=user.identifier) password = request.form['password'] conn = Connection(self.server, user=userdn, password=password) return conn.bind() def get_user(self): return User(identifier=request.form[USERNAME]) <commit_msg>Fix a lint indent issue<commit_after>
from ..auth_provider import KnowledgeAuthProvider from ..models import User from flask import ( redirect, render_template, request, url_for, ) from ldap3 import Connection, Server, ALL from knowledge_repo.constants import AUTH_LOGIN_FORM, LDAP, USERNAME class LdapAuthProvider(KnowledgeAuthProvider): _registry_keys = [LDAP] def init(self): if not self.app.config.get('LDAP_SERVER'): raise RuntimeError( 'Use of LDAP authentication requires specification ' 'of the LDAP_SERVER configuration variable.') self.server = Server(self.app.config['LDAP_SERVER'], get_info=ALL) def prompt(self): return render_template(AUTH_LOGIN_FORM, skip_password=False) def authorize(self): user = self.get_user() if user is None: raise RuntimeError('No such user or invalid credentials') if self.validate(user) is False: return render_template( AUTH_LOGIN_FORM, error_message='Uh-oh, it looks like something in ' 'your credentials was wrong...') self._perform_login(user) return redirect(url_for('index.render_feed')) def validate(self, user): userdn = self.app.config['LDAP_USERDN_SCHEMA'].format(user_id=user.identifier) password = request.form['password'] conn = Connection(self.server, user=userdn, password=password) return conn.bind() def get_user(self): return User(identifier=request.form[USERNAME])
from ..auth_provider import KnowledgeAuthProvider from ..models import User from flask import ( redirect, render_template, request, url_for, ) from ldap3 import Server, Connection, ALL from knowledge_repo.constants import AUTH_LOGIN_FORM, LDAP, USERNAME class LdapAuthProvider(KnowledgeAuthProvider): _registry_keys = [LDAP] def init(self): if not self.app.config.get('LDAP_SERVER'): raise RuntimeError( 'Use of LDAP authentication requires specification ' 'of the LDAP_SERVER configuration variable.') self.server = Server(self.app.config['LDAP_SERVER'], get_info=ALL) def prompt(self): return render_template(AUTH_LOGIN_FORM, skip_password=False) def authorize(self): user = self.get_user() if user is None: raise RuntimeError('No such user or invalid credentials') if self.validate(user) is False: return render_template(AUTH_LOGIN_FORM, error_message='Uh-oh, it looks like something in ' 'your credentials was wrong...') self._perform_login(user) return redirect(url_for('index.render_feed')) def validate(self, user): userdn = self.app.config['LDAP_USERDN_SCHEMA'].format(user_id=user.identifier) password = request.form['password'] conn = Connection(self.server, user=userdn, password=password) return conn.bind() def get_user(self): return User(identifier=request.form[USERNAME]) Fix a lint indent issuefrom ..auth_provider import KnowledgeAuthProvider from ..models import User from flask import ( redirect, render_template, request, url_for, ) from ldap3 import Connection, Server, ALL from knowledge_repo.constants import AUTH_LOGIN_FORM, LDAP, USERNAME class LdapAuthProvider(KnowledgeAuthProvider): _registry_keys = [LDAP] def init(self): if not self.app.config.get('LDAP_SERVER'): raise RuntimeError( 'Use of LDAP authentication requires specification ' 'of the LDAP_SERVER configuration variable.') self.server = Server(self.app.config['LDAP_SERVER'], get_info=ALL) def prompt(self): return render_template(AUTH_LOGIN_FORM, skip_password=False) def authorize(self): user = self.get_user() if user is None: raise RuntimeError('No such user or invalid credentials') if self.validate(user) is False: return render_template( AUTH_LOGIN_FORM, error_message='Uh-oh, it looks like something in ' 'your credentials was wrong...') self._perform_login(user) return redirect(url_for('index.render_feed')) def validate(self, user): userdn = self.app.config['LDAP_USERDN_SCHEMA'].format(user_id=user.identifier) password = request.form['password'] conn = Connection(self.server, user=userdn, password=password) return conn.bind() def get_user(self): return User(identifier=request.form[USERNAME])
<commit_before>from ..auth_provider import KnowledgeAuthProvider from ..models import User from flask import ( redirect, render_template, request, url_for, ) from ldap3 import Server, Connection, ALL from knowledge_repo.constants import AUTH_LOGIN_FORM, LDAP, USERNAME class LdapAuthProvider(KnowledgeAuthProvider): _registry_keys = [LDAP] def init(self): if not self.app.config.get('LDAP_SERVER'): raise RuntimeError( 'Use of LDAP authentication requires specification ' 'of the LDAP_SERVER configuration variable.') self.server = Server(self.app.config['LDAP_SERVER'], get_info=ALL) def prompt(self): return render_template(AUTH_LOGIN_FORM, skip_password=False) def authorize(self): user = self.get_user() if user is None: raise RuntimeError('No such user or invalid credentials') if self.validate(user) is False: return render_template(AUTH_LOGIN_FORM, error_message='Uh-oh, it looks like something in ' 'your credentials was wrong...') self._perform_login(user) return redirect(url_for('index.render_feed')) def validate(self, user): userdn = self.app.config['LDAP_USERDN_SCHEMA'].format(user_id=user.identifier) password = request.form['password'] conn = Connection(self.server, user=userdn, password=password) return conn.bind() def get_user(self): return User(identifier=request.form[USERNAME]) <commit_msg>Fix a lint indent issue<commit_after>from ..auth_provider import KnowledgeAuthProvider from ..models import User from flask import ( redirect, render_template, request, url_for, ) from ldap3 import Connection, Server, ALL from knowledge_repo.constants import AUTH_LOGIN_FORM, LDAP, USERNAME class LdapAuthProvider(KnowledgeAuthProvider): _registry_keys = [LDAP] def init(self): if not self.app.config.get('LDAP_SERVER'): raise RuntimeError( 'Use of LDAP authentication requires specification ' 'of the LDAP_SERVER configuration variable.') self.server = Server(self.app.config['LDAP_SERVER'], get_info=ALL) def prompt(self): return render_template(AUTH_LOGIN_FORM, skip_password=False) def authorize(self): user = self.get_user() if user is None: raise RuntimeError('No such user or invalid credentials') if self.validate(user) is False: return render_template( AUTH_LOGIN_FORM, error_message='Uh-oh, it looks like something in ' 'your credentials was wrong...') self._perform_login(user) return redirect(url_for('index.render_feed')) def validate(self, user): userdn = self.app.config['LDAP_USERDN_SCHEMA'].format(user_id=user.identifier) password = request.form['password'] conn = Connection(self.server, user=userdn, password=password) return conn.bind() def get_user(self): return User(identifier=request.form[USERNAME])
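Beyond the continuation-indent fix, one detail of validate() worth flagging: the Connection is bound but never released, so each login attempt can leave a socket open. A sketch that keeps the same boolean return value while unbinding in all cases (ldap3's unbind() closes the connection):

def validate(self, user):
    userdn = self.app.config['LDAP_USERDN_SCHEMA'].format(user_id=user.identifier)
    password = request.form['password']
    conn = Connection(self.server, user=userdn, password=password)
    try:
        return conn.bind()  # True on a successful bind, False otherwise
    finally:
        conn.unbind()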
c47f93796bfc4f9026e5451121de7a419ed88e96
lobster/cmssw/data/merge_cfg.py
lobster/cmssw/data/merge_cfg.py
import FWCore.ParameterSet.Config as cms from FWCore.ParameterSet.VarParsing import VarParsing import subprocess import os import sys options = VarParsing('analysis') options.register('output', mytype=VarParsing.varType.string) options.parseArguments() process = cms.Process("PickEvent") process.source = cms.Source ("PoolSource", fileNames = cms.untracked.vstring(''), duplicateCheckMode = cms.untracked.string('noDuplicateCheck') ) process.out = cms.OutputModule("PoolOutputModule", fileName = cms.untracked.string(options.output) ) process.end = cms.EndPath(process.out)
import FWCore.ParameterSet.Config as cms from FWCore.ParameterSet.VarParsing import VarParsing import subprocess import os import sys options = VarParsing('analysis') options.register('output', mytype=VarParsing.varType.string) options.register('loginterval', 1000, mytype=VarParsing.varType.int) options.parseArguments() process = cms.Process("PickEvent") process.load('FWCore.MessageService.MessageLogger_cfi') process.MessageLogger.cerr.FwkReport.reportEvery = options.loginterval process.source = cms.Source ("PoolSource", fileNames = cms.untracked.vstring(''), duplicateCheckMode = cms.untracked.string('noDuplicateCheck') ) process.out = cms.OutputModule("PoolOutputModule", fileName = cms.untracked.string(options.output) ) process.end = cms.EndPath(process.out)
Trim down merge verbosity to avoid overly large log files.
Trim down merge verbosity to avoid overly large log files.
Python
mit
matz-e/lobster,matz-e/lobster,matz-e/lobster
import FWCore.ParameterSet.Config as cms from FWCore.ParameterSet.VarParsing import VarParsing import subprocess import os import sys options = VarParsing('analysis') options.register('output', mytype=VarParsing.varType.string) options.parseArguments() process = cms.Process("PickEvent") process.source = cms.Source ("PoolSource", fileNames = cms.untracked.vstring(''), duplicateCheckMode = cms.untracked.string('noDuplicateCheck') ) process.out = cms.OutputModule("PoolOutputModule", fileName = cms.untracked.string(options.output) ) process.end = cms.EndPath(process.out) Trim down merge verbosity to avoid overly large log files.
import FWCore.ParameterSet.Config as cms from FWCore.ParameterSet.VarParsing import VarParsing import subprocess import os import sys options = VarParsing('analysis') options.register('output', mytype=VarParsing.varType.string) options.register('loginterval', 1000, mytype=VarParsing.varType.int) options.parseArguments() process = cms.Process("PickEvent") process.load('FWCore.MessageService.MessageLogger_cfi') process.MessageLogger.cerr.FwkReport.reportEvery = options.loginterval process.source = cms.Source ("PoolSource", fileNames = cms.untracked.vstring(''), duplicateCheckMode = cms.untracked.string('noDuplicateCheck') ) process.out = cms.OutputModule("PoolOutputModule", fileName = cms.untracked.string(options.output) ) process.end = cms.EndPath(process.out)
<commit_before>import FWCore.ParameterSet.Config as cms from FWCore.ParameterSet.VarParsing import VarParsing import subprocess import os import sys options = VarParsing('analysis') options.register('output', mytype=VarParsing.varType.string) options.parseArguments() process = cms.Process("PickEvent") process.source = cms.Source ("PoolSource", fileNames = cms.untracked.vstring(''), duplicateCheckMode = cms.untracked.string('noDuplicateCheck') ) process.out = cms.OutputModule("PoolOutputModule", fileName = cms.untracked.string(options.output) ) process.end = cms.EndPath(process.out) <commit_msg>Trim down merge verbosity to avoid overly large log files.<commit_after>
import FWCore.ParameterSet.Config as cms from FWCore.ParameterSet.VarParsing import VarParsing import subprocess import os import sys options = VarParsing('analysis') options.register('output', mytype=VarParsing.varType.string) options.register('loginterval', 1000, mytype=VarParsing.varType.int) options.parseArguments() process = cms.Process("PickEvent") process.load('FWCore.MessageService.MessageLogger_cfi') process.MessageLogger.cerr.FwkReport.reportEvery = options.loginterval process.source = cms.Source ("PoolSource", fileNames = cms.untracked.vstring(''), duplicateCheckMode = cms.untracked.string('noDuplicateCheck') ) process.out = cms.OutputModule("PoolOutputModule", fileName = cms.untracked.string(options.output) ) process.end = cms.EndPath(process.out)
import FWCore.ParameterSet.Config as cms from FWCore.ParameterSet.VarParsing import VarParsing import subprocess import os import sys options = VarParsing('analysis') options.register('output', mytype=VarParsing.varType.string) options.parseArguments() process = cms.Process("PickEvent") process.source = cms.Source ("PoolSource", fileNames = cms.untracked.vstring(''), duplicateCheckMode = cms.untracked.string('noDuplicateCheck') ) process.out = cms.OutputModule("PoolOutputModule", fileName = cms.untracked.string(options.output) ) process.end = cms.EndPath(process.out) Trim down merge verbosity to avoid overly large log files.import FWCore.ParameterSet.Config as cms from FWCore.ParameterSet.VarParsing import VarParsing import subprocess import os import sys options = VarParsing('analysis') options.register('output', mytype=VarParsing.varType.string) options.register('loginterval', 1000, mytype=VarParsing.varType.int) options.parseArguments() process = cms.Process("PickEvent") process.load('FWCore.MessageService.MessageLogger_cfi') process.MessageLogger.cerr.FwkReport.reportEvery = options.loginterval process.source = cms.Source ("PoolSource", fileNames = cms.untracked.vstring(''), duplicateCheckMode = cms.untracked.string('noDuplicateCheck') ) process.out = cms.OutputModule("PoolOutputModule", fileName = cms.untracked.string(options.output) ) process.end = cms.EndPath(process.out)
<commit_before>import FWCore.ParameterSet.Config as cms from FWCore.ParameterSet.VarParsing import VarParsing import subprocess import os import sys options = VarParsing('analysis') options.register('output', mytype=VarParsing.varType.string) options.parseArguments() process = cms.Process("PickEvent") process.source = cms.Source ("PoolSource", fileNames = cms.untracked.vstring(''), duplicateCheckMode = cms.untracked.string('noDuplicateCheck') ) process.out = cms.OutputModule("PoolOutputModule", fileName = cms.untracked.string(options.output) ) process.end = cms.EndPath(process.out) <commit_msg>Trim down merge verbosity to avoid overly large log files.<commit_after>import FWCore.ParameterSet.Config as cms from FWCore.ParameterSet.VarParsing import VarParsing import subprocess import os import sys options = VarParsing('analysis') options.register('output', mytype=VarParsing.varType.string) options.register('loginterval', 1000, mytype=VarParsing.varType.int) options.parseArguments() process = cms.Process("PickEvent") process.load('FWCore.MessageService.MessageLogger_cfi') process.MessageLogger.cerr.FwkReport.reportEvery = options.loginterval process.source = cms.Source ("PoolSource", fileNames = cms.untracked.vstring(''), duplicateCheckMode = cms.untracked.string('noDuplicateCheck') ) process.out = cms.OutputModule("PoolOutputModule", fileName = cms.untracked.string(options.output) ) process.end = cms.EndPath(process.out)
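Options registered through VarParsing are supplied as key=value arguments on the cmsRun command line, so the new knob in this record would be exercised like "cmsRun merge_cfg.py output=merged.root loginterval=5000" (values illustrative); reportEvery then prints one framework report every 5000 events instead of one per event.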
a63d37f6817098c75d0863ab5513e9de8369f6ff
apps/curia_vista/management/commands/update_all.py
apps/curia_vista/management/commands/update_all.py
from timeit import default_timer as timer from django.core.management.base import BaseCommand from apps.curia_vista.management.commands.update_affair_summaries import Command as ImportCommandAffairSummaries from apps.curia_vista.management.commands.update_affairs import Command as ImportCommandAffairs from apps.curia_vista.management.commands.update_committee import Command as ImportCommandCommittee from apps.curia_vista.management.commands.update_councillors import Command as ImportCommandCouncillors from apps.curia_vista.management.commands.update_factions import Command as ImportCommandFactions from apps.curia_vista.management.commands.update_parties import Command as ImportCommandParties from apps.curia_vista.management.commands.update_sessions import Command as ImportCommandSessions class Command(BaseCommand): help = 'Import/update all data from parlament.ch' commands = [ ImportCommandAffairs, ImportCommandAffairSummaries, ImportCommandCommittee, ImportCommandCouncillors, ImportCommandFactions, ImportCommandParties, ImportCommandSessions ] def handle(self, *args, **options): for cmd_class in Command.commands: start = timer() cmd_class().handle(args, options) self.stdout.write("Command '{0}' has been executed with arguments '{1}' and options '{2}'. Duration: {3}s" .format(cmd_class, args, options, timer() - start))
from timeit import default_timer as timer from django.core.management.base import BaseCommand from apps.curia_vista.management.commands.update_affair_summaries import Command as ImportCommandAffairSummaries from apps.curia_vista.management.commands.update_committee import Command as ImportCommandCommittee from apps.curia_vista.management.commands.update_councillors import Command as ImportCommandCouncillors from apps.curia_vista.management.commands.update_factions import Command as ImportCommandFactions from apps.curia_vista.management.commands.update_parties import Command as ImportCommandParties from apps.curia_vista.management.commands.update_sessions import Command as ImportCommandSessions class Command(BaseCommand): help = 'Import/update all data from parlament.ch' commands = [ ImportCommandAffairSummaries, ImportCommandCommittee, ImportCommandCouncillors, ImportCommandFactions, ImportCommandParties, ImportCommandSessions ] def handle(self, *args, **options): for cmd_class in Command.commands: start = timer() cmd_class().handle(args, options) self.stdout.write("Command '{0}' has been executed with arguments '{1}' and options '{2}'. Duration: {3}s" .format(cmd_class, args, options, timer() - start))
Remove reference to no longer existing command update_affairs
Remove reference to no longer existing command update_affairs
Python
agpl-3.0
rettichschnidi/politkarma,rettichschnidi/politkarma,rettichschnidi/politkarma,rettichschnidi/politkarma
from timeit import default_timer as timer from django.core.management.base import BaseCommand from apps.curia_vista.management.commands.update_affair_summaries import Command as ImportCommandAffairSummaries from apps.curia_vista.management.commands.update_affairs import Command as ImportCommandAffairs from apps.curia_vista.management.commands.update_committee import Command as ImportCommandCommittee from apps.curia_vista.management.commands.update_councillors import Command as ImportCommandCouncillors from apps.curia_vista.management.commands.update_factions import Command as ImportCommandFactions from apps.curia_vista.management.commands.update_parties import Command as ImportCommandParties from apps.curia_vista.management.commands.update_sessions import Command as ImportCommandSessions class Command(BaseCommand): help = 'Import/update all data from parlament.ch' commands = [ ImportCommandAffairs, ImportCommandAffairSummaries, ImportCommandCommittee, ImportCommandCouncillors, ImportCommandFactions, ImportCommandParties, ImportCommandSessions ] def handle(self, *args, **options): for cmd_class in Command.commands: start = timer() cmd_class().handle(args, options) self.stdout.write("Command '{0}' has been executed with arguments '{1}' and options '{2}'. Duration: {3}s" .format(cmd_class, args, options, timer() - start)) Remove reference to no longer existing command update_affairs
from timeit import default_timer as timer from django.core.management.base import BaseCommand from apps.curia_vista.management.commands.update_affair_summaries import Command as ImportCommandAffairSummaries from apps.curia_vista.management.commands.update_committee import Command as ImportCommandCommittee from apps.curia_vista.management.commands.update_councillors import Command as ImportCommandCouncillors from apps.curia_vista.management.commands.update_factions import Command as ImportCommandFactions from apps.curia_vista.management.commands.update_parties import Command as ImportCommandParties from apps.curia_vista.management.commands.update_sessions import Command as ImportCommandSessions class Command(BaseCommand): help = 'Import/update all data from parlament.ch' commands = [ ImportCommandAffairSummaries, ImportCommandCommittee, ImportCommandCouncillors, ImportCommandFactions, ImportCommandParties, ImportCommandSessions ] def handle(self, *args, **options): for cmd_class in Command.commands: start = timer() cmd_class().handle(args, options) self.stdout.write("Command '{0}' has been executed with arguments '{1}' and options '{2}'. Duration: {3}s" .format(cmd_class, args, options, timer() - start))
<commit_before>from timeit import default_timer as timer from django.core.management.base import BaseCommand from apps.curia_vista.management.commands.update_affair_summaries import Command as ImportCommandAffairSummaries from apps.curia_vista.management.commands.update_affairs import Command as ImportCommandAffairs from apps.curia_vista.management.commands.update_committee import Command as ImportCommandCommittee from apps.curia_vista.management.commands.update_councillors import Command as ImportCommandCouncillors from apps.curia_vista.management.commands.update_factions import Command as ImportCommandFactions from apps.curia_vista.management.commands.update_parties import Command as ImportCommandParties from apps.curia_vista.management.commands.update_sessions import Command as ImportCommandSessions class Command(BaseCommand): help = 'Import/update all data from parlament.ch' commands = [ ImportCommandAffairs, ImportCommandAffairSummaries, ImportCommandCommittee, ImportCommandCouncillors, ImportCommandFactions, ImportCommandParties, ImportCommandSessions ] def handle(self, *args, **options): for cmd_class in Command.commands: start = timer() cmd_class().handle(args, options) self.stdout.write("Command '{0}' has been executed with arguments '{1}' and options '{2}'. Duration: {3}s" .format(cmd_class, args, options, timer() - start)) <commit_msg>Remove reference to no longer existing command update_affairs<commit_after>
from timeit import default_timer as timer from django.core.management.base import BaseCommand from apps.curia_vista.management.commands.update_affair_summaries import Command as ImportCommandAffairSummaries from apps.curia_vista.management.commands.update_committee import Command as ImportCommandCommittee from apps.curia_vista.management.commands.update_councillors import Command as ImportCommandCouncillors from apps.curia_vista.management.commands.update_factions import Command as ImportCommandFactions from apps.curia_vista.management.commands.update_parties import Command as ImportCommandParties from apps.curia_vista.management.commands.update_sessions import Command as ImportCommandSessions class Command(BaseCommand): help = 'Import/update all data from parlament.ch' commands = [ ImportCommandAffairSummaries, ImportCommandCommittee, ImportCommandCouncillors, ImportCommandFactions, ImportCommandParties, ImportCommandSessions ] def handle(self, *args, **options): for cmd_class in Command.commands: start = timer() cmd_class().handle(args, options) self.stdout.write("Command '{0}' has been executed with arguments '{1}' and options '{2}'. Duration: {3}s" .format(cmd_class, args, options, timer() - start))
from timeit import default_timer as timer from django.core.management.base import BaseCommand from apps.curia_vista.management.commands.update_affair_summaries import Command as ImportCommandAffairSummaries from apps.curia_vista.management.commands.update_affairs import Command as ImportCommandAffairs from apps.curia_vista.management.commands.update_committee import Command as ImportCommandCommittee from apps.curia_vista.management.commands.update_councillors import Command as ImportCommandCouncillors from apps.curia_vista.management.commands.update_factions import Command as ImportCommandFactions from apps.curia_vista.management.commands.update_parties import Command as ImportCommandParties from apps.curia_vista.management.commands.update_sessions import Command as ImportCommandSessions class Command(BaseCommand): help = 'Import/update all data from parlament.ch' commands = [ ImportCommandAffairs, ImportCommandAffairSummaries, ImportCommandCommittee, ImportCommandCouncillors, ImportCommandFactions, ImportCommandParties, ImportCommandSessions ] def handle(self, *args, **options): for cmd_class in Command.commands: start = timer() cmd_class().handle(args, options) self.stdout.write("Command '{0}' has been executed with arguments '{1}' and options '{2}'. Duration: {3}s" .format(cmd_class, args, options, timer() - start)) Remove reference to no longer existing command update_affairsfrom timeit import default_timer as timer from django.core.management.base import BaseCommand from apps.curia_vista.management.commands.update_affair_summaries import Command as ImportCommandAffairSummaries from apps.curia_vista.management.commands.update_committee import Command as ImportCommandCommittee from apps.curia_vista.management.commands.update_councillors import Command as ImportCommandCouncillors from apps.curia_vista.management.commands.update_factions import Command as ImportCommandFactions from apps.curia_vista.management.commands.update_parties import Command as ImportCommandParties from apps.curia_vista.management.commands.update_sessions import Command as ImportCommandSessions class Command(BaseCommand): help = 'Import/update all data from parlament.ch' commands = [ ImportCommandAffairSummaries, ImportCommandCommittee, ImportCommandCouncillors, ImportCommandFactions, ImportCommandParties, ImportCommandSessions ] def handle(self, *args, **options): for cmd_class in Command.commands: start = timer() cmd_class().handle(args, options) self.stdout.write("Command '{0}' has been executed with arguments '{1}' and options '{2}'. Duration: {3}s" .format(cmd_class, args, options, timer() - start))
<commit_before>from timeit import default_timer as timer from django.core.management.base import BaseCommand from apps.curia_vista.management.commands.update_affair_summaries import Command as ImportCommandAffairSummaries from apps.curia_vista.management.commands.update_affairs import Command as ImportCommandAffairs from apps.curia_vista.management.commands.update_committee import Command as ImportCommandCommittee from apps.curia_vista.management.commands.update_councillors import Command as ImportCommandCouncillors from apps.curia_vista.management.commands.update_factions import Command as ImportCommandFactions from apps.curia_vista.management.commands.update_parties import Command as ImportCommandParties from apps.curia_vista.management.commands.update_sessions import Command as ImportCommandSessions class Command(BaseCommand): help = 'Import/update all data from parlament.ch' commands = [ ImportCommandAffairs, ImportCommandAffairSummaries, ImportCommandCommittee, ImportCommandCouncillors, ImportCommandFactions, ImportCommandParties, ImportCommandSessions ] def handle(self, *args, **options): for cmd_class in Command.commands: start = timer() cmd_class().handle(args, options) self.stdout.write("Command '{0}' has been executed with arguments '{1}' and options '{2}'. Duration: {3}s" .format(cmd_class, args, options, timer() - start)) <commit_msg>Remove reference to no longer existing command update_affairs<commit_after>from timeit import default_timer as timer from django.core.management.base import BaseCommand from apps.curia_vista.management.commands.update_affair_summaries import Command as ImportCommandAffairSummaries from apps.curia_vista.management.commands.update_committee import Command as ImportCommandCommittee from apps.curia_vista.management.commands.update_councillors import Command as ImportCommandCouncillors from apps.curia_vista.management.commands.update_factions import Command as ImportCommandFactions from apps.curia_vista.management.commands.update_parties import Command as ImportCommandParties from apps.curia_vista.management.commands.update_sessions import Command as ImportCommandSessions class Command(BaseCommand): help = 'Import/update all data from parlament.ch' commands = [ ImportCommandAffairSummaries, ImportCommandCommittee, ImportCommandCouncillors, ImportCommandFactions, ImportCommandParties, ImportCommandSessions ] def handle(self, *args, **options): for cmd_class in Command.commands: start = timer() cmd_class().handle(args, options) self.stdout.write("Command '{0}' has been executed with arguments '{1}' and options '{2}'. Duration: {3}s" .format(cmd_class, args, options, timer() - start))
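The runner in this record invokes each command's handle() directly, which bypasses Django's argument parsing and option defaults. The framework-supported way to chain commands is django.core.management.call_command; an equivalent sketch (command names taken from the imports above, the timing format is illustrative):

from timeit import default_timer as timer

from django.core.management import call_command
from django.core.management.base import BaseCommand

COMMAND_NAMES = [
    'update_affair_summaries',
    'update_committee',
    'update_councillors',
    'update_factions',
    'update_parties',
    'update_sessions',
]

class Command(BaseCommand):
    help = 'Import/update all data from parlament.ch'

    def handle(self, *args, **options):
        for name in COMMAND_NAMES:
            start = timer()
            call_command(name)  # runs the command's full parser/handle machinery
            self.stdout.write("Command '{0}' finished in {1:.1f}s".format(name, timer() - start))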
4bd16d369cc9c89973247afee6ee5ab28eeee014
tests/providers/test_dnsimple.py
tests/providers/test_dnsimple.py
# Test for one implementation of the interface from lexicon.providers.dnsimple import Provider from integration_tests import IntegrationTests from unittest import TestCase # Hook into testing framework by inheriting unittest.TestCase and reuse # the tests which *each and every* implementation of the interface must # pass, by inheritance from define_tests.TheTests class DnsimpleProviderTests(TestCase, IntegrationTests): Provider = Provider provider_name = 'dnsimple' domain = 'wopr.tech' provider_opts = {'api_endpoint': 'https://api.sandbox.dnsimple.com/v2'} def _filter_headers(self): return ['Authorization','set-cookie']
# Test for one implementation of the interface from lexicon.providers.dnsimple import Provider from integration_tests import IntegrationTests from unittest import TestCase # Hook into testing framework by inheriting unittest.TestCase and reuse # the tests which *each and every* implementation of the interface must # pass, by inheritance from define_tests.TheTests class DnsimpleProviderTests(TestCase, IntegrationTests): Provider = Provider provider_name = 'dnsimple' domain = 'wopr.tech' provider_opts = {'api_endpoint': 'https://api.sandbox.dnsimple.com/v2'} def _filter_headers(self): return ['Authorization','set-cookie','X-Dnsimple-OTP']
Add OTP to test filters
Add OTP to test filters
Python
mit
AnalogJ/lexicon,tnwhitwell/lexicon,tnwhitwell/lexicon,AnalogJ/lexicon
b1a28600e6b97ab020c69ff410aebd962b4e1e93
testproject/tablib_test/tests.py
testproject/tablib_test/tests.py
from django.test import TestCase

from django_tablib import ModelDataset, Field

from .models import TestModel


class DjangoTablibTestCase(TestCase):
    def setUp(self):
        TestModel.objects.create(field1='value')

    def test_declarative_fields(self):
        class TestModelDataset(ModelDataset):
            field1 = Field(header='Field 1')
            field2 = Field(attribute='field1')

            class Meta:
                model = TestModel

        data = TestModelDataset()
        self.assertEqual(len(data.headers), 3)
        self.assertTrue('id' in data.headers)
        self.assertFalse('field1' in data.headers)
        self.assertTrue('field2' in data.headers)
        self.assertTrue('Field 1' in data.headers)
        self.assertEqual(data[0][0], data[0][1])
from django.test import TestCase

from django_tablib import ModelDataset, Field

from .models import TestModel


class DjangoTablibTestCase(TestCase):
    def setUp(self):
        TestModel.objects.create(field1='value')

    def test_declarative_fields(self):
        class TestModelDataset(ModelDataset):
            field1 = Field(header='Field 1')
            field2 = Field(attribute='field1')

            class Meta:
                model = TestModel

        data = TestModelDataset()
        self.assertEqual(len(data.headers), 3)
        self.assertTrue('id' in data.headers)
        self.assertFalse('field1' in data.headers)
        self.assertTrue('field2' in data.headers)
        self.assertTrue('Field 1' in data.headers)
        self.assertEqual(data[0][0], data[0][1])

    def test_meta_fields(self):
        class TestModelDataset(ModelDataset):
            class Meta:
                model = TestModel
                fields = ['field1']

        data = TestModelDataset()
        self.assertEqual(len(data.headers), 1)
        self.assertFalse('id' in data.headers)
        self.assertTrue('field1' in data.headers)

    def test_meta_exclude(self):
        class TestModelDataset(ModelDataset):
            class Meta:
                model = TestModel
                exclude = ['id']

        data = TestModelDataset()
        self.assertEqual(len(data.headers), 1)
        self.assertFalse('id' in data.headers)
        self.assertTrue('field1' in data.headers)

    def test_meta_both(self):
        class TestModelDataset(ModelDataset):
            class Meta:
                model = TestModel
                fields = ['id', 'field1']
                exclude = ['id']

        data = TestModelDataset()
        self.assertEqual(len(data.headers), 1)
        self.assertFalse('id' in data.headers)
        self.assertTrue('field1' in data.headers)
Test that specifying fields and exclude in ModelDataset.Meta works.
Test that specifying fields and exclude in ModelDataset.Meta works.
Python
mit
joshourisman/django-tablib,ebrelsford/django-tablib,joshourisman/django-tablib,ebrelsford/django-tablib
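A brief illustrative aside on the library under test, not part of the record above: once a ModelDataset subclass like these is defined, it is typically consumed through the tablib Dataset API it builds on. A minimal sketch, with the dataset name and the export property assumed rather than taken from the record:

# Hypothetical usage sketch; the `.csv` export comes from the underlying
# tablib Dataset API and is an assumption, not shown in the record.
from django_tablib import ModelDataset
from testproject.tablib_test.models import TestModel

class Field1Dataset(ModelDataset):
    class Meta:
        model = TestModel
        fields = ['field1']

data = Field1Dataset()
print(data.headers)  # ['field1']
print(data.csv)      # tablib-style CSV export of the queryset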
95d3401b29d2cba2d282256cdd2513c67e3df858
ipython_notebook_config.py
ipython_notebook_config.py
# Configuration file for ipython-notebook.

c = get_config()

c.NotebookApp.ip = '*'
c.NotebookApp.open_browser = False
c.NotebookApp.port = 8888

# Whether or not to trust X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-
# For headers sent by the upstream reverse proxy. Necessary if the proxy
# handles SSL
c.NotebookApp.trust_xheaders = True

# Supply overrides for the tornado.web.Application that the IPython notebook
# uses.
c.NotebookApp.webapp_settings = {
    'X-Frame-Options': 'ALLOW FROM nature.com',
    'template_path': ['/srv/ga/',
                      '/srv/ipython/IPython/html',
                      '/srv/ipython/IPython/html/templates']
}
# Configuration file for ipython-notebook.

c = get_config()

c.NotebookApp.ip = '*'
c.NotebookApp.open_browser = False
c.NotebookApp.port = 8888

# Whether or not to trust X-Scheme/X-Forwarded-Proto and X-Real-Ip/X-Forwarded-
# For headers sent by the upstream reverse proxy. Necessary if the proxy
# handles SSL
c.NotebookApp.trust_xheaders = True

# Supply overrides for the tornado.web.Application that the IPython notebook
# uses.
c.NotebookApp.webapp_settings = {
    'X-Frame-Options': 'ALLOW FROM nature.com',
    'Content-Security-Policy': "frame-ancestors 'self' *.nature.com",
    'template_path': ['/srv/ga/',
                      '/srv/ipython/IPython/html',
                      '/srv/ipython/IPython/html/templates']
}
Set the content security policy
Set the content security policy
Python
bsd-3-clause
jupyter/nature-demo,jupyter/nature-demo,jupyter/nature-demo
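An editorial note on the change above, offered as general background rather than project documentation: the `ALLOW FROM` value given to X-Frame-Options is non-standard (the documented legacy syntax is `ALLOW-FROM uri`, and modern browsers ignore that directive entirely), which is why the standards-based `Content-Security-Policy: frame-ancestors` header is the reliable way to permit framing from *.nature.com. A minimal sketch for checking which headers a running server actually sends, with the URL purely hypothetical:

# Hypothetical check; replace the URL with a real notebook deployment.
import requests

resp = requests.get('https://notebooks.example.com/')
print(resp.headers.get('X-Frame-Options'))
print(resp.headers.get('Content-Security-Policy'))  # expect: frame-ancestors 'self' *.nature.com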
fe41ecce4b840374a561bbef0bbf4ad465e66180
tests/ml/test_fasttext_helpers.py
tests/ml/test_fasttext_helpers.py
import pandas
import unittest

import cocoscore.ml.fasttext_helpers as fth


class CVTest(unittest.TestCase):
    train_path = 'ft_simple_test.txt'
    ft_path = '/home/lib/fastText'
    model_path = 'testmodel'

    def test_train_call_parameters(self):
        train_call, compress_call = fth.get_fasttext_train_calls(self.train_path, {'-aaa': 1.0},
                                                                 self.ft_path, self.model_path,
                                                                 thread=5)
        expected_train_call = self.ft_path + ' supervised -input ' + self.train_path + ' -output ' + self.model_path + \
            ' -aaa 1.0 -thread 5 '
        self.assertEqual(train_call, expected_train_call)
        expected_compress_call = self.ft_path + ' quantize -input ' + self.model_path + ' -output ' + self.model_path
        self.assertEqual(compress_call, expected_compress_call)


if __name__ == '__main__':
    unittest.main()
import pandas
import unittest

import cocoscore.ml.fasttext_helpers as fth


class CVTest(unittest.TestCase):
    train_path = 'ft_simple_test.txt'
    ft_path = '/home/lib/fastText'
    model_path = 'testmodel'
    test_path = 'ft_simple_test.txt'
    probability_path = 'ft_simple_prob.txt'

    def test_train_call_parameters(self):
        train_call, compress_call = fth.get_fasttext_train_calls(self.train_path, {'-aaa': 1.0},
                                                                 self.ft_path, self.model_path,
                                                                 thread=5)
        expected_train_call = self.ft_path + ' supervised -input ' + self.train_path + ' -output ' + self.model_path + \
            ' -aaa 1.0 -thread 5 '
        self.assertEqual(train_call, expected_train_call)
        expected_compress_call = self.ft_path + ' quantize -input ' + self.model_path + ' -output ' + self.model_path
        self.assertEqual(compress_call, expected_compress_call)

    def test_test_call_parameters(self):
        predict_call = fth.get_fasttext_test_calls(self.test_path, self.ft_path,
                                                   self.model_path, self.probability_path)
        expected_predict_call = self.ft_path + ' predict-prob ' + self.model_path + ' ' + self.test_path + ' ' + \
            str(2) + ' | gzip > ' + self.probability_path
        self.assertEqual(predict_call, expected_predict_call)


if __name__ == '__main__':
    unittest.main()
Add unittest for testing file path
Add unittest for testing file path
Python
mit
JungeAlexander/cocoscore
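As context for the strings assembled in these tests — an editorial sketch only, with paths taken from the test constants but the execution itself hypothetical: the expected calls are plain shell pipelines, and fastText's `predict-prob` mode emits label/probability pairs per input line, which the call above compresses with gzip.

# Hypothetical execution of the assembled call; requires a real fastText binary.
import gzip
import subprocess

predict_call = ('/home/lib/fastText predict-prob testmodel '
                'ft_simple_test.txt 2 | gzip > ft_simple_prob.txt')
subprocess.run(predict_call, shell=True, check=True)
with gzip.open('ft_simple_prob.txt', 'rt') as fh:
    print(fh.read())  # e.g. "__label__1 0.97 __label__0 0.03" per line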
fd1590ad0ceab26e281c58aefeac1365a3f332d5
tests/test_lib_tokens_webauthn.py
tests/test_lib_tokens_webauthn.py
""" This test file tests the lib.tokens.webauthntoken, along with lib.tokens.webauthn. This depends on lib.tokenclass """ from .base import MyTestCase from privacyidea.lib.tokens.webauthntoken import WebAuthnTokenClass, WEBAUTHNACTION from privacyidea.lib.token import init_token from privacyidea.lib.policy import set_policy, SCOPE RP_ID = 'example.com' RP_NAME = 'ACME' class WebAuthnTokenTestCase(MyTestCase): def test_00_users(self): self.setUp_user_realms() set_policy(name="WebAuthn", scope=SCOPE.ENROLL, action=WEBAUTHNACTION.RELYING_PARTY_NAME+"="+RP_NAME+"," +WEBAUTHNACTION.RELYING_PARTY_ID+"="+RP_ID) def test_01_create_token(self): pin = "1234" # # Init step 1 # token = init_token({'type': 'webauthn', 'pin': pin}) serial = token.token.serial self.assertEqual(token.type, "webauthn") self.assertEqual(WebAuthnTokenClass.get_class_prefix(), "WAN") self.assertEqual(WebAuthnTokenClass.get_class_info().get('type'), "webauthn") self.assertEqual(WebAuthnTokenClass.get_class_info('type'), "webauthn")
""" This test file tests the lib.tokens.webauthntoken, along with lib.tokens.webauthn. This depends on lib.tokenclass """ import unittest from copy import copy from privacyidea.lib.tokens import webauthn from privacyidea.lib.tokens.webauthn import COSEALGORITHM from .base import MyTestCase from privacyidea.lib.tokens.webauthntoken import WebAuthnTokenClass, WEBAUTHNACTION from privacyidea.lib.token import init_token from privacyidea.lib.policy import set_policy, SCOPE class WebAuthnTokenTestCase(MyTestCase): RP_ID = 'example.com' RP_NAME = 'ACME' def test_00_users(self): self.setUp_user_realms() set_policy(name="WebAuthn", scope=SCOPE.ENROLL, action=WEBAUTHNACTION.RELYING_PARTY_NAME+"="+self.RP_NAME+"," +WEBAUTHNACTION.RELYING_PARTY_ID+"="+self.RP_ID) def test_01_create_token(self): pin = "1234" # # Init step 1 # token = init_token({'type': 'webauthn', 'pin': pin}) serial = token.token.serial self.assertEqual(token.type, "webauthn") self.assertEqual(WebAuthnTokenClass.get_class_prefix(), "WAN") self.assertEqual(WebAuthnTokenClass.get_class_info().get('type'), "webauthn") self.assertEqual(WebAuthnTokenClass.get_class_info('type'), "webauthn")
Add testing for the WebAuthn implementation
Add testing for the WebAuthn implementation
Python
agpl-3.0
privacyidea/privacyidea,privacyidea/privacyidea,privacyidea/privacyidea,privacyidea/privacyidea,privacyidea/privacyidea,privacyidea/privacyidea
""" This test file tests the lib.tokens.webauthntoken, along with lib.tokens.webauthn. This depends on lib.tokenclass """ from .base import MyTestCase from privacyidea.lib.tokens.webauthntoken import WebAuthnTokenClass, WEBAUTHNACTION from privacyidea.lib.token import init_token from privacyidea.lib.policy import set_policy, SCOPE RP_ID = 'example.com' RP_NAME = 'ACME' class WebAuthnTokenTestCase(MyTestCase): def test_00_users(self): self.setUp_user_realms() set_policy(name="WebAuthn", scope=SCOPE.ENROLL, action=WEBAUTHNACTION.RELYING_PARTY_NAME+"="+RP_NAME+"," +WEBAUTHNACTION.RELYING_PARTY_ID+"="+RP_ID) def test_01_create_token(self): pin = "1234" # # Init step 1 # token = init_token({'type': 'webauthn', 'pin': pin}) serial = token.token.serial self.assertEqual(token.type, "webauthn") self.assertEqual(WebAuthnTokenClass.get_class_prefix(), "WAN") self.assertEqual(WebAuthnTokenClass.get_class_info().get('type'), "webauthn") self.assertEqual(WebAuthnTokenClass.get_class_info('type'), "webauthn") Add testing for the WebAuthn implementation
""" This test file tests the lib.tokens.webauthntoken, along with lib.tokens.webauthn. This depends on lib.tokenclass """ import unittest from copy import copy from privacyidea.lib.tokens import webauthn from privacyidea.lib.tokens.webauthn import COSEALGORITHM from .base import MyTestCase from privacyidea.lib.tokens.webauthntoken import WebAuthnTokenClass, WEBAUTHNACTION from privacyidea.lib.token import init_token from privacyidea.lib.policy import set_policy, SCOPE class WebAuthnTokenTestCase(MyTestCase): RP_ID = 'example.com' RP_NAME = 'ACME' def test_00_users(self): self.setUp_user_realms() set_policy(name="WebAuthn", scope=SCOPE.ENROLL, action=WEBAUTHNACTION.RELYING_PARTY_NAME+"="+self.RP_NAME+"," +WEBAUTHNACTION.RELYING_PARTY_ID+"="+self.RP_ID) def test_01_create_token(self): pin = "1234" # # Init step 1 # token = init_token({'type': 'webauthn', 'pin': pin}) serial = token.token.serial self.assertEqual(token.type, "webauthn") self.assertEqual(WebAuthnTokenClass.get_class_prefix(), "WAN") self.assertEqual(WebAuthnTokenClass.get_class_info().get('type'), "webauthn") self.assertEqual(WebAuthnTokenClass.get_class_info('type'), "webauthn")
<commit_before>""" This test file tests the lib.tokens.webauthntoken, along with lib.tokens.webauthn. This depends on lib.tokenclass """ from .base import MyTestCase from privacyidea.lib.tokens.webauthntoken import WebAuthnTokenClass, WEBAUTHNACTION from privacyidea.lib.token import init_token from privacyidea.lib.policy import set_policy, SCOPE RP_ID = 'example.com' RP_NAME = 'ACME' class WebAuthnTokenTestCase(MyTestCase): def test_00_users(self): self.setUp_user_realms() set_policy(name="WebAuthn", scope=SCOPE.ENROLL, action=WEBAUTHNACTION.RELYING_PARTY_NAME+"="+RP_NAME+"," +WEBAUTHNACTION.RELYING_PARTY_ID+"="+RP_ID) def test_01_create_token(self): pin = "1234" # # Init step 1 # token = init_token({'type': 'webauthn', 'pin': pin}) serial = token.token.serial self.assertEqual(token.type, "webauthn") self.assertEqual(WebAuthnTokenClass.get_class_prefix(), "WAN") self.assertEqual(WebAuthnTokenClass.get_class_info().get('type'), "webauthn") self.assertEqual(WebAuthnTokenClass.get_class_info('type'), "webauthn") <commit_msg>Add testing for the WebAuthn implementation<commit_after>
""" This test file tests the lib.tokens.webauthntoken, along with lib.tokens.webauthn. This depends on lib.tokenclass """ import unittest from copy import copy from privacyidea.lib.tokens import webauthn from privacyidea.lib.tokens.webauthn import COSEALGORITHM from .base import MyTestCase from privacyidea.lib.tokens.webauthntoken import WebAuthnTokenClass, WEBAUTHNACTION from privacyidea.lib.token import init_token from privacyidea.lib.policy import set_policy, SCOPE class WebAuthnTokenTestCase(MyTestCase): RP_ID = 'example.com' RP_NAME = 'ACME' def test_00_users(self): self.setUp_user_realms() set_policy(name="WebAuthn", scope=SCOPE.ENROLL, action=WEBAUTHNACTION.RELYING_PARTY_NAME+"="+self.RP_NAME+"," +WEBAUTHNACTION.RELYING_PARTY_ID+"="+self.RP_ID) def test_01_create_token(self): pin = "1234" # # Init step 1 # token = init_token({'type': 'webauthn', 'pin': pin}) serial = token.token.serial self.assertEqual(token.type, "webauthn") self.assertEqual(WebAuthnTokenClass.get_class_prefix(), "WAN") self.assertEqual(WebAuthnTokenClass.get_class_info().get('type'), "webauthn") self.assertEqual(WebAuthnTokenClass.get_class_info('type'), "webauthn")
""" This test file tests the lib.tokens.webauthntoken, along with lib.tokens.webauthn. This depends on lib.tokenclass """ from .base import MyTestCase from privacyidea.lib.tokens.webauthntoken import WebAuthnTokenClass, WEBAUTHNACTION from privacyidea.lib.token import init_token from privacyidea.lib.policy import set_policy, SCOPE RP_ID = 'example.com' RP_NAME = 'ACME' class WebAuthnTokenTestCase(MyTestCase): def test_00_users(self): self.setUp_user_realms() set_policy(name="WebAuthn", scope=SCOPE.ENROLL, action=WEBAUTHNACTION.RELYING_PARTY_NAME+"="+RP_NAME+"," +WEBAUTHNACTION.RELYING_PARTY_ID+"="+RP_ID) def test_01_create_token(self): pin = "1234" # # Init step 1 # token = init_token({'type': 'webauthn', 'pin': pin}) serial = token.token.serial self.assertEqual(token.type, "webauthn") self.assertEqual(WebAuthnTokenClass.get_class_prefix(), "WAN") self.assertEqual(WebAuthnTokenClass.get_class_info().get('type'), "webauthn") self.assertEqual(WebAuthnTokenClass.get_class_info('type'), "webauthn") Add testing for the WebAuthn implementation""" This test file tests the lib.tokens.webauthntoken, along with lib.tokens.webauthn. This depends on lib.tokenclass """ import unittest from copy import copy from privacyidea.lib.tokens import webauthn from privacyidea.lib.tokens.webauthn import COSEALGORITHM from .base import MyTestCase from privacyidea.lib.tokens.webauthntoken import WebAuthnTokenClass, WEBAUTHNACTION from privacyidea.lib.token import init_token from privacyidea.lib.policy import set_policy, SCOPE class WebAuthnTokenTestCase(MyTestCase): RP_ID = 'example.com' RP_NAME = 'ACME' def test_00_users(self): self.setUp_user_realms() set_policy(name="WebAuthn", scope=SCOPE.ENROLL, action=WEBAUTHNACTION.RELYING_PARTY_NAME+"="+self.RP_NAME+"," +WEBAUTHNACTION.RELYING_PARTY_ID+"="+self.RP_ID) def test_01_create_token(self): pin = "1234" # # Init step 1 # token = init_token({'type': 'webauthn', 'pin': pin}) serial = token.token.serial self.assertEqual(token.type, "webauthn") self.assertEqual(WebAuthnTokenClass.get_class_prefix(), "WAN") self.assertEqual(WebAuthnTokenClass.get_class_info().get('type'), "webauthn") self.assertEqual(WebAuthnTokenClass.get_class_info('type'), "webauthn")
<commit_before>""" This test file tests the lib.tokens.webauthntoken, along with lib.tokens.webauthn. This depends on lib.tokenclass """ from .base import MyTestCase from privacyidea.lib.tokens.webauthntoken import WebAuthnTokenClass, WEBAUTHNACTION from privacyidea.lib.token import init_token from privacyidea.lib.policy import set_policy, SCOPE RP_ID = 'example.com' RP_NAME = 'ACME' class WebAuthnTokenTestCase(MyTestCase): def test_00_users(self): self.setUp_user_realms() set_policy(name="WebAuthn", scope=SCOPE.ENROLL, action=WEBAUTHNACTION.RELYING_PARTY_NAME+"="+RP_NAME+"," +WEBAUTHNACTION.RELYING_PARTY_ID+"="+RP_ID) def test_01_create_token(self): pin = "1234" # # Init step 1 # token = init_token({'type': 'webauthn', 'pin': pin}) serial = token.token.serial self.assertEqual(token.type, "webauthn") self.assertEqual(WebAuthnTokenClass.get_class_prefix(), "WAN") self.assertEqual(WebAuthnTokenClass.get_class_info().get('type'), "webauthn") self.assertEqual(WebAuthnTokenClass.get_class_info('type'), "webauthn") <commit_msg>Add testing for the WebAuthn implementation<commit_after>""" This test file tests the lib.tokens.webauthntoken, along with lib.tokens.webauthn. This depends on lib.tokenclass """ import unittest from copy import copy from privacyidea.lib.tokens import webauthn from privacyidea.lib.tokens.webauthn import COSEALGORITHM from .base import MyTestCase from privacyidea.lib.tokens.webauthntoken import WebAuthnTokenClass, WEBAUTHNACTION from privacyidea.lib.token import init_token from privacyidea.lib.policy import set_policy, SCOPE class WebAuthnTokenTestCase(MyTestCase): RP_ID = 'example.com' RP_NAME = 'ACME' def test_00_users(self): self.setUp_user_realms() set_policy(name="WebAuthn", scope=SCOPE.ENROLL, action=WEBAUTHNACTION.RELYING_PARTY_NAME+"="+self.RP_NAME+"," +WEBAUTHNACTION.RELYING_PARTY_ID+"="+self.RP_ID) def test_01_create_token(self): pin = "1234" # # Init step 1 # token = init_token({'type': 'webauthn', 'pin': pin}) serial = token.token.serial self.assertEqual(token.type, "webauthn") self.assertEqual(WebAuthnTokenClass.get_class_prefix(), "WAN") self.assertEqual(WebAuthnTokenClass.get_class_info().get('type'), "webauthn") self.assertEqual(WebAuthnTokenClass.get_class_info('type'), "webauthn")
7c16ad1dbe97a2f06968f508e605485e86751a5b
tests/utils.py
tests/utils.py
import unittest

from knights import compiler


class Mock(object):
    def __init__(self, **kwargs):
        for k, v in kwargs.items():
            setattr(self, k, v)


class TemplateTestCase(unittest.TestCase):

    def assertRendered(self, source, expected, context=None, debug=False):
        try:
            tmpl = compiler.kompile(source, debug=debug)
            rendered = tmpl({} if context is None else context)
            self.assertEqual(rendered, expected)
        except Exception as e:
            if hasattr(e, 'message'):
                standardMsg = e.message
            elif hasattr(e, 'args') and len(e.args) > 0:
                standardMsg = e.args[0]
            else:
                standardMsg = ''
            msg = 'Failed rendering template %s:\n%s: %s' % (
                source, e.__class__.__name__, standardMsg)
            self.fail(msg)
import unittest

from knights import compiler


class Mock(object):
    def __init__(self, **kwargs):
        for k, v in kwargs.items():
            setattr(self, k, v)


class TemplateTestCase(unittest.TestCase):

    def assertRendered(self, source, expected, context=None):
        try:
            tmpl = compiler.kompile(source)
            rendered = tmpl({} if context is None else context)
            self.assertEqual(rendered, expected)
        except Exception as e:
            if hasattr(e, 'message'):
                standardMsg = e.message
            elif hasattr(e, 'args') and len(e.args) > 0:
                standardMsg = e.args[0]
            else:
                standardMsg = ''
            msg = 'Failed rendering template %s:\n%s: %s' % (
                source, e.__class__.__name__, standardMsg)
            self.fail(msg)
Remove debug flag from tests
Remove debug flag from tests
Python
mit
funkybob/knights-templater,funkybob/knights-templater
7e7b0ce7c31c50bdcfaf80d950206e58401c5a8c
workshopvenues/venues/models.py
workshopvenues/venues/models.py
from django.db import models


class Facility(models.Model):
    name = models.CharField(max_length=30)

    def __unicode__(self):
        return self.name


class Address(models.Model):
    street = models.CharField(max_length=200)
    town = models.CharField(max_length=30)
    postcode = models.CharField(max_length=10)


class Venue(models.Model):
    name = models.CharField(max_length=30)
    website = models.CharField(max_length=50)
    address = models.ForeignKey(Address)
    facilities = models.ManyToManyField(Facility)

    def __unicode__(self):
        return self.name
from django.db import models


class Facility(models.Model):
    name = models.CharField(max_length=30)

    def __unicode__(self):
        return self.name


class Address(models.Model):
    street = models.CharField(max_length=200)
    town = models.CharField(max_length=30)
    postcode = models.CharField(max_length=10)
    country = models.CharField(max_length=30, blank=True)


class Venue(models.Model):
    name = models.CharField(max_length=30)
    website = models.CharField(max_length=50)
    address = models.ForeignKey(Address)
    facilities = models.ManyToManyField(Facility)

    def __unicode__(self):
        return self.name
Add country field to Address model
Add country field to Address model
Python
bsd-3-clause
andreagrandi/workshopvenues
e20d0725e47ea6fe671f7889c02f212962963083
pyinstaller/hook-googleapiclient.model.py
pyinstaller/hook-googleapiclient.model.py
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""pyinstaller configuration for googleapiclient."""

from PyInstaller.utils.hooks import collect_data_files, copy_metadata

# googleapiclient.model queries the library version via
# pkg_resources.get_distribution("google-api-python-client").version,
# so we need to collect that package's metadata
datas = copy_metadata('google_api_python_client')
datas += collect_data_files('googleapiclient.discovery',
                            excludes=['*.txt', '**/__pycache__'])
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""pyinstaller configuration for googleapiclient."""

from PyInstaller.utils.hooks import collect_data_files, copy_metadata

# googleapiclient.model queries the library version via
# pkg_resources.get_distribution("google-api-python-client").version,
# so we need to collect that package's metadata
datas = copy_metadata('google_api_python_client')
datas += collect_data_files('googleapiclient.discovery',
                            excludes=['*.txt', '**/__pycache__'])
datas += collect_data_files('googleapiclient.discovery_cache',
                            excludes=['*.txt', '**/__pycache__'])
Fix pyinstall-build by including discovery cache documents.
Fix pyinstall-build by including discovery cache documents.

Produced an error related to serviceusage API.

Change-Id: Idf6b83912c3e71e7081ef1b6b0a2836a18723542
GitOrigin-RevId: e38c69cfc7269177570a8aa8c23f8eaa2d32ddd2
Python
apache-2.0
GoogleCloudPlatform/gcpdiag,GoogleCloudPlatform/gcpdiag,GoogleCloudPlatform/gcpdiag
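A small aside on the hook mechanics shown above, offered as a sketch under the assumption that the relevant packages are installed: both PyInstaller helpers return lists of (source, destination) tuples, so the `datas` additions can be inspected directly before a build.

# Illustrative only: print what the hook would bundle.
from PyInstaller.utils.hooks import collect_data_files, copy_metadata

for src, dest in copy_metadata('google_api_python_client'):
    print(src, '->', dest)
for src, dest in collect_data_files('googleapiclient.discovery_cache',
                                    excludes=['*.txt', '**/__pycache__']):
    print(src, '->', dest)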
<commit_before># Copyright 2021 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """pyinstaller configuration for googleapiclient.""" from PyInstaller.utils.hooks import collect_data_files, copy_metadata # googleapiclient.model queries the library version via # pkg_resources.get_distribution("google-api-python-client").version, # so we need to collect that package's metadata datas = copy_metadata('google_api_python_client') datas += collect_data_files('googleapiclient.discovery', excludes=['*.txt', '**/__pycache__']) <commit_msg>Fix pyinstall-build by including discovery cache documents. Produced an error related to serviceusage API. Change-Id: Idf6b83912c3e71e7081ef1b6b0a2836a18723542 GitOrigin-RevId: e38c69cfc7269177570a8aa8c23f8eaa2d32ddd2<commit_after># Copyright 2021 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """pyinstaller configuration for googleapiclient.""" from PyInstaller.utils.hooks import collect_data_files, copy_metadata # googleapiclient.model queries the library version via # pkg_resources.get_distribution("google-api-python-client").version, # so we need to collect that package's metadata datas = copy_metadata('google_api_python_client') datas += collect_data_files('googleapiclient.discovery', excludes=['*.txt', '**/__pycache__']) datas += collect_data_files('googleapiclient.discovery_cache', excludes=['*.txt', '**/__pycache__'])
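The hook in this record works by handing PyInstaller a list of (source path, destination folder) pairs. As a rough sketch of what the added discovery-cache line contributes, the snippet below, which assumes PyInstaller and google-api-python-client are installed, prints the data files that would be bundled; the cached discovery documents (including the serviceusage one the commit message mentions) should appear in the output.

    # Sketch: inspect what collect_data_files would gather for the
    # discovery cache; each entry is a (source path, dest folder) pair.
    from PyInstaller.utils.hooks import collect_data_files

    collected = collect_data_files('googleapiclient.discovery_cache',
                                   excludes=['*.txt', '**/__pycache__'])
    for source, dest in collected:
        print(source, '->', dest)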
43282f7f1c9094691f64543b284ee06813e4d6a2
singleuser/user-fixes.py
singleuser/user-fixes.py
import os custom_path = os.path.expanduser('~/user-fixes.py') if os.path.exists(custom_path): with open(custom_path, 'r') as f: exec(compile(f.read(), custom_path, 'exec'), globals()) del f # Clean up temp variables, since pwb issues a warning otherwise # to help people catch misspelt config del custom_path
import os custom_path = os.path.expanduser('~/user-fixes.py') if os.path.exists(custom_path): with open(custom_path, 'rb') as f: exec(compile(f.read(), custom_path, 'exec'), globals()) del f # Clean up temp variables, since pwb issues a warning otherwise # to help people catch misspelt config del custom_path
Use 'rb' mode for reading files
Use 'rb' mode for reading files To stick to pwb conventions
Python
mit
yuvipanda/paws,yuvipanda/paws
import os custom_path = os.path.expanduser('~/user-fixes.py') if os.path.exists(custom_path): with open(custom_path, 'r') as f: exec(compile(f.read(), custom_path, 'exec'), globals()) del f # Clean up temp variables, since pwb issues a warning otherwise # to help people catch misspelt config del custom_path Use 'rb' mode for reading files To stick to pwb conventions
import os custom_path = os.path.expanduser('~/user-fixes.py') if os.path.exists(custom_path): with open(custom_path, 'rb') as f: exec(compile(f.read(), custom_path, 'exec'), globals()) del f # Clean up temp variables, since pwb issues a warning otherwise # to help people catch misspelt config del custom_path
<commit_before>import os custom_path = os.path.expanduser('~/user-fixes.py') if os.path.exists(custom_path): with open(custom_path, 'r') as f: exec(compile(f.read(), custom_path, 'exec'), globals()) del f # Clean up temp variables, since pwb issues a warning otherwise # to help people catch misspelt config del custom_path <commit_msg>Use 'rb' mode for reading files To stick to pwb conventions<commit_after>
import os custom_path = os.path.expanduser('~/user-fixes.py') if os.path.exists(custom_path): with open(custom_path, 'rb') as f: exec(compile(f.read(), custom_path, 'exec'), globals()) del f # Clean up temp variables, since pwb issues a warning otherwise # to help people catch misspelt config del custom_path
import os custom_path = os.path.expanduser('~/user-fixes.py') if os.path.exists(custom_path): with open(custom_path, 'r') as f: exec(compile(f.read(), custom_path, 'exec'), globals()) del f # Clean up temp variables, since pwb issues a warning otherwise # to help people catch misspelt config del custom_path Use 'rb' mode for reading files To stick to pwb conventionsimport os custom_path = os.path.expanduser('~/user-fixes.py') if os.path.exists(custom_path): with open(custom_path, 'rb') as f: exec(compile(f.read(), custom_path, 'exec'), globals()) del f # Clean up temp variables, since pwb issues a warning otherwise # to help people catch misspelt config del custom_path
<commit_before>import os custom_path = os.path.expanduser('~/user-fixes.py') if os.path.exists(custom_path): with open(custom_path, 'r') as f: exec(compile(f.read(), custom_path, 'exec'), globals()) del f # Clean up temp variables, since pwb issues a warning otherwise # to help people catch misspelt config del custom_path <commit_msg>Use 'rb' mode for reading files To stick to pwb conventions<commit_after>import os custom_path = os.path.expanduser('~/user-fixes.py') if os.path.exists(custom_path): with open(custom_path, 'rb') as f: exec(compile(f.read(), custom_path, 'exec'), globals()) del f # Clean up temp variables, since pwb issues a warning otherwise # to help people catch misspelt config del custom_path
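The switch from 'r' to 'rb' in this record is safe because compile() accepts bytes as well as text; with bytes input, Python applies its own source-encoding rules (UTF-8 by default, or a PEP 263 coding cookie in the file) instead of the locale's text encoding. A minimal, self-contained illustration, using an inline bytes literal rather than a real user-fixes.py:

    # compile() accepts bytes; decoding then follows Python source rules,
    # not the platform locale.
    source = b"# -*- coding: utf-8 -*-\nname = 'caf\xc3\xa9'\n"
    code = compile(source, '<user-fixes>', 'exec')
    namespace = {}
    exec(code, namespace)
    print(namespace['name'])  # prints: café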
97b2e90f4f9a4f3c08f4556856aec1d31b44749a
flocker/control/_clusterstate.py
flocker/control/_clusterstate.py
# Copyright Hybrid Logic Ltd. See LICENSE file for details. """ Combine and retrieve current cluster state. """ from twisted.application.service import Service from ._model import Deployment, Node class ClusterStateService(Service): """ Store known current cluster state, and combine partial updates with the existing known state. (Follow up issue will deal with semantics of expiring data, which should happen so stale information isn't stored. This needs some extra work for the agent resending state even when it doesn't change, etc..) """ def __init__(self): self._nodes = {} def update_node_state(self, hostname, node_state): """ Update the state of a given node. :param unicode hostname: The node's identifier. :param NodeState node_state: The state of the node. """ self._nodes[hostname] = node_state def as_deployment(self): """ Return cluster state as a Deployment object. """ return Deployment(nodes=frozenset([ Node(hostname=hostname, applications=frozenset( node_state.running + node_state.not_running)) for hostname, node_state in self._nodes.items()]))
# Copyright Hybrid Logic Ltd. See LICENSE file for details. """ Combine and retrieve current cluster state. """ from twisted.application.service import Service from ._model import Deployment, Node class ClusterStateService(Service): """ Store known current cluster state, and combine partial updates with the existing known state. https://clusterhq.atlassian.net/browse/FLOC-1269 will deal with semantics of expiring data, which should happen so stale information isn't treated as correct. """ def __init__(self): self._nodes = {} def update_node_state(self, hostname, node_state): """ Update the state of a given node. :param unicode hostname: The node's identifier. :param NodeState node_state: The state of the node. """ self._nodes[hostname] = node_state def as_deployment(self): """ Return cluster state as a Deployment object. """ return Deployment(nodes=frozenset([ Node(hostname=hostname, applications=frozenset( node_state.running + node_state.not_running)) for hostname, node_state in self._nodes.items()]))
Address review comment: Link to issue.
Address review comment: Link to issue.
Python
apache-2.0
achanda/flocker,runcom/flocker,Azulinho/flocker,mbrukman/flocker,jml/flocker,moypray/flocker,AndyHuu/flocker,agonzalezro/flocker,Azulinho/flocker,moypray/flocker,w4ngyi/flocker,moypray/flocker,jml/flocker,LaynePeng/flocker,hackday-profilers/flocker,adamtheturtle/flocker,LaynePeng/flocker,1d4Nf6/flocker,w4ngyi/flocker,adamtheturtle/flocker,AndyHuu/flocker,runcom/flocker,wallnerryan/flocker-profiles,w4ngyi/flocker,mbrukman/flocker,lukemarsden/flocker,jml/flocker,AndyHuu/flocker,achanda/flocker,agonzalezro/flocker,hackday-profilers/flocker,Azulinho/flocker,hackday-profilers/flocker,lukemarsden/flocker,achanda/flocker,agonzalezro/flocker,lukemarsden/flocker,1d4Nf6/flocker,LaynePeng/flocker,runcom/flocker,adamtheturtle/flocker,wallnerryan/flocker-profiles,1d4Nf6/flocker,wallnerryan/flocker-profiles,mbrukman/flocker
# Copyright Hybrid Logic Ltd. See LICENSE file for details. """ Combine and retrieve current cluster state. """ from twisted.application.service import Service from ._model import Deployment, Node class ClusterStateService(Service): """ Store known current cluster state, and combine partial updates with the existing known state. (Follow up issue will deal with semantics of expiring data, which should happen so stale information isn't stored. This needs some extra work for the agent resending state even when it doesn't change, etc..) """ def __init__(self): self._nodes = {} def update_node_state(self, hostname, node_state): """ Update the state of a given node. :param unicode hostname: The node's identifier. :param NodeState node_state: The state of the node. """ self._nodes[hostname] = node_state def as_deployment(self): """ Return cluster state as a Deployment object. """ return Deployment(nodes=frozenset([ Node(hostname=hostname, applications=frozenset( node_state.running + node_state.not_running)) for hostname, node_state in self._nodes.items()])) Address review comment: Link to issue.
# Copyright Hybrid Logic Ltd. See LICENSE file for details. """ Combine and retrieve current cluster state. """ from twisted.application.service import Service from ._model import Deployment, Node class ClusterStateService(Service): """ Store known current cluster state, and combine partial updates with the existing known state. https://clusterhq.atlassian.net/browse/FLOC-1269 will deal with semantics of expiring data, which should happen so stale information isn't treated as correct. """ def __init__(self): self._nodes = {} def update_node_state(self, hostname, node_state): """ Update the state of a given node. :param unicode hostname: The node's identifier. :param NodeState node_state: The state of the node. """ self._nodes[hostname] = node_state def as_deployment(self): """ Return cluster state as a Deployment object. """ return Deployment(nodes=frozenset([ Node(hostname=hostname, applications=frozenset( node_state.running + node_state.not_running)) for hostname, node_state in self._nodes.items()]))
<commit_before># Copyright Hybrid Logic Ltd. See LICENSE file for details. """ Combine and retrieve current cluster state. """ from twisted.application.service import Service from ._model import Deployment, Node class ClusterStateService(Service): """ Store known current cluster state, and combine partial updates with the existing known state. (Follow up issue will deal with semantics of expiring data, which should happen so stale information isn't stored. This needs some extra work for the agent resending state even when it doesn't change, etc..) """ def __init__(self): self._nodes = {} def update_node_state(self, hostname, node_state): """ Update the state of a given node. :param unicode hostname: The node's identifier. :param NodeState node_state: The state of the node. """ self._nodes[hostname] = node_state def as_deployment(self): """ Return cluster state as a Deployment object. """ return Deployment(nodes=frozenset([ Node(hostname=hostname, applications=frozenset( node_state.running + node_state.not_running)) for hostname, node_state in self._nodes.items()])) <commit_msg>Address review comment: Link to issue.<commit_after>
# Copyright Hybrid Logic Ltd. See LICENSE file for details. """ Combine and retrieve current cluster state. """ from twisted.application.service import Service from ._model import Deployment, Node class ClusterStateService(Service): """ Store known current cluster state, and combine partial updates with the existing known state. https://clusterhq.atlassian.net/browse/FLOC-1269 will deal with semantics of expiring data, which should happen so stale information isn't treated as correct. """ def __init__(self): self._nodes = {} def update_node_state(self, hostname, node_state): """ Update the state of a given node. :param unicode hostname: The node's identifier. :param NodeState node_state: The state of the node. """ self._nodes[hostname] = node_state def as_deployment(self): """ Return cluster state as a Deployment object. """ return Deployment(nodes=frozenset([ Node(hostname=hostname, applications=frozenset( node_state.running + node_state.not_running)) for hostname, node_state in self._nodes.items()]))
# Copyright Hybrid Logic Ltd. See LICENSE file for details. """ Combine and retrieve current cluster state. """ from twisted.application.service import Service from ._model import Deployment, Node class ClusterStateService(Service): """ Store known current cluster state, and combine partial updates with the existing known state. (Follow up issue will deal with semantics of expiring data, which should happen so stale information isn't stored. This needs some extra work for the agent resending state even when it doesn't change, etc..) """ def __init__(self): self._nodes = {} def update_node_state(self, hostname, node_state): """ Update the state of a given node. :param unicode hostname: The node's identifier. :param NodeState node_state: The state of the node. """ self._nodes[hostname] = node_state def as_deployment(self): """ Return cluster state as a Deployment object. """ return Deployment(nodes=frozenset([ Node(hostname=hostname, applications=frozenset( node_state.running + node_state.not_running)) for hostname, node_state in self._nodes.items()])) Address review comment: Link to issue.# Copyright Hybrid Logic Ltd. See LICENSE file for details. """ Combine and retrieve current cluster state. """ from twisted.application.service import Service from ._model import Deployment, Node class ClusterStateService(Service): """ Store known current cluster state, and combine partial updates with the existing known state. https://clusterhq.atlassian.net/browse/FLOC-1269 will deal with semantics of expiring data, which should happen so stale information isn't treated as correct. """ def __init__(self): self._nodes = {} def update_node_state(self, hostname, node_state): """ Update the state of a given node. :param unicode hostname: The node's identifier. :param NodeState node_state: The state of the node. """ self._nodes[hostname] = node_state def as_deployment(self): """ Return cluster state as a Deployment object. """ return Deployment(nodes=frozenset([ Node(hostname=hostname, applications=frozenset( node_state.running + node_state.not_running)) for hostname, node_state in self._nodes.items()]))
<commit_before># Copyright Hybrid Logic Ltd. See LICENSE file for details. """ Combine and retrieve current cluster state. """ from twisted.application.service import Service from ._model import Deployment, Node class ClusterStateService(Service): """ Store known current cluster state, and combine partial updates with the existing known state. (Follow up issue will deal with semantics of expiring data, which should happen so stale information isn't stored. This needs some extra work for the agent resending state even when it doesn't change, etc..) """ def __init__(self): self._nodes = {} def update_node_state(self, hostname, node_state): """ Update the state of a given node. :param unicode hostname: The node's identifier. :param NodeState node_state: The state of the node. """ self._nodes[hostname] = node_state def as_deployment(self): """ Return cluster state as a Deployment object. """ return Deployment(nodes=frozenset([ Node(hostname=hostname, applications=frozenset( node_state.running + node_state.not_running)) for hostname, node_state in self._nodes.items()])) <commit_msg>Address review comment: Link to issue.<commit_after># Copyright Hybrid Logic Ltd. See LICENSE file for details. """ Combine and retrieve current cluster state. """ from twisted.application.service import Service from ._model import Deployment, Node class ClusterStateService(Service): """ Store known current cluster state, and combine partial updates with the existing known state. https://clusterhq.atlassian.net/browse/FLOC-1269 will deal with semantics of expiring data, which should happen so stale information isn't treated as correct. """ def __init__(self): self._nodes = {} def update_node_state(self, hostname, node_state): """ Update the state of a given node. :param unicode hostname: The node's identifier. :param NodeState node_state: The state of the node. """ self._nodes[hostname] = node_state def as_deployment(self): """ Return cluster state as a Deployment object. """ return Deployment(nodes=frozenset([ Node(hostname=hostname, applications=frozenset( node_state.running + node_state.not_running)) for hostname, node_state in self._nodes.items()]))
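The service in this record is small: update_node_state stores per-host partial state, and as_deployment folds it into one immutable snapshot. A self-contained sketch of that fold, with plain dicts and strings standing in for flocker's NodeState and Application types:

    # Stand-ins for NodeState: each host reports running and
    # not-running application names.
    states = {
        u'node1.example.com': {'running': ['app-a'], 'not_running': ['app-b']},
        u'node2.example.com': {'running': ['app-c'], 'not_running': []},
    }
    # as_deployment() merges both lists per host into one frozen snapshot.
    snapshot = frozenset(
        (host, frozenset(s['running'] + s['not_running']))
        for host, s in states.items()
    )
    print(sorted(snapshot))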
9044f377d3018e7589f16126e65bcea173576918
joby/tests/test_data_science_jobs.py
joby/tests/test_data_science_jobs.py
""" Test the data_science_jobs spider. """ from joby.items import JobLoader, Job from joby.tests.utilities import make_offline_parser from datetime import date TEST_URL = 'http://www.data-science-jobs.com/detail/20' # noinspection PyShadowingNames def test_parse_overview_table(): expected_fields = { 'job_category': 'Data Scientist', 'apply_url': 'https://mydis.dis-ag.com/kd_1/Registration.aspx?tID=1&lID=0{0}', 'publication_date': date(2015, 2, 3), 'contract_type': 'Employee', 'workload': 'full-time', 'allows_remote': 'negotiable', 'duration': 'unlimited', } parser = make_offline_parser('data_science_jobs', 'DataScienceJobsJobParser', TEST_URL, ('job', Job, JobLoader)) parser.parse_job_overview() parser.job.load_item() assert dict(parser.job.item) == expected_fields
""" Test the data_science_jobs spider. """ from joby.spiders.data_science_jobs import DataScienceJobsSpider from joby.items import JobLoader, Job from joby.tests.utilities import make_offline_parser from datetime import date TEST_URL = 'http://www.data-science-jobs.com/detail/20' # noinspection PyShadowingNames def test_parse_overview_table(): expected_fields = { 'job_category': u'Data Scientist', 'apply_url': u'https://mydis.dis-ag.com/kd_1/Registration.aspx?tID=1&lID=0{0}', 'publication_date': str(date.today() - date(2015, 2, 2)).split(',')[0], 'days_since_posted': str(date.today() - date(2015, 2, 2)).split(',')[0], 'contract_type': u'Employee', 'workload': u'full-time', 'allows_remote': u'negotiable', 'duration': u'unlimited', } parser = make_offline_parser(DataScienceJobsSpider, 'data_science_jobs', 'DataScienceJobsParser', TEST_URL, ('job', Job, JobLoader)) parser.parse_job_overview() parser.job.load_item() assert dict(parser.job.item) == expected_fields
Add new Parser class argument (spider) and fix test parameters.
Add new Parser class argument (spider) and fix test parameters.
Python
mit
cyberbikepunk/job-spiders
""" Test the data_science_jobs spider. """ from joby.items import JobLoader, Job from joby.tests.utilities import make_offline_parser from datetime import date TEST_URL = 'http://www.data-science-jobs.com/detail/20' # noinspection PyShadowingNames def test_parse_overview_table(): expected_fields = { 'job_category': 'Data Scientist', 'apply_url': 'https://mydis.dis-ag.com/kd_1/Registration.aspx?tID=1&lID=0{0}', 'publication_date': date(2015, 2, 3), 'contract_type': 'Employee', 'workload': 'full-time', 'allows_remote': 'negotiable', 'duration': 'unlimited', } parser = make_offline_parser('data_science_jobs', 'DataScienceJobsJobParser', TEST_URL, ('job', Job, JobLoader)) parser.parse_job_overview() parser.job.load_item() assert dict(parser.job.item) == expected_fields Add new Parser class argument (spider) and fix test parameters.
""" Test the data_science_jobs spider. """ from joby.spiders.data_science_jobs import DataScienceJobsSpider from joby.items import JobLoader, Job from joby.tests.utilities import make_offline_parser from datetime import date TEST_URL = 'http://www.data-science-jobs.com/detail/20' # noinspection PyShadowingNames def test_parse_overview_table(): expected_fields = { 'job_category': u'Data Scientist', 'apply_url': u'https://mydis.dis-ag.com/kd_1/Registration.aspx?tID=1&lID=0{0}', 'publication_date': str(date.today() - date(2015, 2, 2)).split(',')[0], 'days_since_posted': str(date.today() - date(2015, 2, 2)).split(',')[0], 'contract_type': u'Employee', 'workload': u'full-time', 'allows_remote': u'negotiable', 'duration': u'unlimited', } parser = make_offline_parser(DataScienceJobsSpider, 'data_science_jobs', 'DataScienceJobsParser', TEST_URL, ('job', Job, JobLoader)) parser.parse_job_overview() parser.job.load_item() assert dict(parser.job.item) == expected_fields
<commit_before>""" Test the data_science_jobs spider. """ from joby.items import JobLoader, Job from joby.tests.utilities import make_offline_parser from datetime import date TEST_URL = 'http://www.data-science-jobs.com/detail/20' # noinspection PyShadowingNames def test_parse_overview_table(): expected_fields = { 'job_category': 'Data Scientist', 'apply_url': 'https://mydis.dis-ag.com/kd_1/Registration.aspx?tID=1&lID=0{0}', 'publication_date': date(2015, 2, 3), 'contract_type': 'Employee', 'workload': 'full-time', 'allows_remote': 'negotiable', 'duration': 'unlimited', } parser = make_offline_parser('data_science_jobs', 'DataScienceJobsJobParser', TEST_URL, ('job', Job, JobLoader)) parser.parse_job_overview() parser.job.load_item() assert dict(parser.job.item) == expected_fields <commit_msg>Add new Parser class argument (spider) and fix test parameters.<commit_after>
""" Test the data_science_jobs spider. """ from joby.spiders.data_science_jobs import DataScienceJobsSpider from joby.items import JobLoader, Job from joby.tests.utilities import make_offline_parser from datetime import date TEST_URL = 'http://www.data-science-jobs.com/detail/20' # noinspection PyShadowingNames def test_parse_overview_table(): expected_fields = { 'job_category': u'Data Scientist', 'apply_url': u'https://mydis.dis-ag.com/kd_1/Registration.aspx?tID=1&lID=0{0}', 'publication_date': str(date.today() - date(2015, 2, 2)).split(',')[0], 'days_since_posted': str(date.today() - date(2015, 2, 2)).split(',')[0], 'contract_type': u'Employee', 'workload': u'full-time', 'allows_remote': u'negotiable', 'duration': u'unlimited', } parser = make_offline_parser(DataScienceJobsSpider, 'data_science_jobs', 'DataScienceJobsParser', TEST_URL, ('job', Job, JobLoader)) parser.parse_job_overview() parser.job.load_item() assert dict(parser.job.item) == expected_fields
""" Test the data_science_jobs spider. """ from joby.items import JobLoader, Job from joby.tests.utilities import make_offline_parser from datetime import date TEST_URL = 'http://www.data-science-jobs.com/detail/20' # noinspection PyShadowingNames def test_parse_overview_table(): expected_fields = { 'job_category': 'Data Scientist', 'apply_url': 'https://mydis.dis-ag.com/kd_1/Registration.aspx?tID=1&lID=0{0}', 'publication_date': date(2015, 2, 3), 'contract_type': 'Employee', 'workload': 'full-time', 'allows_remote': 'negotiable', 'duration': 'unlimited', } parser = make_offline_parser('data_science_jobs', 'DataScienceJobsJobParser', TEST_URL, ('job', Job, JobLoader)) parser.parse_job_overview() parser.job.load_item() assert dict(parser.job.item) == expected_fields Add new Parser class argument (spider) and fix test parameters.""" Test the data_science_jobs spider. """ from joby.spiders.data_science_jobs import DataScienceJobsSpider from joby.items import JobLoader, Job from joby.tests.utilities import make_offline_parser from datetime import date TEST_URL = 'http://www.data-science-jobs.com/detail/20' # noinspection PyShadowingNames def test_parse_overview_table(): expected_fields = { 'job_category': u'Data Scientist', 'apply_url': u'https://mydis.dis-ag.com/kd_1/Registration.aspx?tID=1&lID=0{0}', 'publication_date': str(date.today() - date(2015, 2, 2)).split(',')[0], 'days_since_posted': str(date.today() - date(2015, 2, 2)).split(',')[0], 'contract_type': u'Employee', 'workload': u'full-time', 'allows_remote': u'negotiable', 'duration': u'unlimited', } parser = make_offline_parser(DataScienceJobsSpider, 'data_science_jobs', 'DataScienceJobsParser', TEST_URL, ('job', Job, JobLoader)) parser.parse_job_overview() parser.job.load_item() assert dict(parser.job.item) == expected_fields
<commit_before>""" Test the data_science_jobs spider. """ from joby.items import JobLoader, Job from joby.tests.utilities import make_offline_parser from datetime import date TEST_URL = 'http://www.data-science-jobs.com/detail/20' # noinspection PyShadowingNames def test_parse_overview_table(): expected_fields = { 'job_category': 'Data Scientist', 'apply_url': 'https://mydis.dis-ag.com/kd_1/Registration.aspx?tID=1&lID=0{0}', 'publication_date': date(2015, 2, 3), 'contract_type': 'Employee', 'workload': 'full-time', 'allows_remote': 'negotiable', 'duration': 'unlimited', } parser = make_offline_parser('data_science_jobs', 'DataScienceJobsJobParser', TEST_URL, ('job', Job, JobLoader)) parser.parse_job_overview() parser.job.load_item() assert dict(parser.job.item) == expected_fields <commit_msg>Add new Parser class argument (spider) and fix test parameters.<commit_after>""" Test the data_science_jobs spider. """ from joby.spiders.data_science_jobs import DataScienceJobsSpider from joby.items import JobLoader, Job from joby.tests.utilities import make_offline_parser from datetime import date TEST_URL = 'http://www.data-science-jobs.com/detail/20' # noinspection PyShadowingNames def test_parse_overview_table(): expected_fields = { 'job_category': u'Data Scientist', 'apply_url': u'https://mydis.dis-ag.com/kd_1/Registration.aspx?tID=1&lID=0{0}', 'publication_date': str(date.today() - date(2015, 2, 2)).split(',')[0], 'days_since_posted': str(date.today() - date(2015, 2, 2)).split(',')[0], 'contract_type': u'Employee', 'workload': u'full-time', 'allows_remote': u'negotiable', 'duration': u'unlimited', } parser = make_offline_parser(DataScienceJobsSpider, 'data_science_jobs', 'DataScienceJobsParser', TEST_URL, ('job', Job, JobLoader)) parser.parse_job_overview() parser.job.load_item() assert dict(parser.job.item) == expected_fields
e874e98c95c26381056aacc731b24193800b8670
ansible-tests/validations-api.py
ansible-tests/validations-api.py
#!/usr/bin/env python from flask import Flask, abort, jsonify import validations app = Flask(__name__) @app.route('/') def index(): return jsonify({"msg": "Hello World!"}) @app.route('/v1/validations/') def list_validations(): result = [{ 'uuid': validation['uuid'], 'ref': '/v1/validations/' + validation['uuid'], 'name': validation['name'], } for validation in validations.get_all().values()] return jsonify({'validations': result}) @app.route('/v1/validations/<uuid>/') def show_validation(uuid): try: validation = validations.get_all()[uuid] return jsonify({ 'uuid': validation['uuid'], 'ref': '/v1/validations/' + validation['uuid'], 'name': validation['name'], 'status': 'new', 'results': [], }) except KeyError: abort(404) @app.route('/v1/validations/<uuid>/run', methods=['PUT']) def run_validation(uuid): return jsonify({ 'TODO': "run the given validation", }) app.run(debug=True)
#!/usr/bin/env python from flask import Flask, abort, json, make_response import validations app = Flask(__name__) def json_response(code, result): # NOTE: flask.jsonify doesn't handle lists, so we need to do it manually: response = make_response(json.dumps(result), code) response.headers['Content-Type'] = 'application/json' response.code = code return response @app.route('/') def index(): return json_response(200, {"TODO": "Print the API routes."}) @app.route('/v1/validations/') def list_validations(): result = [{ 'uuid': validation['uuid'], 'ref': '/v1/validations/' + validation['uuid'], 'name': validation['name'], } for validation in validations.get_all().values()] return json_response(200, result) @app.route('/v1/validations/<uuid>/') def show_validation(uuid): try: validation = validations.get_all()[uuid] return json_response(200, { 'uuid': validation['uuid'], 'ref': '/v1/validations/' + validation['uuid'], 'name': validation['name'], 'status': 'new', 'results': [], }) except KeyError: return json_response(404, {}) @app.route('/v1/validations/<uuid>/run', methods=['PUT']) def run_validation(uuid): try: validation = validations.get_all()[uuid] # TODO: this blocks. Run it in the background results = validations.run(validation) #return json_response(204, results) return json_response(200, results) except KeyError: return json_response(404, {}) app.run(debug=True)
Use a custom response renderer
Use a custom response renderer This lets us specify the error codes and make sure we always return json.
Python
apache-2.0
coolsvap/clapper,coolsvap/clapper,rthallisey/clapper,rthallisey/clapper,coolsvap/clapper
#!/usr/bin/env python from flask import Flask, abort, jsonify import validations app = Flask(__name__) @app.route('/') def index(): return jsonify({"msg": "Hello World!"}) @app.route('/v1/validations/') def list_validations(): result = [{ 'uuid': validation['uuid'], 'ref': '/v1/validations/' + validation['uuid'], 'name': validation['name'], } for validation in validations.get_all().values()] return jsonify({'validations': result}) @app.route('/v1/validations/<uuid>/') def show_validation(uuid): try: validation = validations.get_all()[uuid] return jsonify({ 'uuid': validation['uuid'], 'ref': '/v1/validations/' + validation['uuid'], 'name': validation['name'], 'status': 'new', 'results': [], }) except KeyError: abort(404) @app.route('/v1/validations/<uuid>/run', methods=['PUT']) def run_validation(uuid): return jsonify({ 'TODO': "run the given validation", }) app.run(debug=True) Use a custom response renderer This lets us specify the error codes and make sure we always return json.
#!/usr/bin/env python from flask import Flask, abort, json, make_response import validations app = Flask(__name__) def json_response(code, result): # NOTE: flask.jsonify doesn't handle lists, so we need to do it manually: response = make_response(json.dumps(result), code) response.headers['Content-Type'] = 'application/json' response.code = code return response @app.route('/') def index(): return json_response(200, {"TODO": "Print the API routes."}) @app.route('/v1/validations/') def list_validations(): result = [{ 'uuid': validation['uuid'], 'ref': '/v1/validations/' + validation['uuid'], 'name': validation['name'], } for validation in validations.get_all().values()] return json_response(200, result) @app.route('/v1/validations/<uuid>/') def show_validation(uuid): try: validation = validations.get_all()[uuid] return json_response(200, { 'uuid': validation['uuid'], 'ref': '/v1/validations/' + validation['uuid'], 'name': validation['name'], 'status': 'new', 'results': [], }) except KeyError: return json_response(404, {}) @app.route('/v1/validations/<uuid>/run', methods=['PUT']) def run_validation(uuid): try: validation = validations.get_all()[uuid] # TODO: this blocks. Run it in the background results = validations.run(validation) #return json_response(204, results) return json_response(200, results) except KeyError: return json_response(404, {}) app.run(debug=True)
<commit_before>#!/usr/bin/env python from flask import Flask, abort, jsonify import validations app = Flask(__name__) @app.route('/') def index(): return jsonify({"msg": "Hello World!"}) @app.route('/v1/validations/') def list_validations(): result = [{ 'uuid': validation['uuid'], 'ref': '/v1/validations/' + validation['uuid'], 'name': validation['name'], } for validation in validations.get_all().values()] return jsonify({'validations': result}) @app.route('/v1/validations/<uuid>/') def show_validation(uuid): try: validation = validations.get_all()[uuid] return jsonify({ 'uuid': validation['uuid'], 'ref': '/v1/validations/' + validation['uuid'], 'name': validation['name'], 'status': 'new', 'results': [], }) except KeyError: abort(404) @app.route('/v1/validations/<uuid>/run', methods=['PUT']) def run_validation(uuid): return jsonify({ 'TODO': "run the given validation", }) app.run(debug=True) <commit_msg>Use a custom response renderer This lets us specify the error codes and make sure we always return json.<commit_after>
#!/usr/bin/env python from flask import Flask, abort, json, make_response import validations app = Flask(__name__) def json_response(code, result): # NOTE: flask.jsonify doesn't handle lists, so we need to do it manually: response = make_response(json.dumps(result), code) response.headers['Content-Type'] = 'application/json' response.code = code return response @app.route('/') def index(): return json_response(200, {"TODO": "Print the API routes."}) @app.route('/v1/validations/') def list_validations(): result = [{ 'uuid': validation['uuid'], 'ref': '/v1/validations/' + validation['uuid'], 'name': validation['name'], } for validation in validations.get_all().values()] return json_response(200, result) @app.route('/v1/validations/<uuid>/') def show_validation(uuid): try: validation = validations.get_all()[uuid] return json_response(200, { 'uuid': validation['uuid'], 'ref': '/v1/validations/' + validation['uuid'], 'name': validation['name'], 'status': 'new', 'results': [], }) except KeyError: return json_response(404, {}) @app.route('/v1/validations/<uuid>/run', methods=['PUT']) def run_validation(uuid): try: validation = validations.get_all()[uuid] # TODO: this blocks. Run it in the background results = validations.run(validation) #return json_response(204, results) return json_response(200, results) except KeyError: return json_response(404, {}) app.run(debug=True)
#!/usr/bin/env python from flask import Flask, abort, jsonify import validations app = Flask(__name__) @app.route('/') def index(): return jsonify({"msg": "Hello World!"}) @app.route('/v1/validations/') def list_validations(): result = [{ 'uuid': validation['uuid'], 'ref': '/v1/validations/' + validation['uuid'], 'name': validation['name'], } for validation in validations.get_all().values()] return jsonify({'validations': result}) @app.route('/v1/validations/<uuid>/') def show_validation(uuid): try: validation = validations.get_all()[uuid] return jsonify({ 'uuid': validation['uuid'], 'ref': '/v1/validations/' + validation['uuid'], 'name': validation['name'], 'status': 'new', 'results': [], }) except KeyError: abort(404) @app.route('/v1/validations/<uuid>/run', methods=['PUT']) def run_validation(uuid): return jsonify({ 'TODO': "run the given validation", }) app.run(debug=True) Use a custom response renderer This lets us specify the error codes and make sure we always return json.#!/usr/bin/env python from flask import Flask, abort, json, make_response import validations app = Flask(__name__) def json_response(code, result): # NOTE: flask.jsonify doesn't handle lists, so we need to do it manually: response = make_response(json.dumps(result), code) response.headers['Content-Type'] = 'application/json' response.code = code return response @app.route('/') def index(): return json_response(200, {"TODO": "Print the API routes."}) @app.route('/v1/validations/') def list_validations(): result = [{ 'uuid': validation['uuid'], 'ref': '/v1/validations/' + validation['uuid'], 'name': validation['name'], } for validation in validations.get_all().values()] return json_response(200, result) @app.route('/v1/validations/<uuid>/') def show_validation(uuid): try: validation = validations.get_all()[uuid] return json_response(200, { 'uuid': validation['uuid'], 'ref': '/v1/validations/' + validation['uuid'], 'name': validation['name'], 'status': 'new', 'results': [], }) except KeyError: return json_response(404, {}) @app.route('/v1/validations/<uuid>/run', methods=['PUT']) def run_validation(uuid): try: validation = validations.get_all()[uuid] # TODO: this blocks. Run it in the background results = validations.run(validation) #return json_response(204, results) return json_response(200, results) except KeyError: return json_response(404, {}) app.run(debug=True)
<commit_before>#!/usr/bin/env python from flask import Flask, abort, jsonify import validations app = Flask(__name__) @app.route('/') def index(): return jsonify({"msg": "Hello World!"}) @app.route('/v1/validations/') def list_validations(): result = [{ 'uuid': validation['uuid'], 'ref': '/v1/validations/' + validation['uuid'], 'name': validation['name'], } for validation in validations.get_all().values()] return jsonify({'validations': result}) @app.route('/v1/validations/<uuid>/') def show_validation(uuid): try: validation = validations.get_all()[uuid] return jsonify({ 'uuid': validation['uuid'], 'ref': '/v1/validations/' + validation['uuid'], 'name': validation['name'], 'status': 'new', 'results': [], }) except KeyError: abort(404) @app.route('/v1/validations/<uuid>/run', methods=['PUT']) def run_validation(uuid): return jsonify({ 'TODO': "run the given validation", }) app.run(debug=True) <commit_msg>Use a custom response renderer This lets us specify the error codes and make sure we always return json.<commit_after>#!/usr/bin/env python from flask import Flask, abort, json, make_response import validations app = Flask(__name__) def json_response(code, result): # NOTE: flask.jsonify doesn't handle lists, so we need to do it manually: response = make_response(json.dumps(result), code) response.headers['Content-Type'] = 'application/json' response.code = code return response @app.route('/') def index(): return json_response(200, {"TODO": "Print the API routes."}) @app.route('/v1/validations/') def list_validations(): result = [{ 'uuid': validation['uuid'], 'ref': '/v1/validations/' + validation['uuid'], 'name': validation['name'], } for validation in validations.get_all().values()] return json_response(200, result) @app.route('/v1/validations/<uuid>/') def show_validation(uuid): try: validation = validations.get_all()[uuid] return json_response(200, { 'uuid': validation['uuid'], 'ref': '/v1/validations/' + validation['uuid'], 'name': validation['name'], 'status': 'new', 'results': [], }) except KeyError: return json_response(404, {}) @app.route('/v1/validations/<uuid>/run', methods=['PUT']) def run_validation(uuid): try: validation = validations.get_all()[uuid] # TODO: this blocks. Run it in the background results = validations.run(validation) #return json_response(204, results) return json_response(200, results) except KeyError: return json_response(404, {}) app.run(debug=True)
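The NOTE comment in json_response points at the motivation: older Flask releases refused to jsonify a top-level list, so the helper serializes with json.dumps and builds the response by hand. A self-contained reduction of the same pattern, exercised with Flask's built-in test client (the route and payload are invented for the example):

    from flask import Flask, json, make_response

    app = Flask(__name__)

    def json_response(code, result):
        # Works for lists as well as dicts.
        response = make_response(json.dumps(result), code)
        response.headers['Content-Type'] = 'application/json'
        return response

    @app.route('/numbers/')
    def numbers():
        return json_response(200, [1, 2, 3])

    client = app.test_client()
    print(client.get('/numbers/').data)  # prints: b'[1, 2, 3]'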
9ec8d2b01e0f8aefc9d4c2c82c22af6f8c48a75b
usingnamespace/api/interfaces.py
usingnamespace/api/interfaces.py
from zope.interface import Interface class ISerializer(Interface): """Marker Interface"""
from zope.interface import Interface class ISerializer(Interface): """Marker Interface""" class IDigestMethod(Interface): """Marker Interface"""
Add new marker interface for a digest method
Add new marker interface for a digest method
Python
isc
usingnamespace/usingnamespace
from zope.interface import Interface class ISerializer(Interface): """Marker Interface""" Add new marker interface for a digest method
from zope.interface import Interface class ISerializer(Interface): """Marker Interface""" class IDigestMethod(Interface): """Marker Interface"""
<commit_before>from zope.interface import Interface class ISerializer(Interface): """Marker Interface""" <commit_msg>Add new marker interface for a digest method<commit_after>
from zope.interface import Interface class ISerializer(Interface): """Marker Interface""" class IDigestMethod(Interface): """Marker Interface"""
from zope.interface import Interface class ISerializer(Interface): """Marker Interface""" Add new marker interface for a digest methodfrom zope.interface import Interface class ISerializer(Interface): """Marker Interface""" class IDigestMethod(Interface): """Marker Interface"""
<commit_before>from zope.interface import Interface class ISerializer(Interface): """Marker Interface""" <commit_msg>Add new marker interface for a digest method<commit_after>from zope.interface import Interface class ISerializer(Interface): """Marker Interface""" class IDigestMethod(Interface): """Marker Interface"""
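A marker interface declares no methods; its value is in letting implementations be tagged and queried later, for example during registry lookups. A minimal sketch with zope.interface, where the Sha256Digest class is invented for illustration:

    from zope.interface import Interface, implementer

    class IDigestMethod(Interface):
        """Marker Interface"""

    @implementer(IDigestMethod)
    class Sha256Digest(object):
        pass

    print(IDigestMethod.providedBy(Sha256Digest()))  # True
    print(IDigestMethod.providedBy(object()))        # False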
a748d217ec3f09e8b477203f1a3a0ebf060714d5
scality_sproxyd_client/__init__.py
scality_sproxyd_client/__init__.py
__requires__ = ['eventlet>=0.9.15', 'urllib3>=1.9']
__requires__ = ['eventlet>=0.9.15', 'urllib3>=1.9,<2.0']
Set upper bound on urllib3
Set upper bound on urllib3
Python
apache-2.0
scality/scality-sproxyd-client
__requires__ = ['eventlet>=0.9.15', 'urllib3>=1.9'] Set upper bound on urllib3
__requires__ = ['eventlet>=0.9.15', 'urllib3>=1.9,<2.0']
<commit_before>__requires__ = ['eventlet>=0.9.15', 'urllib3>=1.9'] <commit_msg>Set upper bound on urllib3<commit_after>
__requires__ = ['eventlet>=0.9.15', 'urllib3>=1.9,<2.0']
__requires__ = ['eventlet>=0.9.15', 'urllib3>=1.9'] Set upper bound on urllib3__requires__ = ['eventlet>=0.9.15', 'urllib3>=1.9,<2.0']
<commit_before>__requires__ = ['eventlet>=0.9.15', 'urllib3>=1.9'] <commit_msg>Set upper bound on urllib3<commit_after>__requires__ = ['eventlet>=0.9.15', 'urllib3>=1.9,<2.0']
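The new specifier in this record keeps any urllib3 release at or above 1.9 but below 2.0. One way to see how such a compound specifier evaluates, using the same pkg_resources machinery that consumes __requires__ (the version numbers here are arbitrary):

    import pkg_resources

    requirement = pkg_resources.Requirement.parse('urllib3>=1.9,<2.0')
    print('1.26.5' in requirement)  # True
    print('2.0.1' in requirement)   # False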
343c5eb47510f784588e425619c43df916a40fe7
delivery/services/external_program_service.py
delivery/services/external_program_service.py
import subprocess import atexit from delivery.models.execution import ExecutionResult, Execution class ExternalProgramService(): @staticmethod def run(cmd): p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE) # On exiting the main program, make sure that the subprocess # gets killed. atexit.register(p.terminate) return Execution(pid=p.pid, process_obj=p) @staticmethod def run_and_wait(cmd): execution = ExternalProgramService.run(cmd) out, err = execution.process_obj.communicate() status_code = execution.process_obj.wait() return ExecutionResult(out, err, status_code)
import subprocess from delivery.models.execution import ExecutionResult, Execution class ExternalProgramService(): @staticmethod def run(cmd): p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE) return Execution(pid=p.pid, process_obj=p) @staticmethod def run_and_wait(cmd): execution = ExternalProgramService.run(cmd) out, err = execution.process_obj.communicate() status_code = execution.process_obj.wait() return ExecutionResult(out, err, status_code)
Remove atexit handler; it doesn't work...
Remove atexit handler; it doesn't work...
Python
mit
arteria-project/arteria-delivery
import subprocess import atexit from delivery.models.execution import ExecutionResult, Execution class ExternalProgramService(): @staticmethod def run(cmd): p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE) # On exiting the main program, make sure that the subprocess # gets killed. atexit.register(p.terminate) return Execution(pid=p.pid, process_obj=p) @staticmethod def run_and_wait(cmd): execution = ExternalProgramService.run(cmd) out, err = execution.process_obj.communicate() status_code = execution.process_obj.wait() return ExecutionResult(out, err, status_code) Remove atexit handler; it doesn't work...
import subprocess from delivery.models.execution import ExecutionResult, Execution class ExternalProgramService(): @staticmethod def run(cmd): p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE) return Execution(pid=p.pid, process_obj=p) @staticmethod def run_and_wait(cmd): execution = ExternalProgramService.run(cmd) out, err = execution.process_obj.communicate() status_code = execution.process_obj.wait() return ExecutionResult(out, err, status_code)
<commit_before> import subprocess import atexit from delivery.models.execution import ExecutionResult, Execution class ExternalProgramService(): @staticmethod def run(cmd): p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE) # On exiting the main program, make sure that the subprocess # gets killed. atexit.register(p.terminate) return Execution(pid=p.pid, process_obj=p) @staticmethod def run_and_wait(cmd): execution = ExternalProgramService.run(cmd) out, err = execution.process_obj.communicate() status_code = execution.process_obj.wait() return ExecutionResult(out, err, status_code) <commit_msg>Remove atexit handler; it doesn't work...<commit_after>
import subprocess from delivery.models.execution import ExecutionResult, Execution class ExternalProgramService(): @staticmethod def run(cmd): p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE) return Execution(pid=p.pid, process_obj=p) @staticmethod def run_and_wait(cmd): execution = ExternalProgramService.run(cmd) out, err = execution.process_obj.communicate() status_code = execution.process_obj.wait() return ExecutionResult(out, err, status_code)
import subprocess import atexit from delivery.models.execution import ExecutionResult, Execution class ExternalProgramService(): @staticmethod def run(cmd): p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE) # On exiting the main program, make sure that the subprocess # gets killed. atexit.register(p.terminate) return Execution(pid=p.pid, process_obj=p) @staticmethod def run_and_wait(cmd): execution = ExternalProgramService.run(cmd) out, err = execution.process_obj.communicate() status_code = execution.process_obj.wait() return ExecutionResult(out, err, status_code) Remove atexit handler; it doesn't work... import subprocess from delivery.models.execution import ExecutionResult, Execution class ExternalProgramService(): @staticmethod def run(cmd): p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE) return Execution(pid=p.pid, process_obj=p) @staticmethod def run_and_wait(cmd): execution = ExternalProgramService.run(cmd) out, err = execution.process_obj.communicate() status_code = execution.process_obj.wait() return ExecutionResult(out, err, status_code)
<commit_before> import subprocess import atexit from delivery.models.execution import ExecutionResult, Execution class ExternalProgramService(): @staticmethod def run(cmd): p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE) # On exiting the main program, make sure that the subprocess # gets killed. atexit.register(p.terminate) return Execution(pid=p.pid, process_obj=p) @staticmethod def run_and_wait(cmd): execution = ExternalProgramService.run(cmd) out, err = execution.process_obj.communicate() status_code = execution.process_obj.wait() return ExecutionResult(out, err, status_code) <commit_msg>Remove atexit handler; it doesn't work...<commit_after> import subprocess from delivery.models.execution import ExecutionResult, Execution class ExternalProgramService(): @staticmethod def run(cmd): p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE) return Execution(pid=p.pid, process_obj=p) @staticmethod def run_and_wait(cmd): execution = ExternalProgramService.run(cmd) out, err = execution.process_obj.communicate() status_code = execution.process_obj.wait() return ExecutionResult(out, err, status_code)
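With the atexit registration gone in this record, cleanup rests on run_and_wait, which reaps the child through communicate() and wait(). A self-contained sketch of that surviving pattern, with an arbitrary echo command (POSIX, chosen for brevity) standing in for a real delivery job:

    import subprocess

    p = subprocess.Popen(['echo', 'hello'],
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         stdin=subprocess.PIPE)
    out, err = p.communicate()  # waits for the child and collects output
    status_code = p.wait()      # child already finished; returns exit code
    print(out, status_code)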
bc7de1382f5df8253a3680fbba435a6485148815
main.py
main.py
import logging import json from gather.gatherbot import GatherBot from gather import commands if __name__ == '__main__': logging.basicConfig( level=logging.INFO, format="%(asctime)s,%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s", ) with open('config.json') as f: config = json.load(f) bot = GatherBot() bot.register_action('^!help', commands.bot_help) bot.register_action('^!(?:add|s)', commands.add) bot.register_action('^!(?:remove|unsign)', commands.remove) bot.run(config['token'])
import logging import json from gather.gatherbot import GatherBot from gather import commands if __name__ == '__main__': logging.basicConfig( level=logging.INFO, format="%(asctime)s,%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s", ) with open('config.json') as f: config = json.load(f) bot = GatherBot() bot.register_action('^!help', commands.bot_help) bot.register_action('^!(?:add|s)', commands.add) bot.register_action('^!(?:remove|so)', commands.remove) bot.run(config['token'])
Make remove commands match docstring
Make remove commands match docstring
Python
mit
veryhappythings/discord-gather
import logging import json from gather.gatherbot import GatherBot from gather import commands if __name__ == '__main__': logging.basicConfig( level=logging.INFO, format="%(asctime)s,%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s", ) with open('config.json') as f: config = json.load(f) bot = GatherBot() bot.register_action('^!help', commands.bot_help) bot.register_action('^!(?:add|s)', commands.add) bot.register_action('^!(?:remove|unsign)', commands.remove) bot.run(config['token']) Make remove commands match docstring
import logging import json from gather.gatherbot import GatherBot from gather import commands if __name__ == '__main__': logging.basicConfig( level=logging.INFO, format="%(asctime)s,%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s", ) with open('config.json') as f: config = json.load(f) bot = GatherBot() bot.register_action('^!help', commands.bot_help) bot.register_action('^!(?:add|s)', commands.add) bot.register_action('^!(?:remove|so)', commands.remove) bot.run(config['token'])
<commit_before>import logging import json from gather.gatherbot import GatherBot from gather import commands if __name__ == '__main__': logging.basicConfig( level=logging.INFO, format="%(asctime)s,%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s", ) with open('config.json') as f: config = json.load(f) bot = GatherBot() bot.register_action('^!help', commands.bot_help) bot.register_action('^!(?:add|s)', commands.add) bot.register_action('^!(?:remove|unsign)', commands.remove) bot.run(config['token']) <commit_msg>Make remove commands match docstring<commit_after>
import logging import json from gather.gatherbot import GatherBot from gather import commands if __name__ == '__main__': logging.basicConfig( level=logging.INFO, format="%(asctime)s,%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s", ) with open('config.json') as f: config = json.load(f) bot = GatherBot() bot.register_action('^!help', commands.bot_help) bot.register_action('^!(?:add|s)', commands.add) bot.register_action('^!(?:remove|so)', commands.remove) bot.run(config['token'])
import logging
import json

from gather.gatherbot import GatherBot
from gather import commands


if __name__ == '__main__':
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s,%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s",
    )

    with open('config.json') as f:
        config = json.load(f)

    bot = GatherBot()
    bot.register_action('^!help', commands.bot_help)
    bot.register_action('^!(?:add|s)', commands.add)
    bot.register_action('^!(?:remove|unsign)', commands.remove)
    bot.run(config['token'])
Make remove commands match docstring
import logging
import json

from gather.gatherbot import GatherBot
from gather import commands


if __name__ == '__main__':
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s,%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s",
    )

    with open('config.json') as f:
        config = json.load(f)

    bot = GatherBot()
    bot.register_action('^!help', commands.bot_help)
    bot.register_action('^!(?:add|s)', commands.add)
    bot.register_action('^!(?:remove|so)', commands.remove)
    bot.run(config['token'])
<commit_before>import logging
import json

from gather.gatherbot import GatherBot
from gather import commands


if __name__ == '__main__':
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s,%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s",
    )

    with open('config.json') as f:
        config = json.load(f)

    bot = GatherBot()
    bot.register_action('^!help', commands.bot_help)
    bot.register_action('^!(?:add|s)', commands.add)
    bot.register_action('^!(?:remove|unsign)', commands.remove)
    bot.run(config['token'])
<commit_msg>Make remove commands match docstring<commit_after>import logging
import json

from gather.gatherbot import GatherBot
from gather import commands


if __name__ == '__main__':
    logging.basicConfig(
        level=logging.INFO,
        format="%(asctime)s,%(msecs)03d %(levelname)-5.5s [%(name)s] %(message)s",
    )

    with open('config.json') as f:
        config = json.load(f)

    bot = GatherBot()
    bot.register_action('^!help', commands.bot_help)
    bot.register_action('^!(?:add|s)', commands.add)
    bot.register_action('^!(?:remove|so)', commands.remove)
    bot.run(config['token'])
64d2a7a7e4cd0375efaddc8ef20889755d691b7e
simplenote-backup.py
simplenote-backup.py
import os, sys, json
from simperium.core import Api as SimperiumApi

appname = 'chalk-bump-f49' # Simplenote
token = os.environ['TOKEN']
backup_dir = sys.argv[1] if len(sys.argv) > 1 else (os.path.join(os.environ['HOME'], "Dropbox/SimplenoteBackups"))

print "Starting backup your simplenote to: %s" % backup_dir

if not os.path.exists(backup_dir):
    print "Creating directory: %s" % backup_dir
    os.makedirs(backup_dir)

api = SimperiumApi(appname, token)
#print token
notes = api.note.index(data=True)
for note in notes['index']:
    path = os.path.join(backup_dir, note['id'] + '.txt')
    #print path
    with open(path, "w") as f:
        # print json.dumps(note, indent=2)
        #f.write("id: %s\n" % note['id'])
        f.write(note['d']['content'].encode('utf8'))
        f.write("\n")
        f.write("Tags: %s\n" % ", ".join(note['d']['tags']).encode('utf8'))

print "Done: %d files." % len(notes['index'])
import os, sys, json
from simperium.core import Api as SimperiumApi

appname = 'chalk-bump-f49' # Simplenote
token = os.environ['TOKEN']
backup_dir = sys.argv[1] if len(sys.argv) > 1 else (os.path.join(os.environ['HOME'], "Dropbox/SimplenoteBackups"))

print "Starting backup your simplenote to: %s" % backup_dir

if not os.path.exists(backup_dir):
    print "Creating directory: %s" % backup_dir
    os.makedirs(backup_dir)

api = SimperiumApi(appname, token)
#print token
notes = api.note.index(data=True)
for note in notes['index']:
    path = os.path.join(backup_dir, note['id'] + '.txt')
    #print path
    with open(path, "w") as f:
        # print json.dumps(note, indent=2)
        #f.write("id: %s\n" % note['id'])
        f.write(note['d']['content'].encode('utf8'))
        f.write("\n")
        f.write("Tags: %s\n" % ", ".join(note['d']['tags']).encode('utf8'))
    os.utime(path,(note['d']['modificationDate'],note['d']['modificationDate']))

print "Done: %d files." % len(notes['index'])
Set the modification dates of the created files to that of the notes
Set the modification dates of the created files to that of the notes
Python
mit
hiroshi/simplenote-backup
import os, sys, json
from simperium.core import Api as SimperiumApi

appname = 'chalk-bump-f49' # Simplenote
token = os.environ['TOKEN']
backup_dir = sys.argv[1] if len(sys.argv) > 1 else (os.path.join(os.environ['HOME'], "Dropbox/SimplenoteBackups"))

print "Starting backup your simplenote to: %s" % backup_dir

if not os.path.exists(backup_dir):
    print "Creating directory: %s" % backup_dir
    os.makedirs(backup_dir)

api = SimperiumApi(appname, token)
#print token
notes = api.note.index(data=True)
for note in notes['index']:
    path = os.path.join(backup_dir, note['id'] + '.txt')
    #print path
    with open(path, "w") as f:
        # print json.dumps(note, indent=2)
        #f.write("id: %s\n" % note['id'])
        f.write(note['d']['content'].encode('utf8'))
        f.write("\n")
        f.write("Tags: %s\n" % ", ".join(note['d']['tags']).encode('utf8'))

print "Done: %d files." % len(notes['index'])
Set the modification dates of the created files to that of the notes
import os, sys, json
from simperium.core import Api as SimperiumApi

appname = 'chalk-bump-f49' # Simplenote
token = os.environ['TOKEN']
backup_dir = sys.argv[1] if len(sys.argv) > 1 else (os.path.join(os.environ['HOME'], "Dropbox/SimplenoteBackups"))

print "Starting backup your simplenote to: %s" % backup_dir

if not os.path.exists(backup_dir):
    print "Creating directory: %s" % backup_dir
    os.makedirs(backup_dir)

api = SimperiumApi(appname, token)
#print token
notes = api.note.index(data=True)
for note in notes['index']:
    path = os.path.join(backup_dir, note['id'] + '.txt')
    #print path
    with open(path, "w") as f:
        # print json.dumps(note, indent=2)
        #f.write("id: %s\n" % note['id'])
        f.write(note['d']['content'].encode('utf8'))
        f.write("\n")
        f.write("Tags: %s\n" % ", ".join(note['d']['tags']).encode('utf8'))
    os.utime(path,(note['d']['modificationDate'],note['d']['modificationDate']))

print "Done: %d files." % len(notes['index'])
<commit_before>import os, sys, json
from simperium.core import Api as SimperiumApi

appname = 'chalk-bump-f49' # Simplenote
token = os.environ['TOKEN']
backup_dir = sys.argv[1] if len(sys.argv) > 1 else (os.path.join(os.environ['HOME'], "Dropbox/SimplenoteBackups"))

print "Starting backup your simplenote to: %s" % backup_dir

if not os.path.exists(backup_dir):
    print "Creating directory: %s" % backup_dir
    os.makedirs(backup_dir)

api = SimperiumApi(appname, token)
#print token
notes = api.note.index(data=True)
for note in notes['index']:
    path = os.path.join(backup_dir, note['id'] + '.txt')
    #print path
    with open(path, "w") as f:
        # print json.dumps(note, indent=2)
        #f.write("id: %s\n" % note['id'])
        f.write(note['d']['content'].encode('utf8'))
        f.write("\n")
        f.write("Tags: %s\n" % ", ".join(note['d']['tags']).encode('utf8'))

print "Done: %d files." % len(notes['index'])
<commit_msg>Set the modification dates of the created files to that of the notes<commit_after>
import os, sys, json
from simperium.core import Api as SimperiumApi

appname = 'chalk-bump-f49' # Simplenote
token = os.environ['TOKEN']
backup_dir = sys.argv[1] if len(sys.argv) > 1 else (os.path.join(os.environ['HOME'], "Dropbox/SimplenoteBackups"))

print "Starting backup your simplenote to: %s" % backup_dir

if not os.path.exists(backup_dir):
    print "Creating directory: %s" % backup_dir
    os.makedirs(backup_dir)

api = SimperiumApi(appname, token)
#print token
notes = api.note.index(data=True)
for note in notes['index']:
    path = os.path.join(backup_dir, note['id'] + '.txt')
    #print path
    with open(path, "w") as f:
        # print json.dumps(note, indent=2)
        #f.write("id: %s\n" % note['id'])
        f.write(note['d']['content'].encode('utf8'))
        f.write("\n")
        f.write("Tags: %s\n" % ", ".join(note['d']['tags']).encode('utf8'))
    os.utime(path,(note['d']['modificationDate'],note['d']['modificationDate']))

print "Done: %d files." % len(notes['index'])
import os, sys, json
from simperium.core import Api as SimperiumApi

appname = 'chalk-bump-f49' # Simplenote
token = os.environ['TOKEN']
backup_dir = sys.argv[1] if len(sys.argv) > 1 else (os.path.join(os.environ['HOME'], "Dropbox/SimplenoteBackups"))

print "Starting backup your simplenote to: %s" % backup_dir

if not os.path.exists(backup_dir):
    print "Creating directory: %s" % backup_dir
    os.makedirs(backup_dir)

api = SimperiumApi(appname, token)
#print token
notes = api.note.index(data=True)
for note in notes['index']:
    path = os.path.join(backup_dir, note['id'] + '.txt')
    #print path
    with open(path, "w") as f:
        # print json.dumps(note, indent=2)
        #f.write("id: %s\n" % note['id'])
        f.write(note['d']['content'].encode('utf8'))
        f.write("\n")
        f.write("Tags: %s\n" % ", ".join(note['d']['tags']).encode('utf8'))

print "Done: %d files." % len(notes['index'])
Set the modification dates of the created files to that of the notes
import os, sys, json
from simperium.core import Api as SimperiumApi

appname = 'chalk-bump-f49' # Simplenote
token = os.environ['TOKEN']
backup_dir = sys.argv[1] if len(sys.argv) > 1 else (os.path.join(os.environ['HOME'], "Dropbox/SimplenoteBackups"))

print "Starting backup your simplenote to: %s" % backup_dir

if not os.path.exists(backup_dir):
    print "Creating directory: %s" % backup_dir
    os.makedirs(backup_dir)

api = SimperiumApi(appname, token)
#print token
notes = api.note.index(data=True)
for note in notes['index']:
    path = os.path.join(backup_dir, note['id'] + '.txt')
    #print path
    with open(path, "w") as f:
        # print json.dumps(note, indent=2)
        #f.write("id: %s\n" % note['id'])
        f.write(note['d']['content'].encode('utf8'))
        f.write("\n")
        f.write("Tags: %s\n" % ", ".join(note['d']['tags']).encode('utf8'))
    os.utime(path,(note['d']['modificationDate'],note['d']['modificationDate']))

print "Done: %d files." % len(notes['index'])
<commit_before>import os, sys, json
from simperium.core import Api as SimperiumApi

appname = 'chalk-bump-f49' # Simplenote
token = os.environ['TOKEN']
backup_dir = sys.argv[1] if len(sys.argv) > 1 else (os.path.join(os.environ['HOME'], "Dropbox/SimplenoteBackups"))

print "Starting backup your simplenote to: %s" % backup_dir

if not os.path.exists(backup_dir):
    print "Creating directory: %s" % backup_dir
    os.makedirs(backup_dir)

api = SimperiumApi(appname, token)
#print token
notes = api.note.index(data=True)
for note in notes['index']:
    path = os.path.join(backup_dir, note['id'] + '.txt')
    #print path
    with open(path, "w") as f:
        # print json.dumps(note, indent=2)
        #f.write("id: %s\n" % note['id'])
        f.write(note['d']['content'].encode('utf8'))
        f.write("\n")
        f.write("Tags: %s\n" % ", ".join(note['d']['tags']).encode('utf8'))

print "Done: %d files." % len(notes['index'])
<commit_msg>Set the modification dates of the created files to that of the notes<commit_after>import os, sys, json
from simperium.core import Api as SimperiumApi

appname = 'chalk-bump-f49' # Simplenote
token = os.environ['TOKEN']
backup_dir = sys.argv[1] if len(sys.argv) > 1 else (os.path.join(os.environ['HOME'], "Dropbox/SimplenoteBackups"))

print "Starting backup your simplenote to: %s" % backup_dir

if not os.path.exists(backup_dir):
    print "Creating directory: %s" % backup_dir
    os.makedirs(backup_dir)

api = SimperiumApi(appname, token)
#print token
notes = api.note.index(data=True)
for note in notes['index']:
    path = os.path.join(backup_dir, note['id'] + '.txt')
    #print path
    with open(path, "w") as f:
        # print json.dumps(note, indent=2)
        #f.write("id: %s\n" % note['id'])
        f.write(note['d']['content'].encode('utf8'))
        f.write("\n")
        f.write("Tags: %s\n" % ", ".join(note['d']['tags']).encode('utf8'))
    os.utime(path,(note['d']['modificationDate'],note['d']['modificationDate']))

print "Done: %d files." % len(notes['index'])
476754a381fe38a0bbe6e3c7892c59a6cfa47db1
openedx/features/job_board/models.py
openedx/features/job_board/models.py
from django.db import models
from django_countries.fields import CountryField
from model_utils.models import TimeStampedModel

from .constants import JOB_COMPENSATION_CHOICES, JOB_HOURS_CHOICES, JOB_TYPE_CHOICES


class Job(TimeStampedModel):
    """
    This model contains all the fields related to a job
    being posted on the job board.
    """
    title = models.CharField(max_length=255)
    company = models.CharField(max_length=255)
    type = models.CharField(max_length=255, choices=JOB_TYPE_CHOICES)
    compensation = models.CharField(max_length=255, choices=JOB_COMPENSATION_CHOICES)
    hours = models.CharField(max_length=255, choices=JOB_HOURS_CHOICES)
    city = models.CharField(max_length=255)
    country = CountryField()
    description = models.TextField()
    function = models.TextField(blank=True, null=True)
    responsibilities = models.TextField(blank=True, null=True)
    website_link = models.URLField(max_length=255, blank=True, null=True)
    contact_email = models.EmailField(max_length=255)
    logo = models.ImageField(upload_to='job-board/uploaded-logos/', blank=True, null=True)

    @property
    def location(self):
        """Get the full location (city, country) of job."""
        return '{city}, {country}'.format(city=self.city, country=self.country.name)
from django.db import models
from django_countries.fields import CountryField
from model_utils.models import TimeStampedModel

from .constants import JOB_COMPENSATION_CHOICES, JOB_HOURS_CHOICES, JOB_TYPE_CHOICES


class Job(TimeStampedModel):
    """
    This model contains all the fields related to a job
    being posted on the job board.
    """
    title = models.CharField(max_length=255)
    company = models.CharField(max_length=255)
    type = models.CharField(max_length=255, choices=JOB_TYPE_CHOICES)
    compensation = models.CharField(max_length=255, choices=JOB_COMPENSATION_CHOICES)
    hours = models.CharField(max_length=255, choices=JOB_HOURS_CHOICES)
    city = models.CharField(max_length=255)
    country = CountryField()
    description = models.TextField()
    function = models.TextField(blank=True, null=True)
    responsibilities = models.TextField(blank=True, null=True)
    website_link = models.URLField(max_length=255, blank=True, null=True)
    contact_email = models.EmailField(max_length=255)
    logo = models.ImageField(upload_to='job-board/uploaded-logos/', blank=True, null=True)

    @property
    def location(self):
        """Get the full location (city, country) of job."""
        return '{city}, {country}'.format(city=self.city, country=self.country.name.encode('utf-8'))
Add support for countries with accented characters
Add support for countries with accented characters
Python
agpl-3.0
philanthropy-u/edx-platform,philanthropy-u/edx-platform,philanthropy-u/edx-platform,philanthropy-u/edx-platform
from django.db import models
from django_countries.fields import CountryField
from model_utils.models import TimeStampedModel

from .constants import JOB_COMPENSATION_CHOICES, JOB_HOURS_CHOICES, JOB_TYPE_CHOICES


class Job(TimeStampedModel):
    """
    This model contains all the fields related to a job
    being posted on the job board.
    """
    title = models.CharField(max_length=255)
    company = models.CharField(max_length=255)
    type = models.CharField(max_length=255, choices=JOB_TYPE_CHOICES)
    compensation = models.CharField(max_length=255, choices=JOB_COMPENSATION_CHOICES)
    hours = models.CharField(max_length=255, choices=JOB_HOURS_CHOICES)
    city = models.CharField(max_length=255)
    country = CountryField()
    description = models.TextField()
    function = models.TextField(blank=True, null=True)
    responsibilities = models.TextField(blank=True, null=True)
    website_link = models.URLField(max_length=255, blank=True, null=True)
    contact_email = models.EmailField(max_length=255)
    logo = models.ImageField(upload_to='job-board/uploaded-logos/', blank=True, null=True)

    @property
    def location(self):
        """Get the full location (city, country) of job."""
        return '{city}, {country}'.format(city=self.city, country=self.country.name)
Add support for countries with accented characters
from django.db import models
from django_countries.fields import CountryField
from model_utils.models import TimeStampedModel

from .constants import JOB_COMPENSATION_CHOICES, JOB_HOURS_CHOICES, JOB_TYPE_CHOICES


class Job(TimeStampedModel):
    """
    This model contains all the fields related to a job
    being posted on the job board.
    """
    title = models.CharField(max_length=255)
    company = models.CharField(max_length=255)
    type = models.CharField(max_length=255, choices=JOB_TYPE_CHOICES)
    compensation = models.CharField(max_length=255, choices=JOB_COMPENSATION_CHOICES)
    hours = models.CharField(max_length=255, choices=JOB_HOURS_CHOICES)
    city = models.CharField(max_length=255)
    country = CountryField()
    description = models.TextField()
    function = models.TextField(blank=True, null=True)
    responsibilities = models.TextField(blank=True, null=True)
    website_link = models.URLField(max_length=255, blank=True, null=True)
    contact_email = models.EmailField(max_length=255)
    logo = models.ImageField(upload_to='job-board/uploaded-logos/', blank=True, null=True)

    @property
    def location(self):
        """Get the full location (city, country) of job."""
        return '{city}, {country}'.format(city=self.city, country=self.country.name.encode('utf-8'))
<commit_before>from django.db import models
from django_countries.fields import CountryField
from model_utils.models import TimeStampedModel

from .constants import JOB_COMPENSATION_CHOICES, JOB_HOURS_CHOICES, JOB_TYPE_CHOICES


class Job(TimeStampedModel):
    """
    This model contains all the fields related to a job
    being posted on the job board.
    """
    title = models.CharField(max_length=255)
    company = models.CharField(max_length=255)
    type = models.CharField(max_length=255, choices=JOB_TYPE_CHOICES)
    compensation = models.CharField(max_length=255, choices=JOB_COMPENSATION_CHOICES)
    hours = models.CharField(max_length=255, choices=JOB_HOURS_CHOICES)
    city = models.CharField(max_length=255)
    country = CountryField()
    description = models.TextField()
    function = models.TextField(blank=True, null=True)
    responsibilities = models.TextField(blank=True, null=True)
    website_link = models.URLField(max_length=255, blank=True, null=True)
    contact_email = models.EmailField(max_length=255)
    logo = models.ImageField(upload_to='job-board/uploaded-logos/', blank=True, null=True)

    @property
    def location(self):
        """Get the full location (city, country) of job."""
        return '{city}, {country}'.format(city=self.city, country=self.country.name)
<commit_msg>Add support for countries with accented characters<commit_after>
from django.db import models
from django_countries.fields import CountryField
from model_utils.models import TimeStampedModel

from .constants import JOB_COMPENSATION_CHOICES, JOB_HOURS_CHOICES, JOB_TYPE_CHOICES


class Job(TimeStampedModel):
    """
    This model contains all the fields related to a job
    being posted on the job board.
    """
    title = models.CharField(max_length=255)
    company = models.CharField(max_length=255)
    type = models.CharField(max_length=255, choices=JOB_TYPE_CHOICES)
    compensation = models.CharField(max_length=255, choices=JOB_COMPENSATION_CHOICES)
    hours = models.CharField(max_length=255, choices=JOB_HOURS_CHOICES)
    city = models.CharField(max_length=255)
    country = CountryField()
    description = models.TextField()
    function = models.TextField(blank=True, null=True)
    responsibilities = models.TextField(blank=True, null=True)
    website_link = models.URLField(max_length=255, blank=True, null=True)
    contact_email = models.EmailField(max_length=255)
    logo = models.ImageField(upload_to='job-board/uploaded-logos/', blank=True, null=True)

    @property
    def location(self):
        """Get the full location (city, country) of job."""
        return '{city}, {country}'.format(city=self.city, country=self.country.name.encode('utf-8'))
from django.db import models
from django_countries.fields import CountryField
from model_utils.models import TimeStampedModel

from .constants import JOB_COMPENSATION_CHOICES, JOB_HOURS_CHOICES, JOB_TYPE_CHOICES


class Job(TimeStampedModel):
    """
    This model contains all the fields related to a job
    being posted on the job board.
    """
    title = models.CharField(max_length=255)
    company = models.CharField(max_length=255)
    type = models.CharField(max_length=255, choices=JOB_TYPE_CHOICES)
    compensation = models.CharField(max_length=255, choices=JOB_COMPENSATION_CHOICES)
    hours = models.CharField(max_length=255, choices=JOB_HOURS_CHOICES)
    city = models.CharField(max_length=255)
    country = CountryField()
    description = models.TextField()
    function = models.TextField(blank=True, null=True)
    responsibilities = models.TextField(blank=True, null=True)
    website_link = models.URLField(max_length=255, blank=True, null=True)
    contact_email = models.EmailField(max_length=255)
    logo = models.ImageField(upload_to='job-board/uploaded-logos/', blank=True, null=True)

    @property
    def location(self):
        """Get the full location (city, country) of job."""
        return '{city}, {country}'.format(city=self.city, country=self.country.name)
Add support for countries with accented characters
from django.db import models
from django_countries.fields import CountryField
from model_utils.models import TimeStampedModel

from .constants import JOB_COMPENSATION_CHOICES, JOB_HOURS_CHOICES, JOB_TYPE_CHOICES


class Job(TimeStampedModel):
    """
    This model contains all the fields related to a job
    being posted on the job board.
    """
    title = models.CharField(max_length=255)
    company = models.CharField(max_length=255)
    type = models.CharField(max_length=255, choices=JOB_TYPE_CHOICES)
    compensation = models.CharField(max_length=255, choices=JOB_COMPENSATION_CHOICES)
    hours = models.CharField(max_length=255, choices=JOB_HOURS_CHOICES)
    city = models.CharField(max_length=255)
    country = CountryField()
    description = models.TextField()
    function = models.TextField(blank=True, null=True)
    responsibilities = models.TextField(blank=True, null=True)
    website_link = models.URLField(max_length=255, blank=True, null=True)
    contact_email = models.EmailField(max_length=255)
    logo = models.ImageField(upload_to='job-board/uploaded-logos/', blank=True, null=True)

    @property
    def location(self):
        """Get the full location (city, country) of job."""
        return '{city}, {country}'.format(city=self.city, country=self.country.name.encode('utf-8'))
<commit_before>from django.db import models
from django_countries.fields import CountryField
from model_utils.models import TimeStampedModel

from .constants import JOB_COMPENSATION_CHOICES, JOB_HOURS_CHOICES, JOB_TYPE_CHOICES


class Job(TimeStampedModel):
    """
    This model contains all the fields related to a job
    being posted on the job board.
    """
    title = models.CharField(max_length=255)
    company = models.CharField(max_length=255)
    type = models.CharField(max_length=255, choices=JOB_TYPE_CHOICES)
    compensation = models.CharField(max_length=255, choices=JOB_COMPENSATION_CHOICES)
    hours = models.CharField(max_length=255, choices=JOB_HOURS_CHOICES)
    city = models.CharField(max_length=255)
    country = CountryField()
    description = models.TextField()
    function = models.TextField(blank=True, null=True)
    responsibilities = models.TextField(blank=True, null=True)
    website_link = models.URLField(max_length=255, blank=True, null=True)
    contact_email = models.EmailField(max_length=255)
    logo = models.ImageField(upload_to='job-board/uploaded-logos/', blank=True, null=True)

    @property
    def location(self):
        """Get the full location (city, country) of job."""
        return '{city}, {country}'.format(city=self.city, country=self.country.name)
<commit_msg>Add support for countries with accented characters<commit_after>from django.db import models
from django_countries.fields import CountryField
from model_utils.models import TimeStampedModel

from .constants import JOB_COMPENSATION_CHOICES, JOB_HOURS_CHOICES, JOB_TYPE_CHOICES


class Job(TimeStampedModel):
    """
    This model contains all the fields related to a job
    being posted on the job board.
    """
    title = models.CharField(max_length=255)
    company = models.CharField(max_length=255)
    type = models.CharField(max_length=255, choices=JOB_TYPE_CHOICES)
    compensation = models.CharField(max_length=255, choices=JOB_COMPENSATION_CHOICES)
    hours = models.CharField(max_length=255, choices=JOB_HOURS_CHOICES)
    city = models.CharField(max_length=255)
    country = CountryField()
    description = models.TextField()
    function = models.TextField(blank=True, null=True)
    responsibilities = models.TextField(blank=True, null=True)
    website_link = models.URLField(max_length=255, blank=True, null=True)
    contact_email = models.EmailField(max_length=255)
    logo = models.ImageField(upload_to='job-board/uploaded-logos/', blank=True, null=True)

    @property
    def location(self):
        """Get the full location (city, country) of job."""
        return '{city}, {country}'.format(city=self.city, country=self.country.name.encode('utf-8'))
d0c82bdd2d7e801c5bebc8ef0d87ed436e29fb82
wiblog/formatting.py
wiblog/formatting.py
from django.utils.safestring import mark_safe
import CommonMark


# Convert a markdown string into HTML5, and prevent Django from escaping it
def mdToHTML(value):
    parser = CommonMark.Parser()
    renderer = CommonMark.HTMLRenderer()
    ast = parser.parse(value)

    return mark_safe(renderer.render(ast))


# Get a summary of a post
def summarize(fullBody):
    firstNewline = fullBody.find("\n")

    if firstNewline > 0:
        return fullBody[:firstNewline]

    return fullBody
from django.utils.safestring import mark_safe
import CommonMark


# Convert a markdown string into HTML5, and prevent Django from escaping it
def mdToHTML(value):
    return mark_safe(CommonMark.commonmark(value))


# Get a summary of a post
def summarize(fullBody):
    firstNewline = fullBody.find("\n")

    if firstNewline > 0:
        return fullBody[:firstNewline]

    return fullBody
Remove full CommonMark syntax for a simplified version
Remove full CommonMark syntax for a simplified version
Python
agpl-3.0
lo-windigo/fragdev,lo-windigo/fragdev
from django.utils.safestring import mark_safe
import CommonMark


# Convert a markdown string into HTML5, and prevent Django from escaping it
def mdToHTML(value):
    parser = CommonMark.Parser()
    renderer = CommonMark.HTMLRenderer()
    ast = parser.parse(value)

    return mark_safe(renderer.render(ast))


# Get a summary of a post
def summarize(fullBody):
    firstNewline = fullBody.find("\n")

    if firstNewline > 0:
        return fullBody[:firstNewline]

    return fullBody
Remove full CommonMark syntax for a simplified version
from django.utils.safestring import mark_safe
import CommonMark


# Convert a markdown string into HTML5, and prevent Django from escaping it
def mdToHTML(value):
    return mark_safe(CommonMark.commonmark(value))


# Get a summary of a post
def summarize(fullBody):
    firstNewline = fullBody.find("\n")

    if firstNewline > 0:
        return fullBody[:firstNewline]

    return fullBody
<commit_before>from django.utils.safestring import mark_safe
import CommonMark


# Convert a markdown string into HTML5, and prevent Django from escaping it
def mdToHTML(value):
    parser = CommonMark.Parser()
    renderer = CommonMark.HTMLRenderer()
    ast = parser.parse(value)

    return mark_safe(renderer.render(ast))


# Get a summary of a post
def summarize(fullBody):
    firstNewline = fullBody.find("\n")

    if firstNewline > 0:
        return fullBody[:firstNewline]

    return fullBody
<commit_msg>Remove full CommonMark syntax for a simplified version<commit_after>
from django.utils.safestring import mark_safe
import CommonMark


# Convert a markdown string into HTML5, and prevent Django from escaping it
def mdToHTML(value):
    return mark_safe(CommonMark.commonmark(value))


# Get a summary of a post
def summarize(fullBody):
    firstNewline = fullBody.find("\n")

    if firstNewline > 0:
        return fullBody[:firstNewline]

    return fullBody
from django.utils.safestring import mark_safe
import CommonMark


# Convert a markdown string into HTML5, and prevent Django from escaping it
def mdToHTML(value):
    parser = CommonMark.Parser()
    renderer = CommonMark.HTMLRenderer()
    ast = parser.parse(value)

    return mark_safe(renderer.render(ast))


# Get a summary of a post
def summarize(fullBody):
    firstNewline = fullBody.find("\n")

    if firstNewline > 0:
        return fullBody[:firstNewline]

    return fullBody
Remove full CommonMark syntax for a simplified version
from django.utils.safestring import mark_safe
import CommonMark


# Convert a markdown string into HTML5, and prevent Django from escaping it
def mdToHTML(value):
    return mark_safe(CommonMark.commonmark(value))


# Get a summary of a post
def summarize(fullBody):
    firstNewline = fullBody.find("\n")

    if firstNewline > 0:
        return fullBody[:firstNewline]

    return fullBody
<commit_before>from django.utils.safestring import mark_safe
import CommonMark


# Convert a markdown string into HTML5, and prevent Django from escaping it
def mdToHTML(value):
    parser = CommonMark.Parser()
    renderer = CommonMark.HTMLRenderer()
    ast = parser.parse(value)

    return mark_safe(renderer.render(ast))


# Get a summary of a post
def summarize(fullBody):
    firstNewline = fullBody.find("\n")

    if firstNewline > 0:
        return fullBody[:firstNewline]

    return fullBody
<commit_msg>Remove full CommonMark syntax for a simplified version<commit_after>from django.utils.safestring import mark_safe
import CommonMark


# Convert a markdown string into HTML5, and prevent Django from escaping it
def mdToHTML(value):
    return mark_safe(CommonMark.commonmark(value))


# Get a summary of a post
def summarize(fullBody):
    firstNewline = fullBody.find("\n")

    if firstNewline > 0:
        return fullBody[:firstNewline]

    return fullBody
fb5583562f9c48c82d51b24f901a5111542eb1a9
website/project/spam/__init__.py
website/project/spam/__init__.py
from celery.utils.log import get_task_logger

from framework.celery_tasks import app as celery_app

from website import settings
from website.util import akismet

logger = get_task_logger(__name__)


@celery_app.task
def _check_for_spam(node_id, content, author_info, request_headers, flag=True):
    client = akismet.AkismetClient(
        apikey=settings.AKISMET_APIKEY,
        website=settings.DOMAIN,
        verify=True
    )
    is_possible_spam, pro_tip = client.check_comment(
        user_ip=request_headers['Remote-Addr'],
        user_agent=request_headers['User-Agent'],
        referrer=request_headers.get('Referrer'),
        comment_content=content,
        comment_author=author_info['name'],
        comment_author_email=author_info['email']
    )
    if is_possible_spam:
        from website.project.model import Node
        node = Node.load(node_id)
        logger.info("Node '{}' ({}) smells like spam".format(node.title, node._id))
        if flag:
            node.flag_spam(save=True)
        else:
            return True
    else:
        logger.info('Node {} smells like ham'.format(node_id))
        return False


def check_node_for_spam(document, creator, request_headers, flag=True, async=True):
    content = """
    {}
    {}
    {}
    """.format(
        document['title'],
        document['description'],
        '\n'.join(document['wikis'].values())
    )
    if async:
        _check_for_spam.delay(document['id'], content, {
            'email': creator.username,
            'name': creator.fullname
        }, request_headers)
    else:
        return _check_for_spam(document['id'], content, {
            'email': creator.username,
            'name': creator.fullname
        }, request_headers)
Add logging to spam tasks
Add logging to spam tasks [#OSF-6977] [skip ci]
Python
apache-2.0
cslzchen/osf.io,binoculars/osf.io,alexschiller/osf.io,Johnetordoff/osf.io,rdhyee/osf.io,chennan47/osf.io,pattisdr/osf.io,caneruguz/osf.io,erinspace/osf.io,mfraezz/osf.io,erinspace/osf.io,crcresearch/osf.io,Johnetordoff/osf.io,sloria/osf.io,aaxelb/osf.io,cwisecarver/osf.io,caneruguz/osf.io,monikagrabowska/osf.io,hmoco/osf.io,CenterForOpenScience/osf.io,mluo613/osf.io,pattisdr/osf.io,felliott/osf.io,adlius/osf.io,Nesiehr/osf.io,chennan47/osf.io,baylee-d/osf.io,HalcyonChimera/osf.io,emetsger/osf.io,CenterForOpenScience/osf.io,felliott/osf.io,monikagrabowska/osf.io,cwisecarver/osf.io,mfraezz/osf.io,alexschiller/osf.io,HalcyonChimera/osf.io,hmoco/osf.io,CenterForOpenScience/osf.io,baylee-d/osf.io,TomBaxter/osf.io,laurenrevere/osf.io,aaxelb/osf.io,adlius/osf.io,rdhyee/osf.io,adlius/osf.io,leb2dg/osf.io,chrisseto/osf.io,cwisecarver/osf.io,hmoco/osf.io,rdhyee/osf.io,mluo613/osf.io,laurenrevere/osf.io,acshi/osf.io,sloria/osf.io,mfraezz/osf.io,alexschiller/osf.io,emetsger/osf.io,icereval/osf.io,acshi/osf.io,binoculars/osf.io,mluo613/osf.io,chrisseto/osf.io,caseyrollins/osf.io,saradbowman/osf.io,monikagrabowska/osf.io,brianjgeiger/osf.io,aaxelb/osf.io,acshi/osf.io,HalcyonChimera/osf.io,emetsger/osf.io,saradbowman/osf.io,chrisseto/osf.io,mfraezz/osf.io,TomBaxter/osf.io,hmoco/osf.io,felliott/osf.io,leb2dg/osf.io,caseyrollins/osf.io,alexschiller/osf.io,adlius/osf.io,brianjgeiger/osf.io,mattclark/osf.io,mattclark/osf.io,icereval/osf.io,TomBaxter/osf.io,emetsger/osf.io,pattisdr/osf.io,laurenrevere/osf.io,Nesiehr/osf.io,cslzchen/osf.io,chennan47/osf.io,CenterForOpenScience/osf.io,alexschiller/osf.io,felliott/osf.io,brianjgeiger/osf.io,icereval/osf.io,leb2dg/osf.io,acshi/osf.io,crcresearch/osf.io,caseyrollins/osf.io,monikagrabowska/osf.io,caneruguz/osf.io,brianjgeiger/osf.io,rdhyee/osf.io,mluo613/osf.io,Nesiehr/osf.io,crcresearch/osf.io,aaxelb/osf.io,baylee-d/osf.io,cslzchen/osf.io,chrisseto/osf.io,caneruguz/osf.io,Johnetordoff/osf.io,cwisecarver/osf.io,mattclark/osf.io,Johnetordoff/osf.io,HalcyonChimera/osf.io,erinspace/osf.io,cslzchen/osf.io,mluo613/osf.io,binoculars/osf.io,monikagrabowska/osf.io,leb2dg/osf.io,sloria/osf.io,acshi/osf.io,Nesiehr/osf.io
Add logging to spam tasks [#OSF-6977] [skip ci]
from celery.utils.log import get_task_logger

from framework.celery_tasks import app as celery_app

from website import settings
from website.util import akismet

logger = get_task_logger(__name__)


@celery_app.task
def _check_for_spam(node_id, content, author_info, request_headers, flag=True):
    client = akismet.AkismetClient(
        apikey=settings.AKISMET_APIKEY,
        website=settings.DOMAIN,
        verify=True
    )
    is_possible_spam, pro_tip = client.check_comment(
        user_ip=request_headers['Remote-Addr'],
        user_agent=request_headers['User-Agent'],
        referrer=request_headers.get('Referrer'),
        comment_content=content,
        comment_author=author_info['name'],
        comment_author_email=author_info['email']
    )
    if is_possible_spam:
        from website.project.model import Node
        node = Node.load(node_id)
        logger.info("Node '{}' ({}) smells like spam".format(node.title, node._id))
        if flag:
            node.flag_spam(save=True)
        else:
            return True
    else:
        logger.info('Node {} smells like ham'.format(node_id))
        return False


def check_node_for_spam(document, creator, request_headers, flag=True, async=True):
    content = """
    {}
    {}
    {}
    """.format(
        document['title'],
        document['description'],
        '\n'.join(document['wikis'].values())
    )
    if async:
        _check_for_spam.delay(document['id'], content, {
            'email': creator.username,
            'name': creator.fullname
        }, request_headers)
    else:
        return _check_for_spam(document['id'], content, {
            'email': creator.username,
            'name': creator.fullname
        }, request_headers)
<commit_before><commit_msg>Add logging to spam tasks [#OSF-6977] [skip ci]<commit_after>
from celery.utils.log import get_task_logger

from framework.celery_tasks import app as celery_app

from website import settings
from website.util import akismet

logger = get_task_logger(__name__)


@celery_app.task
def _check_for_spam(node_id, content, author_info, request_headers, flag=True):
    client = akismet.AkismetClient(
        apikey=settings.AKISMET_APIKEY,
        website=settings.DOMAIN,
        verify=True
    )
    is_possible_spam, pro_tip = client.check_comment(
        user_ip=request_headers['Remote-Addr'],
        user_agent=request_headers['User-Agent'],
        referrer=request_headers.get('Referrer'),
        comment_content=content,
        comment_author=author_info['name'],
        comment_author_email=author_info['email']
    )
    if is_possible_spam:
        from website.project.model import Node
        node = Node.load(node_id)
        logger.info("Node '{}' ({}) smells like spam".format(node.title, node._id))
        if flag:
            node.flag_spam(save=True)
        else:
            return True
    else:
        logger.info('Node {} smells like ham'.format(node_id))
        return False


def check_node_for_spam(document, creator, request_headers, flag=True, async=True):
    content = """
    {}
    {}
    {}
    """.format(
        document['title'],
        document['description'],
        '\n'.join(document['wikis'].values())
    )
    if async:
        _check_for_spam.delay(document['id'], content, {
            'email': creator.username,
            'name': creator.fullname
        }, request_headers)
    else:
        return _check_for_spam(document['id'], content, {
            'email': creator.username,
            'name': creator.fullname
        }, request_headers)
Add logging to spam tasks [#OSF-6977] [skip ci]
from celery.utils.log import get_task_logger

from framework.celery_tasks import app as celery_app

from website import settings
from website.util import akismet

logger = get_task_logger(__name__)


@celery_app.task
def _check_for_spam(node_id, content, author_info, request_headers, flag=True):
    client = akismet.AkismetClient(
        apikey=settings.AKISMET_APIKEY,
        website=settings.DOMAIN,
        verify=True
    )
    is_possible_spam, pro_tip = client.check_comment(
        user_ip=request_headers['Remote-Addr'],
        user_agent=request_headers['User-Agent'],
        referrer=request_headers.get('Referrer'),
        comment_content=content,
        comment_author=author_info['name'],
        comment_author_email=author_info['email']
    )
    if is_possible_spam:
        from website.project.model import Node
        node = Node.load(node_id)
        logger.info("Node '{}' ({}) smells like spam".format(node.title, node._id))
        if flag:
            node.flag_spam(save=True)
        else:
            return True
    else:
        logger.info('Node {} smells like ham'.format(node_id))
        return False


def check_node_for_spam(document, creator, request_headers, flag=True, async=True):
    content = """
    {}
    {}
    {}
    """.format(
        document['title'],
        document['description'],
        '\n'.join(document['wikis'].values())
    )
    if async:
        _check_for_spam.delay(document['id'], content, {
            'email': creator.username,
            'name': creator.fullname
        }, request_headers)
    else:
        return _check_for_spam(document['id'], content, {
            'email': creator.username,
            'name': creator.fullname
        }, request_headers)
<commit_before><commit_msg>Add logging to spam tasks [#OSF-6977] [skip ci]<commit_after>from celery.utils.log import get_task_logger

from framework.celery_tasks import app as celery_app

from website import settings
from website.util import akismet

logger = get_task_logger(__name__)


@celery_app.task
def _check_for_spam(node_id, content, author_info, request_headers, flag=True):
    client = akismet.AkismetClient(
        apikey=settings.AKISMET_APIKEY,
        website=settings.DOMAIN,
        verify=True
    )
    is_possible_spam, pro_tip = client.check_comment(
        user_ip=request_headers['Remote-Addr'],
        user_agent=request_headers['User-Agent'],
        referrer=request_headers.get('Referrer'),
        comment_content=content,
        comment_author=author_info['name'],
        comment_author_email=author_info['email']
    )
    if is_possible_spam:
        from website.project.model import Node
        node = Node.load(node_id)
        logger.info("Node '{}' ({}) smells like spam".format(node.title, node._id))
        if flag:
            node.flag_spam(save=True)
        else:
            return True
    else:
        logger.info('Node {} smells like ham'.format(node_id))
        return False


def check_node_for_spam(document, creator, request_headers, flag=True, async=True):
    content = """
    {}
    {}
    {}
    """.format(
        document['title'],
        document['description'],
        '\n'.join(document['wikis'].values())
    )
    if async:
        _check_for_spam.delay(document['id'], content, {
            'email': creator.username,
            'name': creator.fullname
        }, request_headers)
    else:
        return _check_for_spam(document['id'], content, {
            'email': creator.username,
            'name': creator.fullname
        }, request_headers)
74e5a3e347fee91993604dd8407c17fe05da346b
zerrenda/settings/development.py
zerrenda/settings/development.py
from zerrenda.settings import * DEBUG = True INSTALLED_APPS = ( 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles' )
from zerrenda.settings import *

DEBUG = True

INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles'
)
Remove duplicate INSTALLED_APPS from settings
Remove duplicate INSTALLED_APPS from settings
Python
mit
ajoyoommen/zerrenda,ajoyoommen/zerrenda
from zerrenda.settings import *

DEBUG = True

INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles'
)
Remove duplicate INSTALLED_APPS from settings
from zerrenda.settings import *

DEBUG = True
<commit_before>from zerrenda.settings import *

DEBUG = True

INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles'
)
<commit_msg>Remove duplicate INSTALLED_APPS from settings<commit_after>
from zerrenda.settings import *

DEBUG = True
from zerrenda.settings import *

DEBUG = True

INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles'
)
Remove duplicate INSTALLED_APPS from settings
from zerrenda.settings import *

DEBUG = True
<commit_before>from zerrenda.settings import *

DEBUG = True

INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles'
)
<commit_msg>Remove duplicate INSTALLED_APPS from settings<commit_after>from zerrenda.settings import *

DEBUG = True
2fcf9131bca907d79d96c622d015ba99de038e8d
zou/app/stores/publisher_store.py
zou/app/stores/publisher_store.py
from flask_socketio import SocketIO
from flask import current_app

from zou.app import config

host = config.KEY_VALUE_STORE["host"]
port = config.KEY_VALUE_STORE["port"]
redis_db = config.KV_EVENTS_DB_INDEX
redis_url = "redis://%s:%s/%s" % (host, port, redis_db)

socketio = None


def publish(event, data):
    data = {
        "type": event,
        "data": data
    }
    if socketio is not None:
        socketio.emit("event", data, namespace="/events")
    else:
        current_app.logger.error(
            "Publisher store not initialized, run init() befor emitting events"
        )


def init():
    """
    Initialize key value store that will be used for the event publishing.
    That way the main API takes advantage of Redis pub/sub capabilities to
    push events to the event stream API.
    """
    global socketio
    socketio = SocketIO(message_queue=redis_url)
    return socketio
from flask_socketio import SocketIO
from flask import current_app

from zou.app import config

host = config.KEY_VALUE_STORE["host"]
port = config.KEY_VALUE_STORE["port"]
redis_db = config.KV_EVENTS_DB_INDEX
redis_url = "redis://%s:%s/%s" % (host, port, redis_db)

socketio = None


def publish(event, data):
    if socketio is not None:
        socketio.emit(event, data, namespace="/events")
    else:
        current_app.logger.error(
            "Publisher store not initialized, run init() befor emitting events"
        )


def init():
    """
    Initialize key value store that will be used for the event publishing.
    That way the main API takes advantage of Redis pub/sub capabilities to
    push events to the event stream API.
    """
    global socketio
    socketio = SocketIO(message_queue=redis_url)
    return socketio
Make publisher store emit the right event name
Make publisher store emit the right event name
Python
agpl-3.0
cgwire/zou
from flask_socketio import SocketIO
from flask import current_app

from zou.app import config

host = config.KEY_VALUE_STORE["host"]
port = config.KEY_VALUE_STORE["port"]
redis_db = config.KV_EVENTS_DB_INDEX
redis_url = "redis://%s:%s/%s" % (host, port, redis_db)

socketio = None


def publish(event, data):
    data = {
        "type": event,
        "data": data
    }
    if socketio is not None:
        socketio.emit("event", data, namespace="/events")
    else:
        current_app.logger.error(
            "Publisher store not initialized, run init() befor emitting events"
        )


def init():
    """
    Initialize key value store that will be used for the event publishing.
    That way the main API takes advantage of Redis pub/sub capabilities to
    push events to the event stream API.
    """
    global socketio
    socketio = SocketIO(message_queue=redis_url)
    return socketio
Make publisher store emit the right event name
from flask_socketio import SocketIO
from flask import current_app

from zou.app import config

host = config.KEY_VALUE_STORE["host"]
port = config.KEY_VALUE_STORE["port"]
redis_db = config.KV_EVENTS_DB_INDEX
redis_url = "redis://%s:%s/%s" % (host, port, redis_db)

socketio = None


def publish(event, data):
    if socketio is not None:
        socketio.emit(event, data, namespace="/events")
    else:
        current_app.logger.error(
            "Publisher store not initialized, run init() befor emitting events"
        )


def init():
    """
    Initialize key value store that will be used for the event publishing.
    That way the main API takes advantage of Redis pub/sub capabilities to
    push events to the event stream API.
    """
    global socketio
    socketio = SocketIO(message_queue=redis_url)
    return socketio
<commit_before>from flask_socketio import SocketIO
from flask import current_app

from zou.app import config

host = config.KEY_VALUE_STORE["host"]
port = config.KEY_VALUE_STORE["port"]
redis_db = config.KV_EVENTS_DB_INDEX
redis_url = "redis://%s:%s/%s" % (host, port, redis_db)

socketio = None


def publish(event, data):
    data = {
        "type": event,
        "data": data
    }
    if socketio is not None:
        socketio.emit("event", data, namespace="/events")
    else:
        current_app.logger.error(
            "Publisher store not initialized, run init() befor emitting events"
        )


def init():
    """
    Initialize key value store that will be used for the event publishing.
    That way the main API takes advantage of Redis pub/sub capabilities to
    push events to the event stream API.
    """
    global socketio
    socketio = SocketIO(message_queue=redis_url)
    return socketio
<commit_msg>Make publisher store emit the right event name<commit_after>
from flask_socketio import SocketIO
from flask import current_app

from zou.app import config

host = config.KEY_VALUE_STORE["host"]
port = config.KEY_VALUE_STORE["port"]
redis_db = config.KV_EVENTS_DB_INDEX
redis_url = "redis://%s:%s/%s" % (host, port, redis_db)

socketio = None


def publish(event, data):
    if socketio is not None:
        socketio.emit(event, data, namespace="/events")
    else:
        current_app.logger.error(
            "Publisher store not initialized, run init() befor emitting events"
        )


def init():
    """
    Initialize key value store that will be used for the event publishing.
    That way the main API takes advantage of Redis pub/sub capabilities to
    push events to the event stream API.
    """
    global socketio
    socketio = SocketIO(message_queue=redis_url)
    return socketio
from flask_socketio import SocketIO
from flask import current_app

from zou.app import config

host = config.KEY_VALUE_STORE["host"]
port = config.KEY_VALUE_STORE["port"]
redis_db = config.KV_EVENTS_DB_INDEX
redis_url = "redis://%s:%s/%s" % (host, port, redis_db)

socketio = None


def publish(event, data):
    data = {
        "type": event,
        "data": data
    }
    if socketio is not None:
        socketio.emit("event", data, namespace="/events")
    else:
        current_app.logger.error(
            "Publisher store not initialized, run init() befor emitting events"
        )


def init():
    """
    Initialize key value store that will be used for the event publishing.
    That way the main API takes advantage of Redis pub/sub capabilities to
    push events to the event stream API.
    """
    global socketio
    socketio = SocketIO(message_queue=redis_url)
    return socketio
Make publisher store emit the right event name
from flask_socketio import SocketIO
from flask import current_app

from zou.app import config

host = config.KEY_VALUE_STORE["host"]
port = config.KEY_VALUE_STORE["port"]
redis_db = config.KV_EVENTS_DB_INDEX
redis_url = "redis://%s:%s/%s" % (host, port, redis_db)

socketio = None


def publish(event, data):
    if socketio is not None:
        socketio.emit(event, data, namespace="/events")
    else:
        current_app.logger.error(
            "Publisher store not initialized, run init() befor emitting events"
        )


def init():
    """
    Initialize key value store that will be used for the event publishing.
    That way the main API takes advantage of Redis pub/sub capabilities to
    push events to the event stream API.
    """
    global socketio
    socketio = SocketIO(message_queue=redis_url)
    return socketio
<commit_before>from flask_socketio import SocketIO
from flask import current_app

from zou.app import config

host = config.KEY_VALUE_STORE["host"]
port = config.KEY_VALUE_STORE["port"]
redis_db = config.KV_EVENTS_DB_INDEX
redis_url = "redis://%s:%s/%s" % (host, port, redis_db)

socketio = None


def publish(event, data):
    data = {
        "type": event,
        "data": data
    }
    if socketio is not None:
        socketio.emit("event", data, namespace="/events")
    else:
        current_app.logger.error(
            "Publisher store not initialized, run init() befor emitting events"
        )


def init():
    """
    Initialize key value store that will be used for the event publishing.
    That way the main API takes advantage of Redis pub/sub capabilities to
    push events to the event stream API.
    """
    global socketio
    socketio = SocketIO(message_queue=redis_url)
    return socketio
<commit_msg>Make publisher store emit the right event name<commit_after>from flask_socketio import SocketIO
from flask import current_app

from zou.app import config

host = config.KEY_VALUE_STORE["host"]
port = config.KEY_VALUE_STORE["port"]
redis_db = config.KV_EVENTS_DB_INDEX
redis_url = "redis://%s:%s/%s" % (host, port, redis_db)

socketio = None


def publish(event, data):
    if socketio is not None:
        socketio.emit(event, data, namespace="/events")
    else:
        current_app.logger.error(
            "Publisher store not initialized, run init() befor emitting events"
        )


def init():
    """
    Initialize key value store that will be used for the event publishing.
    That way the main API takes advantage of Redis pub/sub capabilities to
    push events to the event stream API.
    """
    global socketio
    socketio = SocketIO(message_queue=redis_url)
    return socketio
24e6d37108bc01b69d2f64014862bebd1e980fee
olim/olim/apps/storage/models.py
olim/olim/apps/storage/models.py
from django.db import models

# Create your models here.
from django.db import models


class Filesys(models.Model):
    name = models.CharField(max_length=100)
    url = models.URLField()
    date = models.DateField(auto_now=True)
    #uploader = models.ForeignKey('account.User')
    thumbnail = models.FileField(upload_to='thumb')
    parent_dir = models.CharField(max_length=100)
    is_dir = models.BooleanField()

    def __str__(self):
        if is_dir:
            return '/' + name
        else:
            return name
Make a model 'Filesys'. (not yet, because of uploader field)
Make a model 'Filesys'. (not yet, because of uploader field)
Python
apache-2.0
sparcs-kaist/olim,sparcs-kaist/olim
from django.db import models

# Create your models here.
Make a model 'Filesys'. (not yet, because of uploader field)
from django.db import models


class Filesys(models.Model):
    name = models.CharField(max_length=100)
    url = models.URLField()
    date = models.DateField(auto_now=True)
    #uploader = models.ForeignKey('account.User')
    thumbnail = models.FileField(upload_to='thumb')
    parent_dir = models.CharField(max_length=100)
    is_dir = models.BooleanField()

    def __str__(self):
        if is_dir:
            return '/' + name
        else:
            return name
<commit_before>from django.db import models

# Create your models here.
<commit_msg>Make a model 'Filesys'. (not yet, because of uploader field)<commit_after>
from django.db import models


class Filesys(models.Model):
    name = models.CharField(max_length=100)
    url = models.URLField()
    date = models.DateField(auto_now=True)
    #uploader = models.ForeignKey('account.User')
    thumbnail = models.FileField(upload_to='thumb')
    parent_dir = models.CharField(max_length=100)
    is_dir = models.BooleanField()

    def __str__(self):
        if is_dir:
            return '/' + name
        else:
            return name
from django.db import models

# Create your models here.
Make a model 'Filesys'. (not yet, because of uploader field)
from django.db import models


class Filesys(models.Model):
    name = models.CharField(max_length=100)
    url = models.URLField()
    date = models.DateField(auto_now=True)
    #uploader = models.ForeignKey('account.User')
    thumbnail = models.FileField(upload_to='thumb')
    parent_dir = models.CharField(max_length=100)
    is_dir = models.BooleanField()

    def __str__(self):
        if is_dir:
            return '/' + name
        else:
            return name
<commit_before>from django.db import models

# Create your models here.
<commit_msg>Make a model 'Filesys'. (not yet, because of uploader field)<commit_after>from django.db import models


class Filesys(models.Model):
    name = models.CharField(max_length=100)
    url = models.URLField()
    date = models.DateField(auto_now=True)
    #uploader = models.ForeignKey('account.User')
    thumbnail = models.FileField(upload_to='thumb')
    parent_dir = models.CharField(max_length=100)
    is_dir = models.BooleanField()

    def __str__(self):
        if is_dir:
            return '/' + name
        else:
            return name
bd3b6d1703598d362c29417e0d64e2050a79fbee
willtherebespace/web/__main__.py
willtherebespace/web/__main__.py
from willtherebespace.web import app

app.run(port=5000, debug=True)
from willtherebespace.web import app

app.run(host='0.0.0.0', port=8000, debug=True)
Change default web server port
Change default web server port
Python
mit
thomasleese/will-there-be-space,tomleese/will-there-be-space,tomleese/will-there-be-space,tomleese/will-there-be-space,tomleese/will-there-be-space,thomasleese/will-there-be-space,tomleese/will-there-be-space,thomasleese/will-there-be-space,thomasleese/will-there-be-space,thomasleese/will-there-be-space
from willtherebespace.web import app

app.run(port=5000, debug=True)
Change default web server port
from willtherebespace.web import app

app.run(host='0.0.0.0', port=8000, debug=True)
<commit_before>from willtherebespace.web import app

app.run(port=5000, debug=True)
<commit_msg>Change default web server port<commit_after>
from willtherebespace.web import app

app.run(host='0.0.0.0', port=8000, debug=True)
from willtherebespace.web import app

app.run(port=5000, debug=True)
Change default web server port
from willtherebespace.web import app

app.run(host='0.0.0.0', port=8000, debug=True)
<commit_before>from willtherebespace.web import app

app.run(port=5000, debug=True)
<commit_msg>Change default web server port<commit_after>from willtherebespace.web import app

app.run(host='0.0.0.0', port=8000, debug=True)
d10656527cf3a0fe3d47827d8d2f27fda4cb2a5c
zseqfile/__init__.py
zseqfile/__init__.py
""" zseqfile - transparently handle compressed files """ # Expose the public API. from .zseqfile import ( # noqa open, open_gzip, open_bzip2, open_lzma, )
""" zseqfile - transparently handle compressed files """ from .zseqfile import ( # noqa open, open_gzip, open_bzip2, open_lzma, ) open_gz = open_gzip open_bz2 = open_bzip2 open_xz = open_lzma
Define a few convenience aliases in the public API
Define a few convenience aliases in the public API
Python
bsd-3-clause
wbolster/zseqfile
""" zseqfile - transparently handle compressed files """ # Expose the public API. from .zseqfile import ( # noqa open, open_gzip, open_bzip2, open_lzma, ) Define a few convenience aliases in the public API
""" zseqfile - transparently handle compressed files """ from .zseqfile import ( # noqa open, open_gzip, open_bzip2, open_lzma, ) open_gz = open_gzip open_bz2 = open_bzip2 open_xz = open_lzma
<commit_before>""" zseqfile - transparently handle compressed files """ # Expose the public API. from .zseqfile import ( # noqa open, open_gzip, open_bzip2, open_lzma, ) <commit_msg>Define a few convenience aliases in the public API<commit_after>
""" zseqfile - transparently handle compressed files """ from .zseqfile import ( # noqa open, open_gzip, open_bzip2, open_lzma, ) open_gz = open_gzip open_bz2 = open_bzip2 open_xz = open_lzma
""" zseqfile - transparently handle compressed files """ # Expose the public API. from .zseqfile import ( # noqa open, open_gzip, open_bzip2, open_lzma, ) Define a few convenience aliases in the public API""" zseqfile - transparently handle compressed files """ from .zseqfile import ( # noqa open, open_gzip, open_bzip2, open_lzma, ) open_gz = open_gzip open_bz2 = open_bzip2 open_xz = open_lzma
<commit_before>""" zseqfile - transparently handle compressed files """ # Expose the public API. from .zseqfile import ( # noqa open, open_gzip, open_bzip2, open_lzma, ) <commit_msg>Define a few convenience aliases in the public API<commit_after>""" zseqfile - transparently handle compressed files """ from .zseqfile import ( # noqa open, open_gzip, open_bzip2, open_lzma, ) open_gz = open_gzip open_bz2 = open_bzip2 open_xz = open_lzma
ff153f141eb67f124520c51de69e161436ff0666
GreyMatter/business_news_reader.py
GreyMatter/business_news_reader.py
import json import requests from bs4 import BeautifulSoup from SenseCells.tts import tts # NDTV News fixed_url = 'http://profit.ndtv.com/news/latest/' news_headlines_list = [] news_details_list = [] for i in range(1, 2): changing_slug = '/page-' + str(i) url = fixed_url + changing_slug r = requests.get(url) data = r.text soup = BeautifulSoup(data, "html.parser") for news_headlines in soup.find_all('h2'): news_headlines_list.append(news_headlines.get_text()) del news_headlines_list[-2:] for news_details in soup.find_all('p', 'intro'): news_details_list.append(news_details.get_text()) news_headlines_list_small = [element.lower() for element in news_headlines_list] news_details_list_small = [element.lower() for element in news_details_list] news_dictionary = dict(zip(news_headlines_list_small, news_details_list_small)) def news_reader(): for key, value in news_dictionary.items(): tts('Headline, ' + key) tts('News, ' + value)
import requests from bs4 import BeautifulSoup from SenseCells.tts import tts # NDTV News fixed_url = 'http://profit.ndtv.com/news/latest/' news_headlines_list = [] news_details_list = [] for i in range(1, 2): changing_slug = '/page-' + str(i) url = fixed_url + changing_slug r = requests.get(url) data = r.text soup = BeautifulSoup(data, "html.parser") for news_headlines in soup.find_all('h2'): news_headlines_list.append(news_headlines.get_text()) del news_headlines_list[-2:] for news_details in soup.find_all('p', 'intro'): news_details_list.append(news_details.get_text()) news_headlines_list_small = [element.lower().replace("(", "").replace(")", "").replace("'", "") for element in news_headlines_list] news_details_list_small = [element.lower().replace("(", "").replace(")", "").replace("'", "") for element in news_details_list] news_dictionary = dict(zip(news_headlines_list_small, news_details_list_small)) def news_reader(): for key, value in news_dictionary.items(): tts('Headline, ' + key) tts('News, ' + value)
Fix errors due to quotes and parentheses in reading
Fix errors due to quotes and parentheses in reading
Python
mit
anurag-ks/Melissa-Core,Melissa-AI/Melissa-Core,Melissa-AI/Melissa-Core,anurag-ks/Melissa-Core,Melissa-AI/Melissa-Core,anurag-ks/Melissa-Core,Melissa-AI/Melissa-Core,anurag-ks/Melissa-Core
import json import requests from bs4 import BeautifulSoup from SenseCells.tts import tts # NDTV News fixed_url = 'http://profit.ndtv.com/news/latest/' news_headlines_list = [] news_details_list = [] for i in range(1, 2): changing_slug = '/page-' + str(i) url = fixed_url + changing_slug r = requests.get(url) data = r.text soup = BeautifulSoup(data, "html.parser") for news_headlines in soup.find_all('h2'): news_headlines_list.append(news_headlines.get_text()) del news_headlines_list[-2:] for news_details in soup.find_all('p', 'intro'): news_details_list.append(news_details.get_text()) news_headlines_list_small = [element.lower() for element in news_headlines_list] news_details_list_small = [element.lower() for element in news_details_list] news_dictionary = dict(zip(news_headlines_list_small, news_details_list_small)) def news_reader(): for key, value in news_dictionary.items(): tts('Headline, ' + key) tts('News, ' + value) Fix errors due to quotes and parenthesis in reading
import requests from bs4 import BeautifulSoup from SenseCells.tts import tts # NDTV News fixed_url = 'http://profit.ndtv.com/news/latest/' news_headlines_list = [] news_details_list = [] for i in range(1, 2): changing_slug = '/page-' + str(i) url = fixed_url + changing_slug r = requests.get(url) data = r.text soup = BeautifulSoup(data, "html.parser") for news_headlines in soup.find_all('h2'): news_headlines_list.append(news_headlines.get_text()) del news_headlines_list[-2:] for news_details in soup.find_all('p', 'intro'): news_details_list.append(news_details.get_text()) news_headlines_list_small = [element.lower().replace("(", "").replace(")", "").replace("'", "") for element in news_headlines_list] news_details_list_small = [element.lower().replace("(", "").replace(")", "").replace("'", "") for element in news_details_list] news_dictionary = dict(zip(news_headlines_list_small, news_details_list_small)) def news_reader(): for key, value in news_dictionary.items(): tts('Headline, ' + key) tts('News, ' + value)
<commit_before>import json import requests from bs4 import BeautifulSoup from SenseCells.tts import tts # NDTV News fixed_url = 'http://profit.ndtv.com/news/latest/' news_headlines_list = [] news_details_list = [] for i in range(1, 2): changing_slug = '/page-' + str(i) url = fixed_url + changing_slug r = requests.get(url) data = r.text soup = BeautifulSoup(data, "html.parser") for news_headlines in soup.find_all('h2'): news_headlines_list.append(news_headlines.get_text()) del news_headlines_list[-2:] for news_details in soup.find_all('p', 'intro'): news_details_list.append(news_details.get_text()) news_headlines_list_small = [element.lower() for element in news_headlines_list] news_details_list_small = [element.lower() for element in news_details_list] news_dictionary = dict(zip(news_headlines_list_small, news_details_list_small)) def news_reader(): for key, value in news_dictionary.items(): tts('Headline, ' + key) tts('News, ' + value) <commit_msg>Fix errors due to quotes and parenthesis in reading<commit_after>
import requests from bs4 import BeautifulSoup from SenseCells.tts import tts # NDTV News fixed_url = 'http://profit.ndtv.com/news/latest/' news_headlines_list = [] news_details_list = [] for i in range(1, 2): changing_slug = '/page-' + str(i) url = fixed_url + changing_slug r = requests.get(url) data = r.text soup = BeautifulSoup(data, "html.parser") for news_headlines in soup.find_all('h2'): news_headlines_list.append(news_headlines.get_text()) del news_headlines_list[-2:] for news_details in soup.find_all('p', 'intro'): news_details_list.append(news_details.get_text()) news_headlines_list_small = [element.lower().replace("(", "").replace(")", "").replace("'", "") for element in news_headlines_list] news_details_list_small = [element.lower().replace("(", "").replace(")", "").replace("'", "") for element in news_details_list] news_dictionary = dict(zip(news_headlines_list_small, news_details_list_small)) def news_reader(): for key, value in news_dictionary.items(): tts('Headline, ' + key) tts('News, ' + value)
import json import requests from bs4 import BeautifulSoup from SenseCells.tts import tts # NDTV News fixed_url = 'http://profit.ndtv.com/news/latest/' news_headlines_list = [] news_details_list = [] for i in range(1, 2): changing_slug = '/page-' + str(i) url = fixed_url + changing_slug r = requests.get(url) data = r.text soup = BeautifulSoup(data, "html.parser") for news_headlines in soup.find_all('h2'): news_headlines_list.append(news_headlines.get_text()) del news_headlines_list[-2:] for news_details in soup.find_all('p', 'intro'): news_details_list.append(news_details.get_text()) news_headlines_list_small = [element.lower() for element in news_headlines_list] news_details_list_small = [element.lower() for element in news_details_list] news_dictionary = dict(zip(news_headlines_list_small, news_details_list_small)) def news_reader(): for key, value in news_dictionary.items(): tts('Headline, ' + key) tts('News, ' + value) Fix errors due to quotes and parenthesis in readingimport requests from bs4 import BeautifulSoup from SenseCells.tts import tts # NDTV News fixed_url = 'http://profit.ndtv.com/news/latest/' news_headlines_list = [] news_details_list = [] for i in range(1, 2): changing_slug = '/page-' + str(i) url = fixed_url + changing_slug r = requests.get(url) data = r.text soup = BeautifulSoup(data, "html.parser") for news_headlines in soup.find_all('h2'): news_headlines_list.append(news_headlines.get_text()) del news_headlines_list[-2:] for news_details in soup.find_all('p', 'intro'): news_details_list.append(news_details.get_text()) news_headlines_list_small = [element.lower().replace("(", "").replace(")", "").replace("'", "") for element in news_headlines_list] news_details_list_small = [element.lower().replace("(", "").replace(")", "").replace("'", "") for element in news_details_list] news_dictionary = dict(zip(news_headlines_list_small, news_details_list_small)) def news_reader(): for key, value in news_dictionary.items(): tts('Headline, ' + key) tts('News, ' + value)
<commit_before>import json import requests from bs4 import BeautifulSoup from SenseCells.tts import tts # NDTV News fixed_url = 'http://profit.ndtv.com/news/latest/' news_headlines_list = [] news_details_list = [] for i in range(1, 2): changing_slug = '/page-' + str(i) url = fixed_url + changing_slug r = requests.get(url) data = r.text soup = BeautifulSoup(data, "html.parser") for news_headlines in soup.find_all('h2'): news_headlines_list.append(news_headlines.get_text()) del news_headlines_list[-2:] for news_details in soup.find_all('p', 'intro'): news_details_list.append(news_details.get_text()) news_headlines_list_small = [element.lower() for element in news_headlines_list] news_details_list_small = [element.lower() for element in news_details_list] news_dictionary = dict(zip(news_headlines_list_small, news_details_list_small)) def news_reader(): for key, value in news_dictionary.items(): tts('Headline, ' + key) tts('News, ' + value) <commit_msg>Fix errors due to quotes and parenthesis in reading<commit_after>import requests from bs4 import BeautifulSoup from SenseCells.tts import tts # NDTV News fixed_url = 'http://profit.ndtv.com/news/latest/' news_headlines_list = [] news_details_list = [] for i in range(1, 2): changing_slug = '/page-' + str(i) url = fixed_url + changing_slug r = requests.get(url) data = r.text soup = BeautifulSoup(data, "html.parser") for news_headlines in soup.find_all('h2'): news_headlines_list.append(news_headlines.get_text()) del news_headlines_list[-2:] for news_details in soup.find_all('p', 'intro'): news_details_list.append(news_details.get_text()) news_headlines_list_small = [element.lower().replace("(", "").replace(")", "").replace("'", "") for element in news_headlines_list] news_details_list_small = [element.lower().replace("(", "").replace(")", "").replace("'", "") for element in news_details_list] news_dictionary = dict(zip(news_headlines_list_small, news_details_list_small)) def news_reader(): for key, value in news_dictionary.items(): tts('Headline, ' + key) tts('News, ' + value)
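The fix above strips '(', ')' and "'" by chaining three str.replace calls per element. An equivalent formulation (a sketch assuming Python 3, not what the commit actually did) uses str.translate with a deletion table, which keeps the list comprehensions short:

STRIP = str.maketrans("", "", "()'")  # deletion table for the three offending characters

def sanitize(text):
    """Lowercase the text and drop the characters that broke the TTS reading."""
    return text.lower().translate(STRIP)

print(sanitize("RBI's rate cut (finally) lifts markets"))
# -> rbis rate cut finally lifts markets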
5bfc42e4f7c948b1cb895ebec523426be6908829
dockorm/tests/utils.py
dockorm/tests/utils.py
""" Utilities for dockorm tests. """ # encoding: utf-8 from __future__ import unicode_literals from os.path import ( dirname, join, ) from six import iteritems from ..container import ( Container, scalar, ) TEST_ORG = 'dockorm_testing' TEST_TAG = 'test' def assert_in_logs(container, line): """ Assert that the given lines are in the container's logs. """ logs = scalar(container.logs(all=True)) validate_dict(logs, {'Logs': line}) def dockerfile_root(path): """ Path to a directory Dockerfile for testing. """ return join( dirname(__file__), 'dockerfiles', path, ) def make_container(image, **kwargs): return Container( image=image, build_path=dockerfile_root(image), organization=TEST_ORG, tag=TEST_TAG, **kwargs ) def volume(path): """ Path to a file relative to the test volumes directory. """ return join( dirname(__file__), 'volumes', path, ) def validate_dict(to_test, expected): """ Recursively validate a dictionary of expectations against another input. Like TestCase.assertDictContainsSubset, but recursive. """ for key, value in iteritems(expected): if isinstance(value, dict): validate_dict(to_test[key], value) else: assert to_test[key] == value
""" Utilities for dockorm tests. """ # encoding: utf-8 from __future__ import unicode_literals from os import getenv from os.path import ( dirname, join, ) from six import iteritems from ..container import ( Container, scalar, ) TEST_ORG = 'dockorm_testing' TEST_TAG = 'test' def assert_in_logs(container, line): """ Assert that the given lines are in the container's logs. """ logs = scalar(container.logs(all=True)) validate_dict(logs, {'Logs': line}) def dockerfile_root(path): """ Path to a directory Dockerfile for testing. """ return join( dirname(__file__), 'dockerfiles', path, ) def make_container(image, **kwargs): return Container( image=image, build_path=dockerfile_root(image), organization=TEST_ORG, tag=TEST_TAG, **kwargs ) def volume(path): """ Path to a file relative to the test volumes directory. """ return join( getenv('DOCKORM_TESTS_DIR') or dirname(__file__), 'volumes', path, ) def validate_dict(to_test, expected): """ Recursively validate a dictionary of expectations against another input. Like TestCase.assertDictContainsSubset, but recursive. """ for key, value in iteritems(expected): if isinstance(value, dict): validate_dict(to_test[key], value) else: assert to_test[key] == value
Enable running tests from inside a container
TST: Enable running tests from inside a container by setting an env var for the path to the test volume data on the host
Python
apache-2.0
quantopian/DockORM
""" Utilities for dockorm tests. """ # encoding: utf-8 from __future__ import unicode_literals from os.path import ( dirname, join, ) from six import iteritems from ..container import ( Container, scalar, ) TEST_ORG = 'dockorm_testing' TEST_TAG = 'test' def assert_in_logs(container, line): """ Assert that the given lines are in the container's logs. """ logs = scalar(container.logs(all=True)) validate_dict(logs, {'Logs': line}) def dockerfile_root(path): """ Path to a directory Dockerfile for testing. """ return join( dirname(__file__), 'dockerfiles', path, ) def make_container(image, **kwargs): return Container( image=image, build_path=dockerfile_root(image), organization=TEST_ORG, tag=TEST_TAG, **kwargs ) def volume(path): """ Path to a file relative to the test volumes directory. """ return join( dirname(__file__), 'volumes', path, ) def validate_dict(to_test, expected): """ Recursively validate a dictionary of expectations against another input. Like TestCase.assertDictContainsSubset, but recursive. """ for key, value in iteritems(expected): if isinstance(value, dict): validate_dict(to_test[key], value) else: assert to_test[key] == value TST: Enable running tests from inside a container by setting an env var for the path to the test volume data on the host
""" Utilities for dockorm tests. """ # encoding: utf-8 from __future__ import unicode_literals from os import getenv from os.path import ( dirname, join, ) from six import iteritems from ..container import ( Container, scalar, ) TEST_ORG = 'dockorm_testing' TEST_TAG = 'test' def assert_in_logs(container, line): """ Assert that the given lines are in the container's logs. """ logs = scalar(container.logs(all=True)) validate_dict(logs, {'Logs': line}) def dockerfile_root(path): """ Path to a directory Dockerfile for testing. """ return join( dirname(__file__), 'dockerfiles', path, ) def make_container(image, **kwargs): return Container( image=image, build_path=dockerfile_root(image), organization=TEST_ORG, tag=TEST_TAG, **kwargs ) def volume(path): """ Path to a file relative to the test volumes directory. """ return join( getenv('DOCKORM_TESTS_DIR') or dirname(__file__), 'volumes', path, ) def validate_dict(to_test, expected): """ Recursively validate a dictionary of expectations against another input. Like TestCase.assertDictContainsSubset, but recursive. """ for key, value in iteritems(expected): if isinstance(value, dict): validate_dict(to_test[key], value) else: assert to_test[key] == value
<commit_before>""" Utilities for dockorm tests. """ # encoding: utf-8 from __future__ import unicode_literals from os.path import ( dirname, join, ) from six import iteritems from ..container import ( Container, scalar, ) TEST_ORG = 'dockorm_testing' TEST_TAG = 'test' def assert_in_logs(container, line): """ Assert that the given lines are in the container's logs. """ logs = scalar(container.logs(all=True)) validate_dict(logs, {'Logs': line}) def dockerfile_root(path): """ Path to a directory Dockerfile for testing. """ return join( dirname(__file__), 'dockerfiles', path, ) def make_container(image, **kwargs): return Container( image=image, build_path=dockerfile_root(image), organization=TEST_ORG, tag=TEST_TAG, **kwargs ) def volume(path): """ Path to a file relative to the test volumes directory. """ return join( dirname(__file__), 'volumes', path, ) def validate_dict(to_test, expected): """ Recursively validate a dictionary of expectations against another input. Like TestCase.assertDictContainsSubset, but recursive. """ for key, value in iteritems(expected): if isinstance(value, dict): validate_dict(to_test[key], value) else: assert to_test[key] == value <commit_msg>TST: Enable running tests from inside a container by setting an env var for the path to the test volume data on the host<commit_after>
""" Utilities for dockorm tests. """ # encoding: utf-8 from __future__ import unicode_literals from os import getenv from os.path import ( dirname, join, ) from six import iteritems from ..container import ( Container, scalar, ) TEST_ORG = 'dockorm_testing' TEST_TAG = 'test' def assert_in_logs(container, line): """ Assert that the given lines are in the container's logs. """ logs = scalar(container.logs(all=True)) validate_dict(logs, {'Logs': line}) def dockerfile_root(path): """ Path to a directory Dockerfile for testing. """ return join( dirname(__file__), 'dockerfiles', path, ) def make_container(image, **kwargs): return Container( image=image, build_path=dockerfile_root(image), organization=TEST_ORG, tag=TEST_TAG, **kwargs ) def volume(path): """ Path to a file relative to the test volumes directory. """ return join( getenv('DOCKORM_TESTS_DIR') or dirname(__file__), 'volumes', path, ) def validate_dict(to_test, expected): """ Recursively validate a dictionary of expectations against another input. Like TestCase.assertDictContainsSubset, but recursive. """ for key, value in iteritems(expected): if isinstance(value, dict): validate_dict(to_test[key], value) else: assert to_test[key] == value
""" Utilities for dockorm tests. """ # encoding: utf-8 from __future__ import unicode_literals from os.path import ( dirname, join, ) from six import iteritems from ..container import ( Container, scalar, ) TEST_ORG = 'dockorm_testing' TEST_TAG = 'test' def assert_in_logs(container, line): """ Assert that the given lines are in the container's logs. """ logs = scalar(container.logs(all=True)) validate_dict(logs, {'Logs': line}) def dockerfile_root(path): """ Path to a directory Dockerfile for testing. """ return join( dirname(__file__), 'dockerfiles', path, ) def make_container(image, **kwargs): return Container( image=image, build_path=dockerfile_root(image), organization=TEST_ORG, tag=TEST_TAG, **kwargs ) def volume(path): """ Path to a file relative to the test volumes directory. """ return join( dirname(__file__), 'volumes', path, ) def validate_dict(to_test, expected): """ Recursively validate a dictionary of expectations against another input. Like TestCase.assertDictContainsSubset, but recursive. """ for key, value in iteritems(expected): if isinstance(value, dict): validate_dict(to_test[key], value) else: assert to_test[key] == value TST: Enable running tests from inside a container by setting an env var for the path to the test volume data on the host""" Utilities for dockorm tests. """ # encoding: utf-8 from __future__ import unicode_literals from os import getenv from os.path import ( dirname, join, ) from six import iteritems from ..container import ( Container, scalar, ) TEST_ORG = 'dockorm_testing' TEST_TAG = 'test' def assert_in_logs(container, line): """ Assert that the given lines are in the container's logs. """ logs = scalar(container.logs(all=True)) validate_dict(logs, {'Logs': line}) def dockerfile_root(path): """ Path to a directory Dockerfile for testing. """ return join( dirname(__file__), 'dockerfiles', path, ) def make_container(image, **kwargs): return Container( image=image, build_path=dockerfile_root(image), organization=TEST_ORG, tag=TEST_TAG, **kwargs ) def volume(path): """ Path to a file relative to the test volumes directory. """ return join( getenv('DOCKORM_TESTS_DIR') or dirname(__file__), 'volumes', path, ) def validate_dict(to_test, expected): """ Recursively validate a dictionary of expectations against another input. Like TestCase.assertDictContainsSubset, but recursive. """ for key, value in iteritems(expected): if isinstance(value, dict): validate_dict(to_test[key], value) else: assert to_test[key] == value
<commit_before>""" Utilities for dockorm tests. """ # encoding: utf-8 from __future__ import unicode_literals from os.path import ( dirname, join, ) from six import iteritems from ..container import ( Container, scalar, ) TEST_ORG = 'dockorm_testing' TEST_TAG = 'test' def assert_in_logs(container, line): """ Assert that the given lines are in the container's logs. """ logs = scalar(container.logs(all=True)) validate_dict(logs, {'Logs': line}) def dockerfile_root(path): """ Path to a directory Dockerfile for testing. """ return join( dirname(__file__), 'dockerfiles', path, ) def make_container(image, **kwargs): return Container( image=image, build_path=dockerfile_root(image), organization=TEST_ORG, tag=TEST_TAG, **kwargs ) def volume(path): """ Path to a file relative to the test volumes directory. """ return join( dirname(__file__), 'volumes', path, ) def validate_dict(to_test, expected): """ Recursively validate a dictionary of expectations against another input. Like TestCase.assertDictContainsSubset, but recursive. """ for key, value in iteritems(expected): if isinstance(value, dict): validate_dict(to_test[key], value) else: assert to_test[key] == value <commit_msg>TST: Enable running tests from inside a container by setting an env var for the path to the test volume data on the host<commit_after>""" Utilities for dockorm tests. """ # encoding: utf-8 from __future__ import unicode_literals from os import getenv from os.path import ( dirname, join, ) from six import iteritems from ..container import ( Container, scalar, ) TEST_ORG = 'dockorm_testing' TEST_TAG = 'test' def assert_in_logs(container, line): """ Assert that the given lines are in the container's logs. """ logs = scalar(container.logs(all=True)) validate_dict(logs, {'Logs': line}) def dockerfile_root(path): """ Path to a directory Dockerfile for testing. """ return join( dirname(__file__), 'dockerfiles', path, ) def make_container(image, **kwargs): return Container( image=image, build_path=dockerfile_root(image), organization=TEST_ORG, tag=TEST_TAG, **kwargs ) def volume(path): """ Path to a file relative to the test volumes directory. """ return join( getenv('DOCKORM_TESTS_DIR') or dirname(__file__), 'volumes', path, ) def validate_dict(to_test, expected): """ Recursively validate a dictionary of expectations against another input. Like TestCase.assertDictContainsSubset, but recursive. """ for key, value in iteritems(expected): if isinstance(value, dict): validate_dict(to_test[key], value) else: assert to_test[key] == value
ceaae1b0f9191ad84cfd80cbf78e5d14c11f7ea6
src/async_signals/dispatcher.py
src/async_signals/dispatcher.py
from celery import task from django.dispatch.dispatcher import ( _make_id, Signal, ) class AsyncSignal(Signal): def __init__(self, providing_args=None, queue=None): super(AsyncSignal, self).__init__(providing_args=providing_args) self.queue = queue def send(self, sender, **named): """Send the signal via Celery.""" self.propogate_signal.apply_async( args=(sender), kwargs=named, queue=self.queue, ) @task def propagate_signal(self, sender, **named): """ Send signal from sender to all connected receivers catching errors. Arguments: sender The sender of the signal. Can be any python object (normally one registered with a connect if you actually want something to occur). named Named arguments which will be passed to receivers. These arguments must be a subset of the argument names defined in providing_args. Return a list of tuple pairs [(receiver, response), ... ]. May raise DispatcherKeyError. If any receiver raises an error (specifically any subclass of Exception), the error instance is returned as the result for that receiver. """ # Call each receiver with whatever arguments it can accept. for receiver in self._live_receivers(_make_id(sender)): try: receiver(signal=self, sender=sender, **named) except Exception: pass
from celery import task from django.dispatch.dispatcher import ( _make_id, Signal, ) class AsyncSignal(Signal): def __init__(self, providing_args=None, queue=None): super(AsyncSignal, self).__init__(providing_args=providing_args) self.queue = queue def send(self, sender, **named): """Send the signal via Celery.""" self.propagate_signal.apply_async( args=(sender), kwargs=named, queue=self.queue, ) @task def propagate_signal(self, sender, **named): """ Send signal from sender to all connected receivers catching errors. Arguments: sender The sender of the signal. Can be any python object (normally one registered with a connect if you actually want something to occur). named Named arguments which will be passed to receivers. These arguments must be a subset of the argument names defined in providing_args. Return a list of tuple pairs [(receiver, response), ... ]. May raise DispatcherKeyError. If any receiver raises an error (specifically any subclass of Exception), the error instance is returned as the result for that receiver. """ # Call each receiver with whatever arguments it can accept. for receiver in self._live_receivers(_make_id(sender)): try: receiver(signal=self, sender=sender, **named) except Exception: pass
Fix typo in self.propagate_signal call
Fix typo in self.propagate_signal call
Python
bsd-3-clause
nyergler/async-signals
from celery import task from django.dispatch.dispatcher import ( _make_id, Signal, ) class AsyncSignal(Signal): def __init__(self, providing_args=None, queue=None): super(AsyncSignal, self).__init__(providing_args=providing_args) self.queue = queue def send(self, sender, **named): """Send the signal via Celery.""" self.propogate_signal.apply_async( args=(sender), kwargs=named, queue=self.queue, ) @task def propagate_signal(self, sender, **named): """ Send signal from sender to all connected receivers catching errors. Arguments: sender The sender of the signal. Can be any python object (normally one registered with a connect if you actually want something to occur). named Named arguments which will be passed to receivers. These arguments must be a subset of the argument names defined in providing_args. Return a list of tuple pairs [(receiver, response), ... ]. May raise DispatcherKeyError. If any receiver raises an error (specifically any subclass of Exception), the error instance is returned as the result for that receiver. """ # Call each receiver with whatever arguments it can accept. for receiver in self._live_receivers(_make_id(sender)): try: receiver(signal=self, sender=sender, **named) except Exception: pass Fix typo in self.propagate_signal call
from celery import task from django.dispatch.dispatcher import ( _make_id, Signal, ) class AsyncSignal(Signal): def __init__(self, providing_args=None, queue=None): super(AsyncSignal, self).__init__(providing_args=providing_args) self.queue = queue def send(self, sender, **named): """Send the signal via Celery.""" self.propagate_signal.apply_async( args=(sender), kwargs=named, queue=self.queue, ) @task def propagate_signal(self, sender, **named): """ Send signal from sender to all connected receivers catching errors. Arguments: sender The sender of the signal. Can be any python object (normally one registered with a connect if you actually want something to occur). named Named arguments which will be passed to receivers. These arguments must be a subset of the argument names defined in providing_args. Return a list of tuple pairs [(receiver, response), ... ]. May raise DispatcherKeyError. If any receiver raises an error (specifically any subclass of Exception), the error instance is returned as the result for that receiver. """ # Call each receiver with whatever arguments it can accept. for receiver in self._live_receivers(_make_id(sender)): try: receiver(signal=self, sender=sender, **named) except Exception: pass
<commit_before>from celery import task from django.dispatch.dispatcher import ( _make_id, Signal, ) class AsyncSignal(Signal): def __init__(self, providing_args=None, queue=None): super(AsyncSignal, self).__init__(providing_args=providing_args) self.queue = queue def send(self, sender, **named): """Send the signal via Celery.""" self.propogate_signal.apply_async( args=(sender), kwargs=named, queue=self.queue, ) @task def propagate_signal(self, sender, **named): """ Send signal from sender to all connected receivers catching errors. Arguments: sender The sender of the signal. Can be any python object (normally one registered with a connect if you actually want something to occur). named Named arguments which will be passed to receivers. These arguments must be a subset of the argument names defined in providing_args. Return a list of tuple pairs [(receiver, response), ... ]. May raise DispatcherKeyError. If any receiver raises an error (specifically any subclass of Exception), the error instance is returned as the result for that receiver. """ # Call each receiver with whatever arguments it can accept. for receiver in self._live_receivers(_make_id(sender)): try: receiver(signal=self, sender=sender, **named) except Exception: pass <commit_msg>Fix typo in self.propagate_signal call<commit_after>
from celery import task from django.dispatch.dispatcher import ( _make_id, Signal, ) class AsyncSignal(Signal): def __init__(self, providing_args=None, queue=None): super(AsyncSignal, self).__init__(providing_args=providing_args) self.queue = queue def send(self, sender, **named): """Send the signal via Celery.""" self.propagate_signal.apply_async( args=(sender), kwargs=named, queue=self.queue, ) @task def propagate_signal(self, sender, **named): """ Send signal from sender to all connected receivers catching errors. Arguments: sender The sender of the signal. Can be any python object (normally one registered with a connect if you actually want something to occur). named Named arguments which will be passed to receivers. These arguments must be a subset of the argument names defined in providing_args. Return a list of tuple pairs [(receiver, response), ... ]. May raise DispatcherKeyError. If any receiver raises an error (specifically any subclass of Exception), the error instance is returned as the result for that receiver. """ # Call each receiver with whatever arguments it can accept. for receiver in self._live_receivers(_make_id(sender)): try: receiver(signal=self, sender=sender, **named) except Exception: pass
from celery import task from django.dispatch.dispatcher import ( _make_id, Signal, ) class AsyncSignal(Signal): def __init__(self, providing_args=None, queue=None): super(AsyncSignal, self).__init__(providing_args=providing_args) self.queue = queue def send(self, sender, **named): """Send the signal via Celery.""" self.propogate_signal.apply_async( args=(sender), kwargs=named, queue=self.queue, ) @task def propagate_signal(self, sender, **named): """ Send signal from sender to all connected receivers catching errors. Arguments: sender The sender of the signal. Can be any python object (normally one registered with a connect if you actually want something to occur). named Named arguments which will be passed to receivers. These arguments must be a subset of the argument names defined in providing_args. Return a list of tuple pairs [(receiver, response), ... ]. May raise DispatcherKeyError. If any receiver raises an error (specifically any subclass of Exception), the error instance is returned as the result for that receiver. """ # Call each receiver with whatever arguments it can accept. for receiver in self._live_receivers(_make_id(sender)): try: receiver(signal=self, sender=sender, **named) except Exception: pass Fix typo in self.propagate_signal callfrom celery import task from django.dispatch.dispatcher import ( _make_id, Signal, ) class AsyncSignal(Signal): def __init__(self, providing_args=None, queue=None): super(AsyncSignal, self).__init__(providing_args=providing_args) self.queue = queue def send(self, sender, **named): """Send the signal via Celery.""" self.propagate_signal.apply_async( args=(sender), kwargs=named, queue=self.queue, ) @task def propagate_signal(self, sender, **named): """ Send signal from sender to all connected receivers catching errors. Arguments: sender The sender of the signal. Can be any python object (normally one registered with a connect if you actually want something to occur). named Named arguments which will be passed to receivers. These arguments must be a subset of the argument names defined in providing_args. Return a list of tuple pairs [(receiver, response), ... ]. May raise DispatcherKeyError. If any receiver raises an error (specifically any subclass of Exception), the error instance is returned as the result for that receiver. """ # Call each receiver with whatever arguments it can accept. for receiver in self._live_receivers(_make_id(sender)): try: receiver(signal=self, sender=sender, **named) except Exception: pass
<commit_before>from celery import task from django.dispatch.dispatcher import ( _make_id, Signal, ) class AsyncSignal(Signal): def __init__(self, providing_args=None, queue=None): super(AsyncSignal, self).__init__(providing_args=providing_args) self.queue = queue def send(self, sender, **named): """Send the signal via Celery.""" self.propogate_signal.apply_async( args=(sender), kwargs=named, queue=self.queue, ) @task def propagate_signal(self, sender, **named): """ Send signal from sender to all connected receivers catching errors. Arguments: sender The sender of the signal. Can be any python object (normally one registered with a connect if you actually want something to occur). named Named arguments which will be passed to receivers. These arguments must be a subset of the argument names defined in providing_args. Return a list of tuple pairs [(receiver, response), ... ]. May raise DispatcherKeyError. If any receiver raises an error (specifically any subclass of Exception), the error instance is returned as the result for that receiver. """ # Call each receiver with whatever arguments it can accept. for receiver in self._live_receivers(_make_id(sender)): try: receiver(signal=self, sender=sender, **named) except Exception: pass <commit_msg>Fix typo in self.propagate_signal call<commit_after>from celery import task from django.dispatch.dispatcher import ( _make_id, Signal, ) class AsyncSignal(Signal): def __init__(self, providing_args=None, queue=None): super(AsyncSignal, self).__init__(providing_args=providing_args) self.queue = queue def send(self, sender, **named): """Send the signal via Celery.""" self.propagate_signal.apply_async( args=(sender), kwargs=named, queue=self.queue, ) @task def propagate_signal(self, sender, **named): """ Send signal from sender to all connected receivers catching errors. Arguments: sender The sender of the signal. Can be any python object (normally one registered with a connect if you actually want something to occur). named Named arguments which will be passed to receivers. These arguments must be a subset of the argument names defined in providing_args. Return a list of tuple pairs [(receiver, response), ... ]. May raise DispatcherKeyError. If any receiver raises an error (specifically any subclass of Exception), the error instance is returned as the result for that receiver. """ # Call each receiver with whatever arguments it can accept. for receiver in self._live_receivers(_make_id(sender)): try: receiver(signal=self, sender=sender, **named) except Exception: pass
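The commit corrects the attribute name, but note (an observation about the code as shown, not something the commit addresses) that args=(sender) is still not a one-element tuple: parentheses alone only group an expression, and only a trailing comma makes a tuple, so apply_async receives sender itself as args. A minimal illustration:

sender = "alice"
print(type((sender)))    # <class 'str'>   (just a parenthesized expression)
print(type((sender,)))   # <class 'tuple'> (the trailing comma makes the tuple)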
ed3906b295669b1c0e38d88a7eb19cdde324042b
pybuild/packages/libzmq.py
pybuild/packages/libzmq.py
from ..source import GitSource from ..package import Package from ..patch import LocalPatch from ..util import target_arch class LibZMQ(Package): source = GitSource('https://github.com/AIPYX/zeromq3-x.git', alias='libzmq', branch='qpyc/3.2.5') patches = [ LocalPatch('0001-Fix-libtoolize-s-issue-in-autogen.sh'), #LocalPatch('0001-Disable-stderr-in-blas_server'), ] skip_uploading = True re_configure = True def prepare(self): pass def build(self): import os self.system(f'./autogen.sh') self.system(f'CC=\"{os.getenv("CC")}\" CXX=\"{os.getenv("CXX")}\" LDFLAGS=\"-lgnustl_shared -lsupc++ -latomic -L{self.env["ANDROID_NDK"]}/sources/cxx-stl/gnu-libstdc++/4.9/libs/armeabi-v7a\" ./configure --host=arm-linux-androideabi --target=arm-linux-androideabi --prefix={self.destdir()}') self.system(f'make install') #self.run_with_env(['make', 'install', f'PREFIX={self.destdir()}/usr'])
from ..source import GitSource from ..package import Package from ..patch import LocalPatch from ..util import target_arch class LibZMQ(Package): source = GitSource('https://github.com/AIPYX/zeromq3-x.git', alias='libzmq', branch='qpyc/3.2.5') patches = [ LocalPatch('0001-Fix-libtoolize-s-issue-in-autogen.sh'), #LocalPatch('0001-Disable-stderr-in-blas_server'), ] skip_uploading = True re_configure = True def prepare(self): pass def build(self): import os self.system(f'./autogen.sh') self.system(f'CC=\"{os.getenv("CC")}\" CXX=\"{os.getenv("CXX")}\" LDFLAGS=\"-lgnustl_shared -lsupc++ -latomic -L{self.env["ANDROID_NDK"]}/sources/cxx-stl/gnu-libstdc++/4.9/libs/armeabi-v7a\" ./configure --host=arm-linux-androideabi --target=arm-linux-androideabi --prefix={self.destdir()}') self.system(f'make install') #self.run_with_env(['make', 'install', f'PREFIX={self.destdir()}/usr']) self.system( f'if [ -e {self.destdir()}/lib/libzmq.so ] ; then mv {self.destdir()}/lib/libzmq.so {self.destdir()}/lib/libzmq.so.old; fi' )
Fix issue for building PyZMQ
Fix issue for building PyZMQ
Python
apache-2.0
qpython-android/QPython3-core,qpython-android/QPython3-core,qpython-android/QPython3-core,qpython-android/QPython3-core,qpython-android/QPython3-core,qpython-android/QPython3-core,qpython-android/QPython3-core,qpython-android/QPython3-core
from ..source import GitSource from ..package import Package from ..patch import LocalPatch from ..util import target_arch class LibZMQ(Package): source = GitSource('https://github.com/AIPYX/zeromq3-x.git', alias='libzmq', branch='qpyc/3.2.5') patches = [ LocalPatch('0001-Fix-libtoolize-s-issue-in-autogen.sh'), #LocalPatch('0001-Disable-stderr-in-blas_server'), ] skip_uploading = True re_configure = True def prepare(self): pass def build(self): import os self.system(f'./autogen.sh') self.system(f'CC=\"{os.getenv("CC")}\" CXX=\"{os.getenv("CXX")}\" LDFLAGS=\"-lgnustl_shared -lsupc++ -latomic -L{self.env["ANDROID_NDK"]}/sources/cxx-stl/gnu-libstdc++/4.9/libs/armeabi-v7a\" ./configure --host=arm-linux-androideabi --target=arm-linux-androideabi --prefix={self.destdir()}') self.system(f'make install') #self.run_with_env(['make', 'install', f'PREFIX={self.destdir()}/usr']) Fix issue for building PyZMQ
from ..source import GitSource from ..package import Package from ..patch import LocalPatch from ..util import target_arch class LibZMQ(Package): source = GitSource('https://github.com/AIPYX/zeromq3-x.git', alias='libzmq', branch='qpyc/3.2.5') patches = [ LocalPatch('0001-Fix-libtoolize-s-issue-in-autogen.sh'), #LocalPatch('0001-Disable-stderr-in-blas_server'), ] skip_uploading = True re_configure = True def prepare(self): pass def build(self): import os self.system(f'./autogen.sh') self.system(f'CC=\"{os.getenv("CC")}\" CXX=\"{os.getenv("CXX")}\" LDFLAGS=\"-lgnustl_shared -lsupc++ -latomic -L{self.env["ANDROID_NDK"]}/sources/cxx-stl/gnu-libstdc++/4.9/libs/armeabi-v7a\" ./configure --host=arm-linux-androideabi --target=arm-linux-androideabi --prefix={self.destdir()}') self.system(f'make install') #self.run_with_env(['make', 'install', f'PREFIX={self.destdir()}/usr']) self.system( f'if [ -e {self.destdir()}/lib/libzmq.so ] ; then mv {self.destdir()}/lib/libzmq.so {self.destdir()}/lib/libzmq.so.old; fi' )
<commit_before>from ..source import GitSource from ..package import Package from ..patch import LocalPatch from ..util import target_arch class LibZMQ(Package): source = GitSource('https://github.com/AIPYX/zeromq3-x.git', alias='libzmq', branch='qpyc/3.2.5') patches = [ LocalPatch('0001-Fix-libtoolize-s-issue-in-autogen.sh'), #LocalPatch('0001-Disable-stderr-in-blas_server'), ] skip_uploading = True re_configure = True def prepare(self): pass def build(self): import os self.system(f'./autogen.sh') self.system(f'CC=\"{os.getenv("CC")}\" CXX=\"{os.getenv("CXX")}\" LDFLAGS=\"-lgnustl_shared -lsupc++ -latomic -L{self.env["ANDROID_NDK"]}/sources/cxx-stl/gnu-libstdc++/4.9/libs/armeabi-v7a\" ./configure --host=arm-linux-androideabi --target=arm-linux-androideabi --prefix={self.destdir()}') self.system(f'make install') #self.run_with_env(['make', 'install', f'PREFIX={self.destdir()}/usr']) <commit_msg>Fix issue for building PyZMQ<commit_after>
from ..source import GitSource from ..package import Package from ..patch import LocalPatch from ..util import target_arch class LibZMQ(Package): source = GitSource('https://github.com/AIPYX/zeromq3-x.git', alias='libzmq', branch='qpyc/3.2.5') patches = [ LocalPatch('0001-Fix-libtoolize-s-issue-in-autogen.sh'), #LocalPatch('0001-Disable-stderr-in-blas_server'), ] skip_uploading = True re_configure = True def prepare(self): pass def build(self): import os self.system(f'./autogen.sh') self.system(f'CC=\"{os.getenv("CC")}\" CXX=\"{os.getenv("CXX")}\" LDFLAGS=\"-lgnustl_shared -lsupc++ -latomic -L{self.env["ANDROID_NDK"]}/sources/cxx-stl/gnu-libstdc++/4.9/libs/armeabi-v7a\" ./configure --host=arm-linux-androideabi --target=arm-linux-androideabi --prefix={self.destdir()}') self.system(f'make install') #self.run_with_env(['make', 'install', f'PREFIX={self.destdir()}/usr']) self.system( f'if [ -e {self.destdir()}/lib/libzmq.so ] ; then mv {self.destdir()}/lib/libzmq.so {self.destdir()}/lib/libzmq.so.old; fi' )
from ..source import GitSource from ..package import Package from ..patch import LocalPatch from ..util import target_arch class LibZMQ(Package): source = GitSource('https://github.com/AIPYX/zeromq3-x.git', alias='libzmq', branch='qpyc/3.2.5') patches = [ LocalPatch('0001-Fix-libtoolize-s-issue-in-autogen.sh'), #LocalPatch('0001-Disable-stderr-in-blas_server'), ] skip_uploading = True re_configure = True def prepare(self): pass def build(self): import os self.system(f'./autogen.sh') self.system(f'CC=\"{os.getenv("CC")}\" CXX=\"{os.getenv("CXX")}\" LDFLAGS=\"-lgnustl_shared -lsupc++ -latomic -L{self.env["ANDROID_NDK"]}/sources/cxx-stl/gnu-libstdc++/4.9/libs/armeabi-v7a\" ./configure --host=arm-linux-androideabi --target=arm-linux-androideabi --prefix={self.destdir()}') self.system(f'make install') #self.run_with_env(['make', 'install', f'PREFIX={self.destdir()}/usr']) Fix issue for building PyZMQfrom ..source import GitSource from ..package import Package from ..patch import LocalPatch from ..util import target_arch class LibZMQ(Package): source = GitSource('https://github.com/AIPYX/zeromq3-x.git', alias='libzmq', branch='qpyc/3.2.5') patches = [ LocalPatch('0001-Fix-libtoolize-s-issue-in-autogen.sh'), #LocalPatch('0001-Disable-stderr-in-blas_server'), ] skip_uploading = True re_configure = True def prepare(self): pass def build(self): import os self.system(f'./autogen.sh') self.system(f'CC=\"{os.getenv("CC")}\" CXX=\"{os.getenv("CXX")}\" LDFLAGS=\"-lgnustl_shared -lsupc++ -latomic -L{self.env["ANDROID_NDK"]}/sources/cxx-stl/gnu-libstdc++/4.9/libs/armeabi-v7a\" ./configure --host=arm-linux-androideabi --target=arm-linux-androideabi --prefix={self.destdir()}') self.system(f'make install') #self.run_with_env(['make', 'install', f'PREFIX={self.destdir()}/usr']) self.system( f'if [ -e {self.destdir()}/lib/libzmq.so ] ; then mv {self.destdir()}/lib/libzmq.so {self.destdir()}/lib/libzmq.so.old; fi' )
<commit_before>from ..source import GitSource from ..package import Package from ..patch import LocalPatch from ..util import target_arch class LibZMQ(Package): source = GitSource('https://github.com/AIPYX/zeromq3-x.git', alias='libzmq', branch='qpyc/3.2.5') patches = [ LocalPatch('0001-Fix-libtoolize-s-issue-in-autogen.sh'), #LocalPatch('0001-Disable-stderr-in-blas_server'), ] skip_uploading = True re_configure = True def prepare(self): pass def build(self): import os self.system(f'./autogen.sh') self.system(f'CC=\"{os.getenv("CC")}\" CXX=\"{os.getenv("CXX")}\" LDFLAGS=\"-lgnustl_shared -lsupc++ -latomic -L{self.env["ANDROID_NDK"]}/sources/cxx-stl/gnu-libstdc++/4.9/libs/armeabi-v7a\" ./configure --host=arm-linux-androideabi --target=arm-linux-androideabi --prefix={self.destdir()}') self.system(f'make install') #self.run_with_env(['make', 'install', f'PREFIX={self.destdir()}/usr']) <commit_msg>Fix issue for building PyZMQ<commit_after>from ..source import GitSource from ..package import Package from ..patch import LocalPatch from ..util import target_arch class LibZMQ(Package): source = GitSource('https://github.com/AIPYX/zeromq3-x.git', alias='libzmq', branch='qpyc/3.2.5') patches = [ LocalPatch('0001-Fix-libtoolize-s-issue-in-autogen.sh'), #LocalPatch('0001-Disable-stderr-in-blas_server'), ] skip_uploading = True re_configure = True def prepare(self): pass def build(self): import os self.system(f'./autogen.sh') self.system(f'CC=\"{os.getenv("CC")}\" CXX=\"{os.getenv("CXX")}\" LDFLAGS=\"-lgnustl_shared -lsupc++ -latomic -L{self.env["ANDROID_NDK"]}/sources/cxx-stl/gnu-libstdc++/4.9/libs/armeabi-v7a\" ./configure --host=arm-linux-androideabi --target=arm-linux-androideabi --prefix={self.destdir()}') self.system(f'make install') #self.run_with_env(['make', 'install', f'PREFIX={self.destdir()}/usr']) self.system( f'if [ -e {self.destdir()}/lib/libzmq.so ] ; then mv {self.destdir()}/lib/libzmq.so {self.destdir()}/lib/libzmq.so.old; fi' )
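The appended shell guard moves an existing libzmq.so out of the way before PyZMQ is built. The same step in pure Python, shown only as a sketch (destdir stands in for self.destdir()):

from pathlib import Path

def shelve_shared_lib(destdir):
    """Rename lib/libzmq.so to libzmq.so.old if present, as the shell one-liner does."""
    so = Path(destdir) / "lib" / "libzmq.so"
    if so.exists():
        so.rename(so.with_name(so.name + ".old"))  # libzmq.so -> libzmq.so.old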
960520b723d1af1999c647ebea8969b4837aa458
blister/xmp.py
blister/xmp.py
# Copyright (c) 2016 The Regents of the University of Michigan. # All Rights Reserved. Licensed according to the terms of the Revised # BSD License. See LICENSE.txt for details. class VanillaXMP: pass
# Copyright (c) 2016 The Regents of the University of Michigan. # All Rights Reserved. Licensed according to the terms of the Revised # BSD License. See LICENSE.txt for details. from collections.abc import MutableMapping class VanillaXMP (MutableMapping): def __delitem__ (self, key): pass def __getitem__ (self, key): pass def __iter__ (self): pass def __len__ (self): pass def __setitem__ (self, key, value): pass
Write minimal code to implement MutableMapping
Write minimal code to implement MutableMapping
Python
bsd-3-clause
daaang/blister
# Copyright (c) 2016 The Regents of the University of Michigan. # All Rights Reserved. Licensed according to the terms of the Revised # BSD License. See LICENSE.txt for details. class VanillaXMP: pass Write minimal code to implement MutableMapping
# Copyright (c) 2016 The Regents of the University of Michigan. # All Rights Reserved. Licensed according to the terms of the Revised # BSD License. See LICENSE.txt for details. from collections.abc import MutableMapping class VanillaXMP (MutableMapping): def __delitem__ (self, key): pass def __getitem__ (self, key): pass def __iter__ (self): pass def __len__ (self): pass def __setitem__ (self, key, value): pass
<commit_before># Copyright (c) 2016 The Regents of the University of Michigan. # All Rights Reserved. Licensed according to the terms of the Revised # BSD License. See LICENSE.txt for details. class VanillaXMP: pass <commit_msg>Write minimal code to implement MutableMapping<commit_after>
# Copyright (c) 2016 The Regents of the University of Michigan. # All Rights Reserved. Licensed according to the terms of the Revised # BSD License. See LICENSE.txt for details. from collections.abc import MutableMapping class VanillaXMP (MutableMapping): def __delitem__ (self, key): pass def __getitem__ (self, key): pass def __iter__ (self): pass def __len__ (self): pass def __setitem__ (self, key, value): pass
# Copyright (c) 2016 The Regents of the University of Michigan. # All Rights Reserved. Licensed according to the terms of the Revised # BSD License. See LICENSE.txt for details. class VanillaXMP: pass Write minimal code to implement MutableMapping# Copyright (c) 2016 The Regents of the University of Michigan. # All Rights Reserved. Licensed according to the terms of the Revised # BSD License. See LICENSE.txt for details. from collections.abc import MutableMapping class VanillaXMP (MutableMapping): def __delitem__ (self, key): pass def __getitem__ (self, key): pass def __iter__ (self): pass def __len__ (self): pass def __setitem__ (self, key, value): pass
<commit_before># Copyright (c) 2016 The Regents of the University of Michigan. # All Rights Reserved. Licensed according to the terms of the Revised # BSD License. See LICENSE.txt for details. class VanillaXMP: pass <commit_msg>Write minimal code to implement MutableMapping<commit_after># Copyright (c) 2016 The Regents of the University of Michigan. # All Rights Reserved. Licensed according to the terms of the Revised # BSD License. See LICENSE.txt for details. from collections.abc import MutableMapping class VanillaXMP (MutableMapping): def __delitem__ (self, key): pass def __getitem__ (self, key): pass def __iter__ (self): pass def __len__ (self): pass def __setitem__ (self, key, value): pass
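The five stubs above are exactly the abstract methods MutableMapping requires, which is what makes the class instantiable. A dict-backed version of the same skeleton (a sketch of the pattern, not the project's eventual XMP implementation) shows what each method is expected to do; get(), pop(), update() and the other mapping conveniences then come for free from the mixin:

from collections.abc import MutableMapping

class DictBackedXMP(MutableMapping):
    def __init__(self, *args, **kwargs):
        self._data = dict(*args, **kwargs)   # plain dict does the storage

    def __delitem__(self, key):
        del self._data[key]

    def __getitem__(self, key):
        return self._data[key]

    def __iter__(self):
        return iter(self._data)

    def __len__(self):
        return len(self._data)

    def __setitem__(self, key, value):
        self._data[key] = value

x = DictBackedXMP()
x["dc:title"] = "Blister"
print(len(x), list(x), x.get("dc:creator", "unknown"))  # 1 ['dc:title'] unknown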
ba23f58f7359b943d8d8ae7f05e15419c6918c6f
test/blacklist.py
test/blacklist.py
""" 'blacklist' is a Python dictionary, it stores the mapping of a string describing either a testclass or a testcase, i.e, testclass.testmethod, to the reason (a string) it is blacklisted. Following is an example which states that test class IntegerTypesExprTestCase should be skipped because 'This test class crashed' and the test case FoundationTestCase.test_data_type_and_expr_with_dsym should be skipped because it is 'Temporarily disabled'. blacklist = {'IntegerTypesExprTestCase': 'This test class crashed', 'FoundationTestCase.test_data_type_and_expr_with_dsym': 'Temporarily disabled' } """ blacklist = {}
""" 'blacklist' is a Python dictionary, it stores the mapping of a string describing either a testclass or a testcase, i.e, testclass.testmethod, to the reason (a string) it is blacklisted. Following is an example which states that test class IntegerTypesExprTestCase should be skipped because 'This test class crashed' and the test case FoundationTestCase.test_data_type_and_expr_with_dsym should be skipped because it is 'Temporarily disabled'. blacklist = {'IntegerTypesExprTestCase': 'This test class crashed', 'FoundationTestCase.test_data_type_and_expr_with_dsym': 'Temporarily disabled' } """ blacklist = {'BasicExprCommandsTestCase.test_evaluate_expression_python': 'Crashed while running the entire test suite with CC=clang' # To reproduce the crash: CC=clang ./dotest.py -v -w 2> ~/Developer/Log/lldbtest.log # The clang version used is clang-126. # Two radars filed for the crashes: rdar://problem/8769826 and rdar://problem/8773329. # To skip this test case: CC=clang ./dotest.py -b blacklist.py -v -w 2> ~/Developer/Log/lldbtest.log }
Add an entry for test case BasicExprCommandsTestCase.test_evaluate_expression_python, due to crashes while running the entire test suite with clang-126.
Add an entry for test case BasicExprCommandsTestCase.test_evaluate_expression_python, due to crashes while running the entire test suite with clang-126. To reproduce: CC=clang ./dotest.py -v -w 2> ~/Developer/Log/lldbtest.log To skip this test case: CC=clang ./dotest.py -b blacklist.py -v -w 2> ~/Developer/Log/lldbtest.log git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@121887 91177308-0d34-0410-b5e6-96231b3b80d8
Python
apache-2.0
llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb,llvm-mirror/lldb
""" 'blacklist' is a Python dictionary, it stores the mapping of a string describing either a testclass or a testcase, i.e, testclass.testmethod, to the reason (a string) it is blacklisted. Following is an example which states that test class IntegerTypesExprTestCase should be skipped because 'This test class crashed' and the test case FoundationTestCase.test_data_type_and_expr_with_dsym should be skipped because it is 'Temporarily disabled'. blacklist = {'IntegerTypesExprTestCase': 'This test class crashed', 'FoundationTestCase.test_data_type_and_expr_with_dsym': 'Temporarily disabled' } """ blacklist = {} Add an entry for test case BasicExprCommandsTestCase.test_evaluate_expression_python, due to crashes while running the entire test suite with clang-126. To reproduce: CC=clang ./dotest.py -v -w 2> ~/Developer/Log/lldbtest.log To skip this test case: CC=clang ./dotest.py -b blacklist.py -v -w 2> ~/Developer/Log/lldbtest.log git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@121887 91177308-0d34-0410-b5e6-96231b3b80d8
""" 'blacklist' is a Python dictionary, it stores the mapping of a string describing either a testclass or a testcase, i.e, testclass.testmethod, to the reason (a string) it is blacklisted. Following is an example which states that test class IntegerTypesExprTestCase should be skipped because 'This test class crashed' and the test case FoundationTestCase.test_data_type_and_expr_with_dsym should be skipped because it is 'Temporarily disabled'. blacklist = {'IntegerTypesExprTestCase': 'This test class crashed', 'FoundationTestCase.test_data_type_and_expr_with_dsym': 'Temporarily disabled' } """ blacklist = {'BasicExprCommandsTestCase.test_evaluate_expression_python': 'Crashed while running the entire test suite with CC=clang' # To reproduce the crash: CC=clang ./dotest.py -v -w 2> ~/Developer/Log/lldbtest.log # The clang version used is clang-126. # Two radars filed for the crashes: rdar://problem/8769826 and rdar://problem/8773329. # To skip this test case: CC=clang ./dotest.py -b blacklist.py -v -w 2> ~/Developer/Log/lldbtest.log }
<commit_before>""" 'blacklist' is a Python dictionary, it stores the mapping of a string describing either a testclass or a testcase, i.e, testclass.testmethod, to the reason (a string) it is blacklisted. Following is an example which states that test class IntegerTypesExprTestCase should be skipped because 'This test class crashed' and the test case FoundationTestCase.test_data_type_and_expr_with_dsym should be skipped because it is 'Temporarily disabled'. blacklist = {'IntegerTypesExprTestCase': 'This test class crashed', 'FoundationTestCase.test_data_type_and_expr_with_dsym': 'Temporarily disabled' } """ blacklist = {} <commit_msg>Add an entry for test case BasicExprCommandsTestCase.test_evaluate_expression_python, due to crashes while running the entire test suite with clang-126. To reproduce: CC=clang ./dotest.py -v -w 2> ~/Developer/Log/lldbtest.log To skip this test case: CC=clang ./dotest.py -b blacklist.py -v -w 2> ~/Developer/Log/lldbtest.log git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@121887 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>
""" 'blacklist' is a Python dictionary, it stores the mapping of a string describing either a testclass or a testcase, i.e, testclass.testmethod, to the reason (a string) it is blacklisted. Following is an example which states that test class IntegerTypesExprTestCase should be skipped because 'This test class crashed' and the test case FoundationTestCase.test_data_type_and_expr_with_dsym should be skipped because it is 'Temporarily disabled'. blacklist = {'IntegerTypesExprTestCase': 'This test class crashed', 'FoundationTestCase.test_data_type_and_expr_with_dsym': 'Temporarily disabled' } """ blacklist = {'BasicExprCommandsTestCase.test_evaluate_expression_python': 'Crashed while running the entire test suite with CC=clang' # To reproduce the crash: CC=clang ./dotest.py -v -w 2> ~/Developer/Log/lldbtest.log # The clang version used is clang-126. # Two radars filed for the crashes: rdar://problem/8769826 and rdar://problem/8773329. # To skip this test case: CC=clang ./dotest.py -b blacklist.py -v -w 2> ~/Developer/Log/lldbtest.log }
""" 'blacklist' is a Python dictionary, it stores the mapping of a string describing either a testclass or a testcase, i.e, testclass.testmethod, to the reason (a string) it is blacklisted. Following is an example which states that test class IntegerTypesExprTestCase should be skipped because 'This test class crashed' and the test case FoundationTestCase.test_data_type_and_expr_with_dsym should be skipped because it is 'Temporarily disabled'. blacklist = {'IntegerTypesExprTestCase': 'This test class crashed', 'FoundationTestCase.test_data_type_and_expr_with_dsym': 'Temporarily disabled' } """ blacklist = {} Add an entry for test case BasicExprCommandsTestCase.test_evaluate_expression_python, due to crashes while running the entire test suite with clang-126. To reproduce: CC=clang ./dotest.py -v -w 2> ~/Developer/Log/lldbtest.log To skip this test case: CC=clang ./dotest.py -b blacklist.py -v -w 2> ~/Developer/Log/lldbtest.log git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@121887 91177308-0d34-0410-b5e6-96231b3b80d8""" 'blacklist' is a Python dictionary, it stores the mapping of a string describing either a testclass or a testcase, i.e, testclass.testmethod, to the reason (a string) it is blacklisted. Following is an example which states that test class IntegerTypesExprTestCase should be skipped because 'This test class crashed' and the test case FoundationTestCase.test_data_type_and_expr_with_dsym should be skipped because it is 'Temporarily disabled'. blacklist = {'IntegerTypesExprTestCase': 'This test class crashed', 'FoundationTestCase.test_data_type_and_expr_with_dsym': 'Temporarily disabled' } """ blacklist = {'BasicExprCommandsTestCase.test_evaluate_expression_python': 'Crashed while running the entire test suite with CC=clang' # To reproduce the crash: CC=clang ./dotest.py -v -w 2> ~/Developer/Log/lldbtest.log # The clang version used is clang-126. # Two radars filed for the crashes: rdar://problem/8769826 and rdar://problem/8773329. # To skip this test case: CC=clang ./dotest.py -b blacklist.py -v -w 2> ~/Developer/Log/lldbtest.log }
<commit_before>""" 'blacklist' is a Python dictionary, it stores the mapping of a string describing either a testclass or a testcase, i.e, testclass.testmethod, to the reason (a string) it is blacklisted. Following is an example which states that test class IntegerTypesExprTestCase should be skipped because 'This test class crashed' and the test case FoundationTestCase.test_data_type_and_expr_with_dsym should be skipped because it is 'Temporarily disabled'. blacklist = {'IntegerTypesExprTestCase': 'This test class crashed', 'FoundationTestCase.test_data_type_and_expr_with_dsym': 'Temporarily disabled' } """ blacklist = {} <commit_msg>Add an entry for test case BasicExprCommandsTestCase.test_evaluate_expression_python, due to crashes while running the entire test suite with clang-126. To reproduce: CC=clang ./dotest.py -v -w 2> ~/Developer/Log/lldbtest.log To skip this test case: CC=clang ./dotest.py -b blacklist.py -v -w 2> ~/Developer/Log/lldbtest.log git-svn-id: b33bab8abb5b18c12ee100cd7761ab452d00b2b0@121887 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>""" 'blacklist' is a Python dictionary, it stores the mapping of a string describing either a testclass or a testcase, i.e, testclass.testmethod, to the reason (a string) it is blacklisted. Following is an example which states that test class IntegerTypesExprTestCase should be skipped because 'This test class crashed' and the test case FoundationTestCase.test_data_type_and_expr_with_dsym should be skipped because it is 'Temporarily disabled'. blacklist = {'IntegerTypesExprTestCase': 'This test class crashed', 'FoundationTestCase.test_data_type_and_expr_with_dsym': 'Temporarily disabled' } """ blacklist = {'BasicExprCommandsTestCase.test_evaluate_expression_python': 'Crashed while running the entire test suite with CC=clang' # To reproduce the crash: CC=clang ./dotest.py -v -w 2> ~/Developer/Log/lldbtest.log # The clang version used is clang-126. # Two radars filed for the crashes: rdar://problem/8769826 and rdar://problem/8773329. # To skip this test case: CC=clang ./dotest.py -b blacklist.py -v -w 2> ~/Developer/Log/lldbtest.log }
8c26cb08dd08b7e34352e51b06ecb9129ac201a1
stagecraft/libs/schemas/schemas.py
stagecraft/libs/schemas/schemas.py
from django.conf import settings
from json import loads as json_loads
from os import path


def get_schema():
    schema_root = path.join(
        settings.BASE_DIR,
        'stagecraft/apps/datasets/schemas/timestamp.json'
    )
    with open(schema_root) as f:
        json_f = json_loads(f.read())
    return json_f

from django.conf import settings
from json import loads as json_loads
from os import path


def get_schema():
    schema_root = path.join(
        settings.BASE_DIR,
        'stagecraft/apps/datasets/schemas/timestamp.json'
    )
    with open(schema_root) as f:
        schema = json_loads(f.read())
    return schema
Make the schema return object a bit more obvious and descriptive
Make the schema return object a bit more obvious and descriptive
Python
mit
alphagov/stagecraft,alphagov/stagecraft,alphagov/stagecraft,alphagov/stagecraft
from django.conf import settings
from json import loads as json_loads
from os import path


def get_schema():
    schema_root = path.join(
        settings.BASE_DIR,
        'stagecraft/apps/datasets/schemas/timestamp.json'
    )
    with open(schema_root) as f:
        json_f = json_loads(f.read())
    return json_f

Make the schema return object a bit more obvious and descriptive

from django.conf import settings
from json import loads as json_loads
from os import path


def get_schema():
    schema_root = path.join(
        settings.BASE_DIR,
        'stagecraft/apps/datasets/schemas/timestamp.json'
    )
    with open(schema_root) as f:
        schema = json_loads(f.read())
    return schema

<commit_before>from django.conf import settings
from json import loads as json_loads
from os import path


def get_schema():
    schema_root = path.join(
        settings.BASE_DIR,
        'stagecraft/apps/datasets/schemas/timestamp.json'
    )
    with open(schema_root) as f:
        json_f = json_loads(f.read())
    return json_f
<commit_msg>Make the schema return object a bit more obvious and descriptive<commit_after>

from django.conf import settings
from json import loads as json_loads
from os import path


def get_schema():
    schema_root = path.join(
        settings.BASE_DIR,
        'stagecraft/apps/datasets/schemas/timestamp.json'
    )
    with open(schema_root) as f:
        schema = json_loads(f.read())
    return schema

from django.conf import settings
from json import loads as json_loads
from os import path


def get_schema():
    schema_root = path.join(
        settings.BASE_DIR,
        'stagecraft/apps/datasets/schemas/timestamp.json'
    )
    with open(schema_root) as f:
        json_f = json_loads(f.read())
    return json_f

Make the schema return object a bit more obvious and descriptivefrom django.conf import settings
from json import loads as json_loads
from os import path


def get_schema():
    schema_root = path.join(
        settings.BASE_DIR,
        'stagecraft/apps/datasets/schemas/timestamp.json'
    )
    with open(schema_root) as f:
        schema = json_loads(f.read())
    return schema

<commit_before>from django.conf import settings
from json import loads as json_loads
from os import path


def get_schema():
    schema_root = path.join(
        settings.BASE_DIR,
        'stagecraft/apps/datasets/schemas/timestamp.json'
    )
    with open(schema_root) as f:
        json_f = json_loads(f.read())
    return json_f
<commit_msg>Make the schema return object a bit more obvious and descriptive<commit_after>from django.conf import settings
from json import loads as json_loads
from os import path


def get_schema():
    schema_root = path.join(
        settings.BASE_DIR,
        'stagecraft/apps/datasets/schemas/timestamp.json'
    )
    with open(schema_root) as f:
        schema = json_loads(f.read())
    return schema
316d86bdf56ed376fc039aa0f6dbd7fb05548bac
scheduler/schedule.py
scheduler/schedule.py
import sys
import time
import logging
logging.basicConfig(level=logging.DEBUG)

from redis import StrictRedis
from rq import Queue
from apscheduler.schedulers.blocking import BlockingScheduler

sys.path.append('/d1lod')
from d1lod import jobs

conn = StrictRedis(host='redis', port='6379')
q = Queue(connection=conn)
sched = BlockingScheduler()


@sched.scheduled_job('interval', minutes=1)
def update_job():
    q.enqueue(jobs.update_graph)


@sched.scheduled_job('interval', hours=1)
def debug_job():
    q.enqueue(jobs.calculate_stats)


@sched.scheduled_job('interval', hours=1)
def export_job():
    q.enqueue(jobs.export_graph)


@sched.scheduled_job('interval', minutes=1)
def print_jobs_job():
    sched.print_jobs()


time.sleep(10)
sched.start()

import sys
import time
import logging
logging.basicConfig(level=logging.DEBUG)

from redis import StrictRedis
from rq import Queue
from apscheduler.schedulers.blocking import BlockingScheduler

sys.path.append('/d1lod')
from d1lod import jobs

conn = StrictRedis(host='redis', port='6379')
q = Queue(connection=conn)
sched = BlockingScheduler()


@sched.scheduled_job('interval', minutes=1)
def update_job():
    q.enqueue(jobs.update_graph)


@sched.scheduled_job('interval', hours=1)
def stats_job():
    q.enqueue(jobs.calculate_stats)


@sched.scheduled_job('interval', hours=1)
def export_job():
    q.enqueue(jobs.export_graph)


@sched.scheduled_job('interval', minutes=1)
def print_jobs_job():
    sched.print_jobs()


time.sleep(10)
sched.start()
Fix stats job job's name
Fix stats job job's name
Python
apache-2.0
ec-geolink/d1lod,ec-geolink/d1lod,ec-geolink/d1lod,ec-geolink/d1lod
import sys
import time
import logging
logging.basicConfig(level=logging.DEBUG)

from redis import StrictRedis
from rq import Queue
from apscheduler.schedulers.blocking import BlockingScheduler

sys.path.append('/d1lod')
from d1lod import jobs

conn = StrictRedis(host='redis', port='6379')
q = Queue(connection=conn)
sched = BlockingScheduler()


@sched.scheduled_job('interval', minutes=1)
def update_job():
    q.enqueue(jobs.update_graph)


@sched.scheduled_job('interval', hours=1)
def debug_job():
    q.enqueue(jobs.calculate_stats)


@sched.scheduled_job('interval', hours=1)
def export_job():
    q.enqueue(jobs.export_graph)


@sched.scheduled_job('interval', minutes=1)
def print_jobs_job():
    sched.print_jobs()


time.sleep(10)
sched.start()

Fix stats job job's name

import sys
import time
import logging
logging.basicConfig(level=logging.DEBUG)

from redis import StrictRedis
from rq import Queue
from apscheduler.schedulers.blocking import BlockingScheduler

sys.path.append('/d1lod')
from d1lod import jobs

conn = StrictRedis(host='redis', port='6379')
q = Queue(connection=conn)
sched = BlockingScheduler()


@sched.scheduled_job('interval', minutes=1)
def update_job():
    q.enqueue(jobs.update_graph)


@sched.scheduled_job('interval', hours=1)
def stats_job():
    q.enqueue(jobs.calculate_stats)


@sched.scheduled_job('interval', hours=1)
def export_job():
    q.enqueue(jobs.export_graph)


@sched.scheduled_job('interval', minutes=1)
def print_jobs_job():
    sched.print_jobs()


time.sleep(10)
sched.start()

<commit_before>import sys
import time
import logging
logging.basicConfig(level=logging.DEBUG)

from redis import StrictRedis
from rq import Queue
from apscheduler.schedulers.blocking import BlockingScheduler

sys.path.append('/d1lod')
from d1lod import jobs

conn = StrictRedis(host='redis', port='6379')
q = Queue(connection=conn)
sched = BlockingScheduler()


@sched.scheduled_job('interval', minutes=1)
def update_job():
    q.enqueue(jobs.update_graph)


@sched.scheduled_job('interval', hours=1)
def debug_job():
    q.enqueue(jobs.calculate_stats)


@sched.scheduled_job('interval', hours=1)
def export_job():
    q.enqueue(jobs.export_graph)


@sched.scheduled_job('interval', minutes=1)
def print_jobs_job():
    sched.print_jobs()


time.sleep(10)
sched.start()
<commit_msg>Fix stats job job's name<commit_after>

import sys
import time
import logging
logging.basicConfig(level=logging.DEBUG)

from redis import StrictRedis
from rq import Queue
from apscheduler.schedulers.blocking import BlockingScheduler

sys.path.append('/d1lod')
from d1lod import jobs

conn = StrictRedis(host='redis', port='6379')
q = Queue(connection=conn)
sched = BlockingScheduler()


@sched.scheduled_job('interval', minutes=1)
def update_job():
    q.enqueue(jobs.update_graph)


@sched.scheduled_job('interval', hours=1)
def stats_job():
    q.enqueue(jobs.calculate_stats)


@sched.scheduled_job('interval', hours=1)
def export_job():
    q.enqueue(jobs.export_graph)


@sched.scheduled_job('interval', minutes=1)
def print_jobs_job():
    sched.print_jobs()


time.sleep(10)
sched.start()

import sys
import time
import logging
logging.basicConfig(level=logging.DEBUG)

from redis import StrictRedis
from rq import Queue
from apscheduler.schedulers.blocking import BlockingScheduler

sys.path.append('/d1lod')
from d1lod import jobs

conn = StrictRedis(host='redis', port='6379')
q = Queue(connection=conn)
sched = BlockingScheduler()


@sched.scheduled_job('interval', minutes=1)
def update_job():
    q.enqueue(jobs.update_graph)


@sched.scheduled_job('interval', hours=1)
def debug_job():
    q.enqueue(jobs.calculate_stats)


@sched.scheduled_job('interval', hours=1)
def export_job():
    q.enqueue(jobs.export_graph)


@sched.scheduled_job('interval', minutes=1)
def print_jobs_job():
    sched.print_jobs()


time.sleep(10)
sched.start()

Fix stats job job's nameimport sys
import time
import logging
logging.basicConfig(level=logging.DEBUG)

from redis import StrictRedis
from rq import Queue
from apscheduler.schedulers.blocking import BlockingScheduler

sys.path.append('/d1lod')
from d1lod import jobs

conn = StrictRedis(host='redis', port='6379')
q = Queue(connection=conn)
sched = BlockingScheduler()


@sched.scheduled_job('interval', minutes=1)
def update_job():
    q.enqueue(jobs.update_graph)


@sched.scheduled_job('interval', hours=1)
def stats_job():
    q.enqueue(jobs.calculate_stats)


@sched.scheduled_job('interval', hours=1)
def export_job():
    q.enqueue(jobs.export_graph)


@sched.scheduled_job('interval', minutes=1)
def print_jobs_job():
    sched.print_jobs()


time.sleep(10)
sched.start()

<commit_before>import sys
import time
import logging
logging.basicConfig(level=logging.DEBUG)

from redis import StrictRedis
from rq import Queue
from apscheduler.schedulers.blocking import BlockingScheduler

sys.path.append('/d1lod')
from d1lod import jobs

conn = StrictRedis(host='redis', port='6379')
q = Queue(connection=conn)
sched = BlockingScheduler()


@sched.scheduled_job('interval', minutes=1)
def update_job():
    q.enqueue(jobs.update_graph)


@sched.scheduled_job('interval', hours=1)
def debug_job():
    q.enqueue(jobs.calculate_stats)


@sched.scheduled_job('interval', hours=1)
def export_job():
    q.enqueue(jobs.export_graph)


@sched.scheduled_job('interval', minutes=1)
def print_jobs_job():
    sched.print_jobs()


time.sleep(10)
sched.start()
<commit_msg>Fix stats job job's name<commit_after>import sys
import time
import logging
logging.basicConfig(level=logging.DEBUG)

from redis import StrictRedis
from rq import Queue
from apscheduler.schedulers.blocking import BlockingScheduler

sys.path.append('/d1lod')
from d1lod import jobs

conn = StrictRedis(host='redis', port='6379')
q = Queue(connection=conn)
sched = BlockingScheduler()


@sched.scheduled_job('interval', minutes=1)
def update_job():
    q.enqueue(jobs.update_graph)


@sched.scheduled_job('interval', hours=1)
def stats_job():
    q.enqueue(jobs.calculate_stats)


@sched.scheduled_job('interval', hours=1)
def export_job():
    q.enqueue(jobs.export_graph)


@sched.scheduled_job('interval', minutes=1)
def print_jobs_job():
    sched.print_jobs()


time.sleep(10)
sched.start()
5d3349675a0b4049efedf52958b6843e9ef31c1b
src/models/image.py
src/models/image.py
import datetime

from utils.utils import limit_file_name


class Image():
    _file_name_pattern = "reddit_%s_%s_%s_album_%s_%s_%s"

    def __init__(self, url, post, image_file):
        self.post_id = post.id
        self.url = url
        self.sub_display_name = post.subreddit.display_name
        self.image_file = limit_file_name(image_file)
        self.domain = post.domain
        self.created = datetime.datetime.fromtimestamp(post.created).strftime("%y%m%d")

        if "/a/" in post.url:
            self.album_id = post.url[post.url.index("/a/") + 3:]
        elif "/gallery/" in post.url:
            self.album_id = post.url[post.url.index("/gallery/") + 9:]
        else:
            self.album_id = None

        self.local_file_name = self._file_name_pattern % (
            self.created,
            self.sub_display_name,
            self.post_id,
            self.album_id,
            self.domain,
            self.image_file)

import datetime

from utils.utils import limit_file_name


class Image:
    _file_name_pattern = "reddit_%s_%s_%s_album_%s_%s_%s"

    def __init__(self, url, post, image_file):
        self.post_id = post.id
        self.url = url
        self.sub_display_name = post.subreddit.display_name
        self.image_file = limit_file_name(image_file)
        self.domain = post.domain
        self.created = datetime.datetime.fromtimestamp(post.created).strftime("%y%m%d")

        if "/a/" in post.url:
            self.album_id = post.url[post.url.index("/a/") + 3:]
        elif "/gallery/" in post.url:
            self.album_id = post.url[post.url.index("/gallery/") + 9:]
        else:
            self.album_id = None

        self.local_file_name = self._file_name_pattern % (
            self.created,
            self.sub_display_name,
            self.post_id,
            self.album_id,
            self.domain,
            self.image_file)
Remove redundant parenthesis in Image
Remove redundant parenthesis in Image
Python
apache-2.0
CharlieCorner/pymage_downloader
import datetime

from utils.utils import limit_file_name


class Image():
    _file_name_pattern = "reddit_%s_%s_%s_album_%s_%s_%s"

    def __init__(self, url, post, image_file):
        self.post_id = post.id
        self.url = url
        self.sub_display_name = post.subreddit.display_name
        self.image_file = limit_file_name(image_file)
        self.domain = post.domain
        self.created = datetime.datetime.fromtimestamp(post.created).strftime("%y%m%d")

        if "/a/" in post.url:
            self.album_id = post.url[post.url.index("/a/") + 3:]
        elif "/gallery/" in post.url:
            self.album_id = post.url[post.url.index("/gallery/") + 9:]
        else:
            self.album_id = None

        self.local_file_name = self._file_name_pattern % (
            self.created,
            self.sub_display_name,
            self.post_id,
            self.album_id,
            self.domain,
            self.image_file)

Remove redundant parenthesis in Image

import datetime

from utils.utils import limit_file_name


class Image:
    _file_name_pattern = "reddit_%s_%s_%s_album_%s_%s_%s"

    def __init__(self, url, post, image_file):
        self.post_id = post.id
        self.url = url
        self.sub_display_name = post.subreddit.display_name
        self.image_file = limit_file_name(image_file)
        self.domain = post.domain
        self.created = datetime.datetime.fromtimestamp(post.created).strftime("%y%m%d")

        if "/a/" in post.url:
            self.album_id = post.url[post.url.index("/a/") + 3:]
        elif "/gallery/" in post.url:
            self.album_id = post.url[post.url.index("/gallery/") + 9:]
        else:
            self.album_id = None

        self.local_file_name = self._file_name_pattern % (
            self.created,
            self.sub_display_name,
            self.post_id,
            self.album_id,
            self.domain,
            self.image_file)

<commit_before>import datetime

from utils.utils import limit_file_name


class Image():
    _file_name_pattern = "reddit_%s_%s_%s_album_%s_%s_%s"

    def __init__(self, url, post, image_file):
        self.post_id = post.id
        self.url = url
        self.sub_display_name = post.subreddit.display_name
        self.image_file = limit_file_name(image_file)
        self.domain = post.domain
        self.created = datetime.datetime.fromtimestamp(post.created).strftime("%y%m%d")

        if "/a/" in post.url:
            self.album_id = post.url[post.url.index("/a/") + 3:]
        elif "/gallery/" in post.url:
            self.album_id = post.url[post.url.index("/gallery/") + 9:]
        else:
            self.album_id = None

        self.local_file_name = self._file_name_pattern % (
            self.created,
            self.sub_display_name,
            self.post_id,
            self.album_id,
            self.domain,
            self.image_file)
<commit_msg>Remove redundant parenthesis in Image<commit_after>

import datetime

from utils.utils import limit_file_name


class Image:
    _file_name_pattern = "reddit_%s_%s_%s_album_%s_%s_%s"

    def __init__(self, url, post, image_file):
        self.post_id = post.id
        self.url = url
        self.sub_display_name = post.subreddit.display_name
        self.image_file = limit_file_name(image_file)
        self.domain = post.domain
        self.created = datetime.datetime.fromtimestamp(post.created).strftime("%y%m%d")

        if "/a/" in post.url:
            self.album_id = post.url[post.url.index("/a/") + 3:]
        elif "/gallery/" in post.url:
            self.album_id = post.url[post.url.index("/gallery/") + 9:]
        else:
            self.album_id = None

        self.local_file_name = self._file_name_pattern % (
            self.created,
            self.sub_display_name,
            self.post_id,
            self.album_id,
            self.domain,
            self.image_file)

import datetime

from utils.utils import limit_file_name


class Image():
    _file_name_pattern = "reddit_%s_%s_%s_album_%s_%s_%s"

    def __init__(self, url, post, image_file):
        self.post_id = post.id
        self.url = url
        self.sub_display_name = post.subreddit.display_name
        self.image_file = limit_file_name(image_file)
        self.domain = post.domain
        self.created = datetime.datetime.fromtimestamp(post.created).strftime("%y%m%d")

        if "/a/" in post.url:
            self.album_id = post.url[post.url.index("/a/") + 3:]
        elif "/gallery/" in post.url:
            self.album_id = post.url[post.url.index("/gallery/") + 9:]
        else:
            self.album_id = None

        self.local_file_name = self._file_name_pattern % (
            self.created,
            self.sub_display_name,
            self.post_id,
            self.album_id,
            self.domain,
            self.image_file)

Remove redundant parenthesis in Imageimport datetime

from utils.utils import limit_file_name


class Image:
    _file_name_pattern = "reddit_%s_%s_%s_album_%s_%s_%s"

    def __init__(self, url, post, image_file):
        self.post_id = post.id
        self.url = url
        self.sub_display_name = post.subreddit.display_name
        self.image_file = limit_file_name(image_file)
        self.domain = post.domain
        self.created = datetime.datetime.fromtimestamp(post.created).strftime("%y%m%d")

        if "/a/" in post.url:
            self.album_id = post.url[post.url.index("/a/") + 3:]
        elif "/gallery/" in post.url:
            self.album_id = post.url[post.url.index("/gallery/") + 9:]
        else:
            self.album_id = None

        self.local_file_name = self._file_name_pattern % (
            self.created,
            self.sub_display_name,
            self.post_id,
            self.album_id,
            self.domain,
            self.image_file)

<commit_before>import datetime

from utils.utils import limit_file_name


class Image():
    _file_name_pattern = "reddit_%s_%s_%s_album_%s_%s_%s"

    def __init__(self, url, post, image_file):
        self.post_id = post.id
        self.url = url
        self.sub_display_name = post.subreddit.display_name
        self.image_file = limit_file_name(image_file)
        self.domain = post.domain
        self.created = datetime.datetime.fromtimestamp(post.created).strftime("%y%m%d")

        if "/a/" in post.url:
            self.album_id = post.url[post.url.index("/a/") + 3:]
        elif "/gallery/" in post.url:
            self.album_id = post.url[post.url.index("/gallery/") + 9:]
        else:
            self.album_id = None

        self.local_file_name = self._file_name_pattern % (
            self.created,
            self.sub_display_name,
            self.post_id,
            self.album_id,
            self.domain,
            self.image_file)
<commit_msg>Remove redundant parenthesis in Image<commit_after>import datetime

from utils.utils import limit_file_name


class Image:
    _file_name_pattern = "reddit_%s_%s_%s_album_%s_%s_%s"

    def __init__(self, url, post, image_file):
        self.post_id = post.id
        self.url = url
        self.sub_display_name = post.subreddit.display_name
        self.image_file = limit_file_name(image_file)
        self.domain = post.domain
        self.created = datetime.datetime.fromtimestamp(post.created).strftime("%y%m%d")

        if "/a/" in post.url:
            self.album_id = post.url[post.url.index("/a/") + 3:]
        elif "/gallery/" in post.url:
            self.album_id = post.url[post.url.index("/gallery/") + 9:]
        else:
            self.album_id = None

        self.local_file_name = self._file_name_pattern % (
            self.created,
            self.sub_display_name,
            self.post_id,
            self.album_id,
            self.domain,
            self.image_file)
e3f55180bba935f09355b73049c3729c797c9e9f
lc0266_palindrome_permutation.py
lc0266_palindrome_permutation.py
"""Leetcode 266. Palindrome Permutation (Premium) Easy URL: https://leetcode.com/problems/palindrome-permutation Given a string, determine if a permutation of the string could form a palindrome. Example 1: Input: "code" Output: false Example 2: Input: "aab" Output: true Example 3: Input: "carerac" Output: true """ class Solution(object): def canPermutePalindrome(self, s): """ :type s: str :rtype: bool """ pass def main(): pass if __name__ == '__main__': main()
"""Leetcode 266. Palindrome Permutation (Premium) Easy URL: https://leetcode.com/problems/palindrome-permutation Given a string, determine if a permutation of the string could form a palindrome. Example 1: Input: "code" Output: false Example 2: Input: "aab" Output: true Example 3: Input: "carerac" Output: true """ class SolutionOneOddCharCounts(object): def canPermutePalindrome(self, s): """ :type s: str :rtype: bool """ from collections import defaultdict if not s: return True # Count char counts; palindrome permutation only has one odd char count. char_counts = defaultdict(int) for c in s: char_counts[c] += 1 n_odd_chars = 0 for k, v in char_counts.items(): if v % 2 == 1: n_odd_chars += 1 if n_odd_chars >= 2: return False return True def main(): # Output: false s = "code" print SolutionOneOddCharCounts().canPermutePalindrome(s) # Output: true s = "aab" print SolutionOneOddCharCounts().canPermutePalindrome(s) # Output: true s = "carerac" print SolutionOneOddCharCounts().canPermutePalindrome(s) if __name__ == '__main__': main()
Complete one odd char count sol
Complete one odd char count sol
Python
bsd-2-clause
bowen0701/algorithms_data_structures
"""Leetcode 266. Palindrome Permutation (Premium) Easy URL: https://leetcode.com/problems/palindrome-permutation Given a string, determine if a permutation of the string could form a palindrome. Example 1: Input: "code" Output: false Example 2: Input: "aab" Output: true Example 3: Input: "carerac" Output: true """ class Solution(object): def canPermutePalindrome(self, s): """ :type s: str :rtype: bool """ pass def main(): pass if __name__ == '__main__': main() Complete one odd char count sol
"""Leetcode 266. Palindrome Permutation (Premium) Easy URL: https://leetcode.com/problems/palindrome-permutation Given a string, determine if a permutation of the string could form a palindrome. Example 1: Input: "code" Output: false Example 2: Input: "aab" Output: true Example 3: Input: "carerac" Output: true """ class SolutionOneOddCharCounts(object): def canPermutePalindrome(self, s): """ :type s: str :rtype: bool """ from collections import defaultdict if not s: return True # Count char counts; palindrome permutation only has one odd char count. char_counts = defaultdict(int) for c in s: char_counts[c] += 1 n_odd_chars = 0 for k, v in char_counts.items(): if v % 2 == 1: n_odd_chars += 1 if n_odd_chars >= 2: return False return True def main(): # Output: false s = "code" print SolutionOneOddCharCounts().canPermutePalindrome(s) # Output: true s = "aab" print SolutionOneOddCharCounts().canPermutePalindrome(s) # Output: true s = "carerac" print SolutionOneOddCharCounts().canPermutePalindrome(s) if __name__ == '__main__': main()
<commit_before>"""Leetcode 266. Palindrome Permutation (Premium) Easy URL: https://leetcode.com/problems/palindrome-permutation Given a string, determine if a permutation of the string could form a palindrome. Example 1: Input: "code" Output: false Example 2: Input: "aab" Output: true Example 3: Input: "carerac" Output: true """ class Solution(object): def canPermutePalindrome(self, s): """ :type s: str :rtype: bool """ pass def main(): pass if __name__ == '__main__': main() <commit_msg>Complete one odd char count sol<commit_after>
"""Leetcode 266. Palindrome Permutation (Premium) Easy URL: https://leetcode.com/problems/palindrome-permutation Given a string, determine if a permutation of the string could form a palindrome. Example 1: Input: "code" Output: false Example 2: Input: "aab" Output: true Example 3: Input: "carerac" Output: true """ class SolutionOneOddCharCounts(object): def canPermutePalindrome(self, s): """ :type s: str :rtype: bool """ from collections import defaultdict if not s: return True # Count char counts; palindrome permutation only has one odd char count. char_counts = defaultdict(int) for c in s: char_counts[c] += 1 n_odd_chars = 0 for k, v in char_counts.items(): if v % 2 == 1: n_odd_chars += 1 if n_odd_chars >= 2: return False return True def main(): # Output: false s = "code" print SolutionOneOddCharCounts().canPermutePalindrome(s) # Output: true s = "aab" print SolutionOneOddCharCounts().canPermutePalindrome(s) # Output: true s = "carerac" print SolutionOneOddCharCounts().canPermutePalindrome(s) if __name__ == '__main__': main()
"""Leetcode 266. Palindrome Permutation (Premium) Easy URL: https://leetcode.com/problems/palindrome-permutation Given a string, determine if a permutation of the string could form a palindrome. Example 1: Input: "code" Output: false Example 2: Input: "aab" Output: true Example 3: Input: "carerac" Output: true """ class Solution(object): def canPermutePalindrome(self, s): """ :type s: str :rtype: bool """ pass def main(): pass if __name__ == '__main__': main() Complete one odd char count sol"""Leetcode 266. Palindrome Permutation (Premium) Easy URL: https://leetcode.com/problems/palindrome-permutation Given a string, determine if a permutation of the string could form a palindrome. Example 1: Input: "code" Output: false Example 2: Input: "aab" Output: true Example 3: Input: "carerac" Output: true """ class SolutionOneOddCharCounts(object): def canPermutePalindrome(self, s): """ :type s: str :rtype: bool """ from collections import defaultdict if not s: return True # Count char counts; palindrome permutation only has one odd char count. char_counts = defaultdict(int) for c in s: char_counts[c] += 1 n_odd_chars = 0 for k, v in char_counts.items(): if v % 2 == 1: n_odd_chars += 1 if n_odd_chars >= 2: return False return True def main(): # Output: false s = "code" print SolutionOneOddCharCounts().canPermutePalindrome(s) # Output: true s = "aab" print SolutionOneOddCharCounts().canPermutePalindrome(s) # Output: true s = "carerac" print SolutionOneOddCharCounts().canPermutePalindrome(s) if __name__ == '__main__': main()
<commit_before>"""Leetcode 266. Palindrome Permutation (Premium) Easy URL: https://leetcode.com/problems/palindrome-permutation Given a string, determine if a permutation of the string could form a palindrome. Example 1: Input: "code" Output: false Example 2: Input: "aab" Output: true Example 3: Input: "carerac" Output: true """ class Solution(object): def canPermutePalindrome(self, s): """ :type s: str :rtype: bool """ pass def main(): pass if __name__ == '__main__': main() <commit_msg>Complete one odd char count sol<commit_after>"""Leetcode 266. Palindrome Permutation (Premium) Easy URL: https://leetcode.com/problems/palindrome-permutation Given a string, determine if a permutation of the string could form a palindrome. Example 1: Input: "code" Output: false Example 2: Input: "aab" Output: true Example 3: Input: "carerac" Output: true """ class SolutionOneOddCharCounts(object): def canPermutePalindrome(self, s): """ :type s: str :rtype: bool """ from collections import defaultdict if not s: return True # Count char counts; palindrome permutation only has one odd char count. char_counts = defaultdict(int) for c in s: char_counts[c] += 1 n_odd_chars = 0 for k, v in char_counts.items(): if v % 2 == 1: n_odd_chars += 1 if n_odd_chars >= 2: return False return True def main(): # Output: false s = "code" print SolutionOneOddCharCounts().canPermutePalindrome(s) # Output: true s = "aab" print SolutionOneOddCharCounts().canPermutePalindrome(s) # Output: true s = "carerac" print SolutionOneOddCharCounts().canPermutePalindrome(s) if __name__ == '__main__': main()
b6d61fef0fe372c7149fa52e2ab1acff144d0118
tests/fixtures/dummy/facilities.py
tests/fixtures/dummy/facilities.py
# Copyright (c) 2015 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
from fixture import DataSet

from .address import AddressData


class SiteData(DataSet):
    class dummy:
        name = "dummy"


class BuildingData(DataSet):
    class dummy_house1:
        site = SiteData.dummy
        street = "dummy"
        number = "01"
        short_name = "abc"

    class dummy_house2:
        site = SiteData.dummy
        street = "dummy"
        number = "02"
        short_name = "def"


class RoomData(DataSet):
    class dummy_room1:
        number = "1"
        level = 1
        inhabitable = True
        building = BuildingData.dummy_house1
        address = AddressData.dummy_address1

    class dummy_room2:
        number = "2"
        level = 2
        inhabitable = True
        building = BuildingData.dummy_house2
        address = AddressData.dummy_address2

    class dummy_room3:
        number = "2"
        level = 2
        inhabitable = True
        building = BuildingData.dummy_house1
        address = AddressData.dummy_address3

    class dummy_room4(dummy_room1):
        number = "2"
        address = AddressData.dummy_address4

    class dummy_room5(dummy_room1):
        number = "2"
        address = AddressData.dummy_address5

# Copyright (c) 2015 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
from fixture import DataSet

from .address import AddressData
from .finance import AccountData


class SiteData(DataSet):
    class dummy:
        name = "dummy"


class BuildingData(DataSet):
    class dummy_house1:
        site = SiteData.dummy
        street = "dummy"
        number = "01"
        short_name = "abc"
        fee_account = AccountData.dummy_revenue

    class dummy_house2:
        site = SiteData.dummy
        street = "dummy"
        number = "02"
        short_name = "def"
        fee_account = AccountData.dummy_revenue


class RoomData(DataSet):
    class dummy_room1:
        number = "1"
        level = 1
        inhabitable = True
        building = BuildingData.dummy_house1
        address = AddressData.dummy_address1

    class dummy_room2:
        number = "2"
        level = 2
        inhabitable = True
        building = BuildingData.dummy_house2
        address = AddressData.dummy_address2

    class dummy_room3:
        number = "2"
        level = 2
        inhabitable = True
        building = BuildingData.dummy_house1
        address = AddressData.dummy_address3

    class dummy_room4(dummy_room1):
        number = "2"
        address = AddressData.dummy_address4

    class dummy_room5(dummy_room1):
        number = "2"
        address = AddressData.dummy_address5
Add fee_account to BuildingData of legacy test base
Add fee_account to BuildingData of legacy test base
Python
apache-2.0
agdsn/pycroft,agdsn/pycroft,agdsn/pycroft,agdsn/pycroft,agdsn/pycroft
# Copyright (c) 2015 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
from fixture import DataSet

from .address import AddressData


class SiteData(DataSet):
    class dummy:
        name = "dummy"


class BuildingData(DataSet):
    class dummy_house1:
        site = SiteData.dummy
        street = "dummy"
        number = "01"
        short_name = "abc"

    class dummy_house2:
        site = SiteData.dummy
        street = "dummy"
        number = "02"
        short_name = "def"


class RoomData(DataSet):
    class dummy_room1:
        number = "1"
        level = 1
        inhabitable = True
        building = BuildingData.dummy_house1
        address = AddressData.dummy_address1

    class dummy_room2:
        number = "2"
        level = 2
        inhabitable = True
        building = BuildingData.dummy_house2
        address = AddressData.dummy_address2

    class dummy_room3:
        number = "2"
        level = 2
        inhabitable = True
        building = BuildingData.dummy_house1
        address = AddressData.dummy_address3

    class dummy_room4(dummy_room1):
        number = "2"
        address = AddressData.dummy_address4

    class dummy_room5(dummy_room1):
        number = "2"
        address = AddressData.dummy_address5

Add fee_account to BuildingData of legacy test base

# Copyright (c) 2015 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
from fixture import DataSet

from .address import AddressData
from .finance import AccountData


class SiteData(DataSet):
    class dummy:
        name = "dummy"


class BuildingData(DataSet):
    class dummy_house1:
        site = SiteData.dummy
        street = "dummy"
        number = "01"
        short_name = "abc"
        fee_account = AccountData.dummy_revenue

    class dummy_house2:
        site = SiteData.dummy
        street = "dummy"
        number = "02"
        short_name = "def"
        fee_account = AccountData.dummy_revenue


class RoomData(DataSet):
    class dummy_room1:
        number = "1"
        level = 1
        inhabitable = True
        building = BuildingData.dummy_house1
        address = AddressData.dummy_address1

    class dummy_room2:
        number = "2"
        level = 2
        inhabitable = True
        building = BuildingData.dummy_house2
        address = AddressData.dummy_address2

    class dummy_room3:
        number = "2"
        level = 2
        inhabitable = True
        building = BuildingData.dummy_house1
        address = AddressData.dummy_address3

    class dummy_room4(dummy_room1):
        number = "2"
        address = AddressData.dummy_address4

    class dummy_room5(dummy_room1):
        number = "2"
        address = AddressData.dummy_address5

<commit_before># Copyright (c) 2015 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
from fixture import DataSet

from .address import AddressData


class SiteData(DataSet):
    class dummy:
        name = "dummy"


class BuildingData(DataSet):
    class dummy_house1:
        site = SiteData.dummy
        street = "dummy"
        number = "01"
        short_name = "abc"

    class dummy_house2:
        site = SiteData.dummy
        street = "dummy"
        number = "02"
        short_name = "def"


class RoomData(DataSet):
    class dummy_room1:
        number = "1"
        level = 1
        inhabitable = True
        building = BuildingData.dummy_house1
        address = AddressData.dummy_address1

    class dummy_room2:
        number = "2"
        level = 2
        inhabitable = True
        building = BuildingData.dummy_house2
        address = AddressData.dummy_address2

    class dummy_room3:
        number = "2"
        level = 2
        inhabitable = True
        building = BuildingData.dummy_house1
        address = AddressData.dummy_address3

    class dummy_room4(dummy_room1):
        number = "2"
        address = AddressData.dummy_address4

    class dummy_room5(dummy_room1):
        number = "2"
        address = AddressData.dummy_address5
<commit_msg>Add fee_account to BuildingData of legacy test base<commit_after>

# Copyright (c) 2015 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
from fixture import DataSet

from .address import AddressData
from .finance import AccountData


class SiteData(DataSet):
    class dummy:
        name = "dummy"


class BuildingData(DataSet):
    class dummy_house1:
        site = SiteData.dummy
        street = "dummy"
        number = "01"
        short_name = "abc"
        fee_account = AccountData.dummy_revenue

    class dummy_house2:
        site = SiteData.dummy
        street = "dummy"
        number = "02"
        short_name = "def"
        fee_account = AccountData.dummy_revenue


class RoomData(DataSet):
    class dummy_room1:
        number = "1"
        level = 1
        inhabitable = True
        building = BuildingData.dummy_house1
        address = AddressData.dummy_address1

    class dummy_room2:
        number = "2"
        level = 2
        inhabitable = True
        building = BuildingData.dummy_house2
        address = AddressData.dummy_address2

    class dummy_room3:
        number = "2"
        level = 2
        inhabitable = True
        building = BuildingData.dummy_house1
        address = AddressData.dummy_address3

    class dummy_room4(dummy_room1):
        number = "2"
        address = AddressData.dummy_address4

    class dummy_room5(dummy_room1):
        number = "2"
        address = AddressData.dummy_address5

# Copyright (c) 2015 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
from fixture import DataSet

from .address import AddressData


class SiteData(DataSet):
    class dummy:
        name = "dummy"


class BuildingData(DataSet):
    class dummy_house1:
        site = SiteData.dummy
        street = "dummy"
        number = "01"
        short_name = "abc"

    class dummy_house2:
        site = SiteData.dummy
        street = "dummy"
        number = "02"
        short_name = "def"


class RoomData(DataSet):
    class dummy_room1:
        number = "1"
        level = 1
        inhabitable = True
        building = BuildingData.dummy_house1
        address = AddressData.dummy_address1

    class dummy_room2:
        number = "2"
        level = 2
        inhabitable = True
        building = BuildingData.dummy_house2
        address = AddressData.dummy_address2

    class dummy_room3:
        number = "2"
        level = 2
        inhabitable = True
        building = BuildingData.dummy_house1
        address = AddressData.dummy_address3

    class dummy_room4(dummy_room1):
        number = "2"
        address = AddressData.dummy_address4

    class dummy_room5(dummy_room1):
        number = "2"
        address = AddressData.dummy_address5

Add fee_account to BuildingData of legacy test base# Copyright (c) 2015 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
from fixture import DataSet

from .address import AddressData
from .finance import AccountData


class SiteData(DataSet):
    class dummy:
        name = "dummy"


class BuildingData(DataSet):
    class dummy_house1:
        site = SiteData.dummy
        street = "dummy"
        number = "01"
        short_name = "abc"
        fee_account = AccountData.dummy_revenue

    class dummy_house2:
        site = SiteData.dummy
        street = "dummy"
        number = "02"
        short_name = "def"
        fee_account = AccountData.dummy_revenue


class RoomData(DataSet):
    class dummy_room1:
        number = "1"
        level = 1
        inhabitable = True
        building = BuildingData.dummy_house1
        address = AddressData.dummy_address1

    class dummy_room2:
        number = "2"
        level = 2
        inhabitable = True
        building = BuildingData.dummy_house2
        address = AddressData.dummy_address2

    class dummy_room3:
        number = "2"
        level = 2
        inhabitable = True
        building = BuildingData.dummy_house1
        address = AddressData.dummy_address3

    class dummy_room4(dummy_room1):
        number = "2"
        address = AddressData.dummy_address4

    class dummy_room5(dummy_room1):
        number = "2"
        address = AddressData.dummy_address5

<commit_before># Copyright (c) 2015 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
from fixture import DataSet

from .address import AddressData


class SiteData(DataSet):
    class dummy:
        name = "dummy"


class BuildingData(DataSet):
    class dummy_house1:
        site = SiteData.dummy
        street = "dummy"
        number = "01"
        short_name = "abc"

    class dummy_house2:
        site = SiteData.dummy
        street = "dummy"
        number = "02"
        short_name = "def"


class RoomData(DataSet):
    class dummy_room1:
        number = "1"
        level = 1
        inhabitable = True
        building = BuildingData.dummy_house1
        address = AddressData.dummy_address1

    class dummy_room2:
        number = "2"
        level = 2
        inhabitable = True
        building = BuildingData.dummy_house2
        address = AddressData.dummy_address2

    class dummy_room3:
        number = "2"
        level = 2
        inhabitable = True
        building = BuildingData.dummy_house1
        address = AddressData.dummy_address3

    class dummy_room4(dummy_room1):
        number = "2"
        address = AddressData.dummy_address4

    class dummy_room5(dummy_room1):
        number = "2"
        address = AddressData.dummy_address5
<commit_msg>Add fee_account to BuildingData of legacy test base<commit_after># Copyright (c) 2015 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
from fixture import DataSet

from .address import AddressData
from .finance import AccountData


class SiteData(DataSet):
    class dummy:
        name = "dummy"


class BuildingData(DataSet):
    class dummy_house1:
        site = SiteData.dummy
        street = "dummy"
        number = "01"
        short_name = "abc"
        fee_account = AccountData.dummy_revenue

    class dummy_house2:
        site = SiteData.dummy
        street = "dummy"
        number = "02"
        short_name = "def"
        fee_account = AccountData.dummy_revenue


class RoomData(DataSet):
    class dummy_room1:
        number = "1"
        level = 1
        inhabitable = True
        building = BuildingData.dummy_house1
        address = AddressData.dummy_address1

    class dummy_room2:
        number = "2"
        level = 2
        inhabitable = True
        building = BuildingData.dummy_house2
        address = AddressData.dummy_address2

    class dummy_room3:
        number = "2"
        level = 2
        inhabitable = True
        building = BuildingData.dummy_house1
        address = AddressData.dummy_address3

    class dummy_room4(dummy_room1):
        number = "2"
        address = AddressData.dummy_address4

    class dummy_room5(dummy_room1):
        number = "2"
        address = AddressData.dummy_address5
5f27e570a369fbb408a48a567064a96f1ceac277
tests/commands/project/utils.py
tests/commands/project/utils.py
from uuid import uuid4

import requests_mock

from tests.utils import get_project_list_data
from valohai_cli.utils import get_random_string


def get_project_mock(create_project_name=None, existing_projects=None):
    username = get_random_string()
    m = requests_mock.mock()
    if isinstance(existing_projects, int):
        existing_projects = get_project_list_data([get_random_string() for x in range(existing_projects)])
    if existing_projects is not None:
        m.get('https://app.valohai.com/api/v0/projects/', json=existing_projects)
    if create_project_name:
        m.post('https://app.valohai.com/api/v0/projects/', json=lambda request, context: {
            'id': str(uuid4()),
            'name': create_project_name,
            'owner': {
                'id': 8,
                'username': username,
            }
        })
    m.get('https://app.valohai.com/api/v0/projects/ownership_options/', json=[username])
    return m

from uuid import uuid4

import requests_mock

from tests.utils import get_project_list_data
from valohai_cli.utils import get_random_string


def get_project_mock(create_project_name=None, existing_projects=None):
    username = get_random_string()
    project_id = uuid4()
    m = requests_mock.mock()
    if isinstance(existing_projects, int):
        existing_projects = get_project_list_data([get_random_string() for x in range(existing_projects)])
    if existing_projects is not None:
        m.get('https://app.valohai.com/api/v0/projects/', json=existing_projects)
    if create_project_name:
        m.post('https://app.valohai.com/api/v0/projects/', json=lambda request, context: {
            'id': str(project_id),
            'name': create_project_name,
            'owner': {
                'id': 8,
                'username': username,
            }
        })
    m.get('https://app.valohai.com/api/v0/projects/ownership_options/', json=[username])
    m.get(f'https://app.valohai.com/api/v0/projects/{project_id}/', json={
        'id': str(project_id),
        'yaml_path': 'valohai.yaml',
    })
    return m
Add a mock API path for project details, used in e.g. test_init
Add a mock API path for project details, used in e.g. test_init
Python
mit
valohai/valohai-cli
from uuid import uuid4

import requests_mock

from tests.utils import get_project_list_data
from valohai_cli.utils import get_random_string


def get_project_mock(create_project_name=None, existing_projects=None):
    username = get_random_string()
    m = requests_mock.mock()
    if isinstance(existing_projects, int):
        existing_projects = get_project_list_data([get_random_string() for x in range(existing_projects)])
    if existing_projects is not None:
        m.get('https://app.valohai.com/api/v0/projects/', json=existing_projects)
    if create_project_name:
        m.post('https://app.valohai.com/api/v0/projects/', json=lambda request, context: {
            'id': str(uuid4()),
            'name': create_project_name,
            'owner': {
                'id': 8,
                'username': username,
            }
        })
    m.get('https://app.valohai.com/api/v0/projects/ownership_options/', json=[username])
    return m

Add a mock API path for project details, used in e.g. test_init

from uuid import uuid4

import requests_mock

from tests.utils import get_project_list_data
from valohai_cli.utils import get_random_string


def get_project_mock(create_project_name=None, existing_projects=None):
    username = get_random_string()
    project_id = uuid4()
    m = requests_mock.mock()
    if isinstance(existing_projects, int):
        existing_projects = get_project_list_data([get_random_string() for x in range(existing_projects)])
    if existing_projects is not None:
        m.get('https://app.valohai.com/api/v0/projects/', json=existing_projects)
    if create_project_name:
        m.post('https://app.valohai.com/api/v0/projects/', json=lambda request, context: {
            'id': str(project_id),
            'name': create_project_name,
            'owner': {
                'id': 8,
                'username': username,
            }
        })
    m.get('https://app.valohai.com/api/v0/projects/ownership_options/', json=[username])
    m.get(f'https://app.valohai.com/api/v0/projects/{project_id}/', json={
        'id': str(project_id),
        'yaml_path': 'valohai.yaml',
    })
    return m

<commit_before>from uuid import uuid4

import requests_mock

from tests.utils import get_project_list_data
from valohai_cli.utils import get_random_string


def get_project_mock(create_project_name=None, existing_projects=None):
    username = get_random_string()
    m = requests_mock.mock()
    if isinstance(existing_projects, int):
        existing_projects = get_project_list_data([get_random_string() for x in range(existing_projects)])
    if existing_projects is not None:
        m.get('https://app.valohai.com/api/v0/projects/', json=existing_projects)
    if create_project_name:
        m.post('https://app.valohai.com/api/v0/projects/', json=lambda request, context: {
            'id': str(uuid4()),
            'name': create_project_name,
            'owner': {
                'id': 8,
                'username': username,
            }
        })
    m.get('https://app.valohai.com/api/v0/projects/ownership_options/', json=[username])
    return m
<commit_msg>Add a mock API path for project details, used in e.g. test_init<commit_after>

from uuid import uuid4

import requests_mock

from tests.utils import get_project_list_data
from valohai_cli.utils import get_random_string


def get_project_mock(create_project_name=None, existing_projects=None):
    username = get_random_string()
    project_id = uuid4()
    m = requests_mock.mock()
    if isinstance(existing_projects, int):
        existing_projects = get_project_list_data([get_random_string() for x in range(existing_projects)])
    if existing_projects is not None:
        m.get('https://app.valohai.com/api/v0/projects/', json=existing_projects)
    if create_project_name:
        m.post('https://app.valohai.com/api/v0/projects/', json=lambda request, context: {
            'id': str(project_id),
            'name': create_project_name,
            'owner': {
                'id': 8,
                'username': username,
            }
        })
    m.get('https://app.valohai.com/api/v0/projects/ownership_options/', json=[username])
    m.get(f'https://app.valohai.com/api/v0/projects/{project_id}/', json={
        'id': str(project_id),
        'yaml_path': 'valohai.yaml',
    })
    return m

from uuid import uuid4

import requests_mock

from tests.utils import get_project_list_data
from valohai_cli.utils import get_random_string


def get_project_mock(create_project_name=None, existing_projects=None):
    username = get_random_string()
    m = requests_mock.mock()
    if isinstance(existing_projects, int):
        existing_projects = get_project_list_data([get_random_string() for x in range(existing_projects)])
    if existing_projects is not None:
        m.get('https://app.valohai.com/api/v0/projects/', json=existing_projects)
    if create_project_name:
        m.post('https://app.valohai.com/api/v0/projects/', json=lambda request, context: {
            'id': str(uuid4()),
            'name': create_project_name,
            'owner': {
                'id': 8,
                'username': username,
            }
        })
    m.get('https://app.valohai.com/api/v0/projects/ownership_options/', json=[username])
    return m

Add a mock API path for project details, used in e.g. test_initfrom uuid import uuid4

import requests_mock

from tests.utils import get_project_list_data
from valohai_cli.utils import get_random_string


def get_project_mock(create_project_name=None, existing_projects=None):
    username = get_random_string()
    project_id = uuid4()
    m = requests_mock.mock()
    if isinstance(existing_projects, int):
        existing_projects = get_project_list_data([get_random_string() for x in range(existing_projects)])
    if existing_projects is not None:
        m.get('https://app.valohai.com/api/v0/projects/', json=existing_projects)
    if create_project_name:
        m.post('https://app.valohai.com/api/v0/projects/', json=lambda request, context: {
            'id': str(project_id),
            'name': create_project_name,
            'owner': {
                'id': 8,
                'username': username,
            }
        })
    m.get('https://app.valohai.com/api/v0/projects/ownership_options/', json=[username])
    m.get(f'https://app.valohai.com/api/v0/projects/{project_id}/', json={
        'id': str(project_id),
        'yaml_path': 'valohai.yaml',
    })
    return m

<commit_before>from uuid import uuid4

import requests_mock

from tests.utils import get_project_list_data
from valohai_cli.utils import get_random_string


def get_project_mock(create_project_name=None, existing_projects=None):
    username = get_random_string()
    m = requests_mock.mock()
    if isinstance(existing_projects, int):
        existing_projects = get_project_list_data([get_random_string() for x in range(existing_projects)])
    if existing_projects is not None:
        m.get('https://app.valohai.com/api/v0/projects/', json=existing_projects)
    if create_project_name:
        m.post('https://app.valohai.com/api/v0/projects/', json=lambda request, context: {
            'id': str(uuid4()),
            'name': create_project_name,
            'owner': {
                'id': 8,
                'username': username,
            }
        })
    m.get('https://app.valohai.com/api/v0/projects/ownership_options/', json=[username])
    return m
<commit_msg>Add a mock API path for project details, used in e.g. test_init<commit_after>from uuid import uuid4

import requests_mock

from tests.utils import get_project_list_data
from valohai_cli.utils import get_random_string


def get_project_mock(create_project_name=None, existing_projects=None):
    username = get_random_string()
    project_id = uuid4()
    m = requests_mock.mock()
    if isinstance(existing_projects, int):
        existing_projects = get_project_list_data([get_random_string() for x in range(existing_projects)])
    if existing_projects is not None:
        m.get('https://app.valohai.com/api/v0/projects/', json=existing_projects)
    if create_project_name:
        m.post('https://app.valohai.com/api/v0/projects/', json=lambda request, context: {
            'id': str(project_id),
            'name': create_project_name,
            'owner': {
                'id': 8,
                'username': username,
            }
        })
    m.get('https://app.valohai.com/api/v0/projects/ownership_options/', json=[username])
    m.get(f'https://app.valohai.com/api/v0/projects/{project_id}/', json={
        'id': str(project_id),
        'yaml_path': 'valohai.yaml',
    })
    return m
8c015d47fa77ea6de56e194b754939632399ad3e
contones/test/test_geometry.py
contones/test/test_geometry.py
import unittest

from contones.geometry import Envelope


class EnvelopeTestCase(unittest.TestCase):

    def test_init(self):
        extent = (-120, 38, -110, 45)
        e1 = Envelope(*extent)
        extent_inv = (-110, 45, -120, 38)
        e2 = Envelope(*extent_inv)
        self.assertEqual(e1.tuple, e2.tuple)

    def test_invalid(self):
        with self.assertRaises(ValueError):
            env = Envelope(80, 2, 1, 2)
            env = Envelope(2, 1, 1, 2)

import unittest

from contones.geometry import Envelope


class EnvelopeTestCase(unittest.TestCase):

    def setUp(self):
        extent = (-120, 30, -110, 40)
        self.en = Envelope(*extent)
        self.esub = Envelope(-118, 32, -115, 38)

    def test_contains(self):
        self.assertIn(self.esub, self.en)
        self.assertFalse(self.en.contains((0, 0, 0, 0)))
        self.assertRaises(TypeError, self.en.contains, ())
        self.assertRaises(TypeError, self.en.contains, 'something')
        # FIXME: this should probably throw a TypeError
        self.assertFalse(self.en.contains('four'))

    def test_eq(self):
        self.assertEqual(self.en, Envelope(*self.en.tuple))

    def test_init(self):
        # Test flipped lower-left and upper-right coordinates.
        self.assertEqual(Envelope(-120, 38, -110, 45),
                         Envelope(-110, 45, -120, 38))
        # Zero area envelopes are valid.
        self.assertIsInstance(Envelope(1, 1, 1, 1), Envelope)

    def test_intersects(self):
        # Move lower-left coord further out.
        overlapping = Envelope(self.en.min_x - 10, self.en.min_y -10, *self.en.ur)
        self.assertTrue(self.en.intersects(overlapping))
        outside = self.en + 15
        self.assertFalse(self.en.intersects(outside))
        self.assertRaises(TypeError, self.en.intersects, ())

    def test_invalid(self):
        with self.assertRaises(ValueError):
            Envelope(80, 2, 1, 2)
            Envelope(2, 1, 1, 2)
Add intersects, contains, and equality tests for Envelope
Add intersects, contains, and equality tests for Envelope
Python
bsd-3-clause
bkg/greenwich
import unittest

from contones.geometry import Envelope


class EnvelopeTestCase(unittest.TestCase):

    def test_init(self):
        extent = (-120, 38, -110, 45)
        e1 = Envelope(*extent)
        extent_inv = (-110, 45, -120, 38)
        e2 = Envelope(*extent_inv)
        self.assertEqual(e1.tuple, e2.tuple)

    def test_invalid(self):
        with self.assertRaises(ValueError):
            env = Envelope(80, 2, 1, 2)
            env = Envelope(2, 1, 1, 2)

Add intersects, contains, and equality tests for Envelope

import unittest

from contones.geometry import Envelope


class EnvelopeTestCase(unittest.TestCase):

    def setUp(self):
        extent = (-120, 30, -110, 40)
        self.en = Envelope(*extent)
        self.esub = Envelope(-118, 32, -115, 38)

    def test_contains(self):
        self.assertIn(self.esub, self.en)
        self.assertFalse(self.en.contains((0, 0, 0, 0)))
        self.assertRaises(TypeError, self.en.contains, ())
        self.assertRaises(TypeError, self.en.contains, 'something')
        # FIXME: this should probably throw a TypeError
        self.assertFalse(self.en.contains('four'))

    def test_eq(self):
        self.assertEqual(self.en, Envelope(*self.en.tuple))

    def test_init(self):
        # Test flipped lower-left and upper-right coordinates.
        self.assertEqual(Envelope(-120, 38, -110, 45),
                         Envelope(-110, 45, -120, 38))
        # Zero area envelopes are valid.
        self.assertIsInstance(Envelope(1, 1, 1, 1), Envelope)

    def test_intersects(self):
        # Move lower-left coord further out.
        overlapping = Envelope(self.en.min_x - 10, self.en.min_y -10, *self.en.ur)
        self.assertTrue(self.en.intersects(overlapping))
        outside = self.en + 15
        self.assertFalse(self.en.intersects(outside))
        self.assertRaises(TypeError, self.en.intersects, ())

    def test_invalid(self):
        with self.assertRaises(ValueError):
            Envelope(80, 2, 1, 2)
            Envelope(2, 1, 1, 2)

<commit_before>import unittest

from contones.geometry import Envelope


class EnvelopeTestCase(unittest.TestCase):

    def test_init(self):
        extent = (-120, 38, -110, 45)
        e1 = Envelope(*extent)
        extent_inv = (-110, 45, -120, 38)
        e2 = Envelope(*extent_inv)
        self.assertEqual(e1.tuple, e2.tuple)

    def test_invalid(self):
        with self.assertRaises(ValueError):
            env = Envelope(80, 2, 1, 2)
            env = Envelope(2, 1, 1, 2)
<commit_msg>Add intersects, contains, and equality tests for Envelope<commit_after>

import unittest

from contones.geometry import Envelope


class EnvelopeTestCase(unittest.TestCase):

    def setUp(self):
        extent = (-120, 30, -110, 40)
        self.en = Envelope(*extent)
        self.esub = Envelope(-118, 32, -115, 38)

    def test_contains(self):
        self.assertIn(self.esub, self.en)
        self.assertFalse(self.en.contains((0, 0, 0, 0)))
        self.assertRaises(TypeError, self.en.contains, ())
        self.assertRaises(TypeError, self.en.contains, 'something')
        # FIXME: this should probably throw a TypeError
        self.assertFalse(self.en.contains('four'))

    def test_eq(self):
        self.assertEqual(self.en, Envelope(*self.en.tuple))

    def test_init(self):
        # Test flipped lower-left and upper-right coordinates.
        self.assertEqual(Envelope(-120, 38, -110, 45),
                         Envelope(-110, 45, -120, 38))
        # Zero area envelopes are valid.
        self.assertIsInstance(Envelope(1, 1, 1, 1), Envelope)

    def test_intersects(self):
        # Move lower-left coord further out.
        overlapping = Envelope(self.en.min_x - 10, self.en.min_y -10, *self.en.ur)
        self.assertTrue(self.en.intersects(overlapping))
        outside = self.en + 15
        self.assertFalse(self.en.intersects(outside))
        self.assertRaises(TypeError, self.en.intersects, ())

    def test_invalid(self):
        with self.assertRaises(ValueError):
            Envelope(80, 2, 1, 2)
            Envelope(2, 1, 1, 2)

import unittest

from contones.geometry import Envelope


class EnvelopeTestCase(unittest.TestCase):

    def test_init(self):
        extent = (-120, 38, -110, 45)
        e1 = Envelope(*extent)
        extent_inv = (-110, 45, -120, 38)
        e2 = Envelope(*extent_inv)
        self.assertEqual(e1.tuple, e2.tuple)

    def test_invalid(self):
        with self.assertRaises(ValueError):
            env = Envelope(80, 2, 1, 2)
            env = Envelope(2, 1, 1, 2)

Add intersects, contains, and equality tests for Envelopeimport unittest

from contones.geometry import Envelope


class EnvelopeTestCase(unittest.TestCase):

    def setUp(self):
        extent = (-120, 30, -110, 40)
        self.en = Envelope(*extent)
        self.esub = Envelope(-118, 32, -115, 38)

    def test_contains(self):
        self.assertIn(self.esub, self.en)
        self.assertFalse(self.en.contains((0, 0, 0, 0)))
        self.assertRaises(TypeError, self.en.contains, ())
        self.assertRaises(TypeError, self.en.contains, 'something')
        # FIXME: this should probably throw a TypeError
        self.assertFalse(self.en.contains('four'))

    def test_eq(self):
        self.assertEqual(self.en, Envelope(*self.en.tuple))

    def test_init(self):
        # Test flipped lower-left and upper-right coordinates.
        self.assertEqual(Envelope(-120, 38, -110, 45),
                         Envelope(-110, 45, -120, 38))
        # Zero area envelopes are valid.
        self.assertIsInstance(Envelope(1, 1, 1, 1), Envelope)

    def test_intersects(self):
        # Move lower-left coord further out.
        overlapping = Envelope(self.en.min_x - 10, self.en.min_y -10, *self.en.ur)
        self.assertTrue(self.en.intersects(overlapping))
        outside = self.en + 15
        self.assertFalse(self.en.intersects(outside))
        self.assertRaises(TypeError, self.en.intersects, ())

    def test_invalid(self):
        with self.assertRaises(ValueError):
            Envelope(80, 2, 1, 2)
            Envelope(2, 1, 1, 2)

<commit_before>import unittest

from contones.geometry import Envelope


class EnvelopeTestCase(unittest.TestCase):

    def test_init(self):
        extent = (-120, 38, -110, 45)
        e1 = Envelope(*extent)
        extent_inv = (-110, 45, -120, 38)
        e2 = Envelope(*extent_inv)
        self.assertEqual(e1.tuple, e2.tuple)

    def test_invalid(self):
        with self.assertRaises(ValueError):
            env = Envelope(80, 2, 1, 2)
            env = Envelope(2, 1, 1, 2)
<commit_msg>Add intersects, contains, and equality tests for Envelope<commit_after>import unittest

from contones.geometry import Envelope


class EnvelopeTestCase(unittest.TestCase):

    def setUp(self):
        extent = (-120, 30, -110, 40)
        self.en = Envelope(*extent)
        self.esub = Envelope(-118, 32, -115, 38)

    def test_contains(self):
        self.assertIn(self.esub, self.en)
        self.assertFalse(self.en.contains((0, 0, 0, 0)))
        self.assertRaises(TypeError, self.en.contains, ())
        self.assertRaises(TypeError, self.en.contains, 'something')
        # FIXME: this should probably throw a TypeError
        self.assertFalse(self.en.contains('four'))

    def test_eq(self):
        self.assertEqual(self.en, Envelope(*self.en.tuple))

    def test_init(self):
        # Test flipped lower-left and upper-right coordinates.
        self.assertEqual(Envelope(-120, 38, -110, 45),
                         Envelope(-110, 45, -120, 38))
        # Zero area envelopes are valid.
        self.assertIsInstance(Envelope(1, 1, 1, 1), Envelope)

    def test_intersects(self):
        # Move lower-left coord further out.
        overlapping = Envelope(self.en.min_x - 10, self.en.min_y -10, *self.en.ur)
        self.assertTrue(self.en.intersects(overlapping))
        outside = self.en + 15
        self.assertFalse(self.en.intersects(outside))
        self.assertRaises(TypeError, self.en.intersects, ())

    def test_invalid(self):
        with self.assertRaises(ValueError):
            Envelope(80, 2, 1, 2)
            Envelope(2, 1, 1, 2)
bbd8b027eecc48266dfeee12419a6bcd807bdf65
tests/__init__.py
tests/__init__.py
import os
import unittest

import pytest


class ScraperTest(unittest.TestCase):

    online = False
    test_file_name = None

    def setUp(self):
        os.environ[
            "RECIPE_SCRAPERS_SETTINGS"
        ] = "tests.test_data.test_settings_module.test_settings"
        test_file_name = (
            self.test_file_name
            if self.test_file_name
            else self.scraper_class.__name__.lower()
        )
        with open(
            "tests/test_data/{}.testhtml".format(test_file_name), encoding="utf-8"
        ) as testfile:
            self.harvester_class = self.scraper_class(testfile)
        canonical_url = self.harvester_class.canonical_url()
        if self.online:
            if not canonical_url:
                pytest.skip(
                    f"could not find canonical url for online test of scraper '{self.scraper_class.__name__}'"
                )
            self.harvester_class = self.scraper_class(url=canonical_url)
import os
import unittest

import pytest


class ScraperTest(unittest.TestCase):

    maxDiff = None
    online = False
    test_file_name = None

    def setUp(self):
        os.environ[
            "RECIPE_SCRAPERS_SETTINGS"
        ] = "tests.test_data.test_settings_module.test_settings"
        test_file_name = (
            self.test_file_name
            if self.test_file_name
            else self.scraper_class.__name__.lower()
        )
        with open(
            "tests/test_data/{}.testhtml".format(test_file_name), encoding="utf-8"
        ) as testfile:
            self.harvester_class = self.scraper_class(testfile)
        canonical_url = self.harvester_class.canonical_url()
        if self.online:
            if not canonical_url:
                pytest.skip(
                    f"could not find canonical url for online test of scraper '{self.scraper_class.__name__}'"
                )
            self.harvester_class = self.scraper_class(url=canonical_url)
Set maxDiff to 'None' on the base ScraperTest class
Set maxDiff to 'None' on the base ScraperTest class
Python
mit
hhursev/recipe-scraper
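maxDiff is standard unittest.TestCase machinery: when an assertEqual on long sequences, dicts, or strings fails, unittest withholds the full diff once it exceeds maxDiff characters (640 by default) and prints a hint instead; setting it to None disables the cutoff class-wide. A minimal standalone illustration — class and variable names here are invented for the example:

import unittest


class LongDiffExample(unittest.TestCase):
    maxDiff = None  # never suppress assertEqual failure diffs

    def test_one_element_differs(self):
        expected = list(range(200))
        actual = list(range(200))
        actual[150] = -1
        # Without maxDiff = None, unittest replaces the long diff with
        # "Diff is N characters long. Set self.maxDiff to None to see it."
        self.assertEqual(expected, actual)

For a scraper base class this matters because ingredient lists and instruction strings routinely blow past the default limit, hiding the one element that actually differs.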
181d3b06bf985d0ccec156363ecd4fe3792ddf1a
scripts/assignment_test.py
scripts/assignment_test.py
# -*- coding: utf-8 -*-
import unittest
dbconfig = None
try:
    import dbconfig
    import erppeek
except ImportError:
    pass


@unittest.skipIf(not dbconfig, "depends on ERP")
class Assignment_Test(unittest.TestCase):
    def setUp(self):
        self.erp = erppeek.Client(**dbconfig.erppeek)
        self.Assignments = self.erp.GenerationkwhAssignments
        self.tearDown()

    def setupProvider(self, assignments=[]):
        self.Assignments.add(assignments)

    def assertAssignmentsEqual(self, expectation):
        result = self.Assignments.browse([])
        self.assertEqual([
            [r.active, r.polissa_id.id, r.member_id.id, r.priority]
            for r in result], expectation)

    def tearDown(self):
        for a in self.Assignments.browse([]):
            a.unlink()

    def test_no_assignments(self):
        self.setupProvider()
        self.assertAssignmentsEqual([])

    def test_one_assignment(self):
        rp = self.erp.ResPartner.browse([])[0]
        gp = self.erp.GiscedataPolissa.browse([])[0]
        self.setupProvider([[True, gp.id, rp.id, 1]])
        self.assertAssignmentsEqual([[True, gp.id, rp.id, 1]])


if __name__ == '__main__':
    unittest.main()
# -*- coding: utf-8 -*-
import unittest
dbconfig = None
try:
    import dbconfig
    import erppeek
except ImportError:
    pass


@unittest.skipIf(not dbconfig, "depends on ERP")
class Assignment_Test(unittest.TestCase):
    def setUp(self):
        self.erp = erppeek.Client(**dbconfig.erppeek)
        self.Assignments = self.erp.GenerationkwhAssignments
        self.tearDown()

    def setupProvider(self, assignments=[]):
        self.Assignments.add(assignments)

    def assertAssignmentsEqual(self, expectation):
        result = self.Assignments.browse([])
        self.assertEqual([
            [r.active, r.polissa_id.id, r.member_id.id, r.priority]
            for r in result], expectation)

    def tearDown(self):
        for a in self.Assignments.browse([]):
            a.unlink()

    def test_no_assignments(self):
        self.setupProvider()
        self.assertAssignmentsEqual([])

    def test_one_assignment(self):
        rp = self.erp.ResPartner.browse([], limit=1)[0]
        gp = self.erp.GiscedataPolissa.browse([], limit=1)[0]
        self.setupProvider([[True, gp.id, rp.id, 1]])
        self.assertAssignmentsEqual([[True, gp.id, rp.id, 1]])

    """def test_no_duplication(self):
        rp = self.erp.ResPartner.browse([], limit=1)[0]
        gp = self.erp.GiscedataPolissa.browse([], limit=1)[0]
        self.setupProvider([[True, gp.id, rp.id, 1]])
        self.assertAssignmentsEqual([[True, gp.id, rp.id, 1]])"""


if __name__ == '__main__':
    unittest.main()
Refactor of one assignment test
Refactor of one assignment test
Python
agpl-3.0
Som-Energia/somenergia-generationkwh
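The only behavioural change in this refactor is the limit=1 argument: browse([]) with no limit makes erppeek pull the id of every res.partner / giscedata.polissa record just so the test can take element [0]. The call shape, exactly as used in the updated test:

rp = self.erp.ResPartner.browse([], limit=1)[0]         # first partner only
gp = self.erp.GiscedataPolissa.browse([], limit=1)[0]   # first contract only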
120bff1f3bdf347351c6903dc3df0cd51f1837c6
tools/clean_output_directory.py
tools/clean_output_directory.py
#!/usr/bin/env python
#
# Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
#

import shutil
import sys
import utils


def Main():
  build_root = utils.GetBuildRoot(utils.GuessOS())
  print 'Deleting %s' % build_root
  shutil.rmtree(build_root, ignore_errors=True)
  return 0


if __name__ == '__main__':
  sys.exit(Main())
#!/usr/bin/env python
#
# Copyright (c) 2012, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
#

import shutil
import sys
import utils


def Main():
  build_root = utils.GetBuildRoot(utils.GuessOS())
  print 'Deleting %s' % build_root
  if sys.platform != 'win32':
    shutil.rmtree(build_root, ignore_errors=True)
  else:
    # Intentionally ignore return value since a directory might be in use.
    subprocess.call(['rmdir', '/Q', '/S', build_root],
                    env=os.environ.copy(),
                    shell=True)
  return 0


if __name__ == '__main__':
  sys.exit(Main())
Fix build directory cleaner, to not follow links on Windows.
Fix build directory cleaner, to not follow links on Windows. BUG= R=ricow@google.com Review URL: https://codereview.chromium.org//1219833003.
Python
bsd-3-clause
dart-lang/sdk,dartino/dart-sdk,dart-archive/dart-sdk
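One caveat about the new branch as committed: it calls subprocess.call and os.environ, but the file's import block still only pulls in shutil, sys, and utils, so the Windows path would raise NameError before rmdir ever runs. A runnable version of the header just adds the two modules — a sketch; the upstream tree may have patched this in a follow-up commit:

import os
import shutil
import subprocess
import sys
import utils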
a0605e1be5980f9c2f80fe0e751e736a3f4b48ef
fiji_skeleton_macro.py
fiji_skeleton_macro.py
import sys
from ij import IJ


def ij_binary_skeletonize(impath_in, impath_out):
    """Load image `impath`, skeletonize it, and save it to the same file.

    Parameters
    ----------
    impath_in : string
        Path to a 3D image file.
    impath_out : string
        Path to which to write the skeleton image file.

    Returns
    -------
    None
    """
    imp = IJ.openImage(impath_in)
    IJ.run(imp, "Skeletonize (2D/3D)", "")
    IJ.saveAs(imp, "Tiff", impath_out)
    imp.close()


if __name__ == '__main__':
    print sys.argv
    ij_binary_skeletonize(sys.argv[1], sys.argv[2])
import sys
from ij import IJ


def ij_binary_skeletonize(impath_in, impath_out):
    """Load image `impath`, skeletonize it, and save it to the same file.

    Parameters
    ----------
    impath_in : string
        Path to a 3D image file.
    impath_out : string
        Path to which to write the skeleton image file.

    Returns
    -------
    None
    """
    imp = IJ.openImage(impath_in)
    IJ.run(imp, "Skeletonize (2D/3D)", "")
    IJ.run(imp, "Analyze Skeleton (2D/3D)", "prune=none prune")
    IJ.saveAs(imp, "Tiff", impath_out)
    imp.close()


if __name__ == '__main__':
    print sys.argv
    ij_binary_skeletonize(sys.argv[1], sys.argv[2])
Add pruning step to skeletonization
Add pruning step to skeletonization This requires an updated Fiji, as detailed in this mailing list thread: https://list.nih.gov/cgi-bin/wa.exe?A1=ind1308&L=IMAGEJ#41 https://list.nih.gov/cgi-bin/wa.exe?A2=ind1308&L=IMAGEJ&F=&S=&P=36891
Python
bsd-3-clause
jni/skeletons
f2b52883921af4c006680d58df43f32da739554e
mediachain/translation/lookup.py
mediachain/translation/lookup.py
from mediachain.datastore.ipfs import get_ipfs_datastore
import sys
import os
from os.path import expanduser, join


class ChDir(object):
    """
    Step into a directory temporarily
    """
    def __init__(self, path):
        self.old_dir = os.getcwd()
        self.new_dir = path

    def __enter__(self):
        os.chdir(self.new_dir)

    def __exit__(self, *args):
        os.chdir(self.old_dir)


def get_translator(translator_id):
    try:
        name, version = translator_id.split('@')
    except ValueError:
        raise LookupError(
            "Bad translator id `{}`, must be `name@multihash` format".format(translator_id)
        )

    ipfs = get_ipfs_datastore()  # FIXME: memoize this
    path = join(expanduser('~'), '.mediachain')
    if not os.path.exists(path):
        os.makedirs(path)
    with ChDir(path):
        translator = ipfs.client.get(version)  # FIXME: timeout, error handling
    sys.path.append(path)
    # print('dynamic module load path: {}'.format(path))
    full_path = version + '.translator'
    # print('loading translator module from {}'.format(full_path))
    translator_module = __import__(full_path, globals(), locals(), [name])
    translator = getattr(translator_module, name.capitalize())
    return translator
from mediachain.datastore.ipfs import get_ipfs_datastore
import sys
import os
import shutil
from os.path import expanduser, join


class ChDir(object):
    """
    Step into a directory temporarily
    """
    def __init__(self, path):
        self.old_dir = os.getcwd()
        self.new_dir = path

    def __enter__(self):
        os.chdir(self.new_dir)

    def __exit__(self, *args):
        os.chdir(self.old_dir)


def get_translator(translator_id):
    try:
        name, version = translator_id.split('@')
    except ValueError:
        raise LookupError(
            "Bad translator id `{}`, must be `name@multihash` format".format(translator_id)
        )

    ipfs = get_ipfs_datastore()  # FIXME: memoize this
    basepath = join(expanduser('~'), '.mediachain')
    path = join(basepath, 'mediachain', 'translation')
    if not os.path.exists(path):
        os.makedirs(path)
    with ChDir(path):
        shutil.rmtree(name)
        translator = ipfs.client.get(version)  # FIXME: timeout, error handling
        os.rename(version, name)  # ipfsApi doesn't support -o
    sys.path.append(path)
    full_name = 'mediachain.translation.' + name + '.translator'
    translator_module = __import__(full_name, globals(), locals(), [name])
    translator = getattr(translator_module, name.capitalize())
    return translator
Use normal full names for translators
Use normal full names for translators - Remove and replace any existing module - Pull new module from ipfs and rename from multihash to normal name (ipfsApi doesn't support specifying output paths) - Rename full_path to full_name for consistency
Python
mit
mediachain/mediachain-client
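A caveat on the committed version: shutil.rmtree(name) without ignore_errors raises when the directory is absent, which is exactly the state on the very first fetch of a translator. A defensive variant of the fetch block — a sketch, not the committed code:

with ChDir(path):
    if os.path.exists(name):
        shutil.rmtree(name)                # drop any stale module first
    translator = ipfs.client.get(version)  # FIXME: timeout, error handling
    os.rename(version, name)               # ipfsApi doesn't support -o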
327ddb6db4009cf329ac0f8fb22b56b002e7ef96
server/adventures/tests.py
server/adventures/tests.py
from django.test import TestCase

from .models import Author, Publisher, Edition, Setting, Adventure


class AuthorTests(TestCase):
    def test_create_author(self):
        gygax = Author.objects.create(name='Gary Gygax')
        self.assertEqual(Author.objects.first(), gygax)
        self.assertEqual(Author.objects.count(), 1)


class PublisherTests(TestCase):
    def test_create_author(self):
        wotc = Publisher.objects.create(name='Wizards of the Coast')
        self.assertEqual(Publisher.objects.first(), wotc)
        self.assertEqual(Publisher.objects.count(), 1)


class EditionTests(TestCase):
    def test_create_author(self):
        odandd = Edition.objects.create(name='OD&D')
        self.assertEqual(Edition.objects.first(), odandd)
        self.assertEqual(Edition.objects.count(), 1)
from django.test import TestCase

from .models import Author, Publisher, Edition, Setting, Adventure


class AuthorTests(TestCase):
    def test_create_author(self):
        gygax = Author.objects.create(name='Gary Gygax')
        self.assertEqual(Author.objects.first(), gygax)
        self.assertEqual(Author.objects.count(), 1)


class PublisherTests(TestCase):
    def test_create_author(self):
        wotc = Publisher.objects.create(name='Wizards of the Coast')
        self.assertEqual(Publisher.objects.first(), wotc)
        self.assertEqual(Publisher.objects.count(), 1)


class EditionTests(TestCase):
    def test_create_author(self):
        odandd = Edition.objects.create(name='OD&D')
        self.assertEqual(Edition.objects.first(), odandd)
        self.assertEqual(Edition.objects.count(), 1)


class SettingTests(TestCase):
    def test_create_author(self):
        fr = Setting.objects.create(name='Forgotten Realms')
        self.assertEqual(Setting.objects.first(), fr)
        self.assertEqual(Setting.objects.count(), 1)
Add Setting model creation test
Add Setting model creation test
Python
mit
petertrotman/adventurelookup
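All four test classes above share one create/first/count shape (note the copy-pasted test_create_author name in the later classes). A small helper would keep the next model's test to a one-liner — a sketch reusing only the Django ORM calls already present above, not part of the commit:

def assert_single_creation(testcase, model, **fields):
    obj = model.objects.create(**fields)
    testcase.assertEqual(model.objects.first(), obj)
    testcase.assertEqual(model.objects.count(), 1)


class SettingTests(TestCase):
    def test_create_setting(self):
        assert_single_creation(self, Setting, name='Forgotten Realms')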
a3187d16a70966c84a4f4977768fcfefc93b5a6d
this_app/forms.py
this_app/forms.py
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, TextAreaField
from wtforms.validators import DataRequired, Length, Email


class SignupForm(FlaskForm):
    """Render and validate the signup form"""
    email = StringField("Email", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
    username = StringField("Username", validators=[DataRequired(), Length(2, 32)])
    password = PasswordField("Password", validators=[DataRequired(), Length(min=4, max=32)])


class LoginForm(FlaskForm):
    """Form to let users login"""
    email = StringField("Email", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
    password = PasswordField("Password", validators=[DataRequired(), Length(4, 32)])
    remember = BooleanField("Remember Me")


class BucketlistForm(FlaskForm):
    """Form to CRUd a bucketlist"""
    name = StringField("Name", validators=[DataRequired()])
    description = TextAreaField("Description", validators=[DataRequired()])
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, TextAreaField
from wtforms.validators import DataRequired, Length, Email


class SignupForm(FlaskForm):
    """Render and validate the signup form"""
    email = StringField("Email", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
    username = StringField("Username", validators=[DataRequired(), Length(2, 32)])
    password = PasswordField("Password", validators=[DataRequired(), Length(min=4, max=32)])


class LoginForm(FlaskForm):
    """Form to let users login"""
    email = StringField("Email", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
    password = PasswordField("Password", validators=[DataRequired(), Length(4, 32)])
    remember = BooleanField("Remember Me")


class BucketlistForm(FlaskForm):
    """Form to CRUd a bucketlist"""
    name = StringField("Name", validators=[DataRequired()])
    description = TextAreaField("Description", validators=[DataRequired()])


class BucketlistItemForm(FlaskForm):
    """Form to CRUd a bucketlist item"""
    title = StringField("Title", validators=[DataRequired()])
    description = TextAreaField("Description", validators=[DataRequired()])
    status = BooleanField("Status", validators=[DataRequired])
Add form to create a bucketlist item
Add form to create a bucketlist item
Python
mit
borenho/flask-bucketlist,borenho/flask-bucketlist
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, TextAreaField
from wtforms.validators import DataRequired, Length, Email


class SignupForm(FlaskForm):
    """Render and validate the signup form"""
    email = StringField("Email", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
    username = StringField("Username", validators=[DataRequired(), Length(2, 32)])
    password = PasswordField("Password", validators=[DataRequired(), Length(min=4, max=32)])


class LoginForm(FlaskForm):
    """Form to let users login"""
    email = StringField("Email", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
    password = PasswordField("Password", validators=[DataRequired(), Length(4, 32)])
    remember = BooleanField("Remember Me")


class BucketlistForm(FlaskForm):
    """Form to CRUD a bucketlist"""
    name = StringField("Name", validators=[DataRequired()])
    description = TextAreaField("Description", validators=[DataRequired()])

Add form to create a bucketlist item
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, TextAreaField
from wtforms.validators import DataRequired, Length, Email


class SignupForm(FlaskForm):
    """Render and validate the signup form"""
    email = StringField("Email", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
    username = StringField("Username", validators=[DataRequired(), Length(2, 32)])
    password = PasswordField("Password", validators=[DataRequired(), Length(min=4, max=32)])


class LoginForm(FlaskForm):
    """Form to let users login"""
    email = StringField("Email", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
    password = PasswordField("Password", validators=[DataRequired(), Length(4, 32)])
    remember = BooleanField("Remember Me")


class BucketlistForm(FlaskForm):
    """Form to CRUD a bucketlist"""
    name = StringField("Name", validators=[DataRequired()])
    description = TextAreaField("Description", validators=[DataRequired()])


class BucketlistItemForm(FlaskForm):
    """Form to CRUD a bucketlist item"""
    title = StringField("Title", validators=[DataRequired()])
    description = TextAreaField("Description", validators=[DataRequired()])
    status = BooleanField("Status", validators=[DataRequired()])
<commit_before>from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, TextAreaField
from wtforms.validators import DataRequired, Length, Email


class SignupForm(FlaskForm):
    """Render and validate the signup form"""
    email = StringField("Email", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
    username = StringField("Username", validators=[DataRequired(), Length(2, 32)])
    password = PasswordField("Password", validators=[DataRequired(), Length(min=4, max=32)])


class LoginForm(FlaskForm):
    """Form to let users login"""
    email = StringField("Email", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
    password = PasswordField("Password", validators=[DataRequired(), Length(4, 32)])
    remember = BooleanField("Remember Me")


class BucketlistForm(FlaskForm):
    """Form to CRUD a bucketlist"""
    name = StringField("Name", validators=[DataRequired()])
    description = TextAreaField("Description", validators=[DataRequired()])
<commit_msg>Add form to create a bucketlist item<commit_after>
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, TextAreaField
from wtforms.validators import DataRequired, Length, Email


class SignupForm(FlaskForm):
    """Render and validate the signup form"""
    email = StringField("Email", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
    username = StringField("Username", validators=[DataRequired(), Length(2, 32)])
    password = PasswordField("Password", validators=[DataRequired(), Length(min=4, max=32)])


class LoginForm(FlaskForm):
    """Form to let users login"""
    email = StringField("Email", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
    password = PasswordField("Password", validators=[DataRequired(), Length(4, 32)])
    remember = BooleanField("Remember Me")


class BucketlistForm(FlaskForm):
    """Form to CRUD a bucketlist"""
    name = StringField("Name", validators=[DataRequired()])
    description = TextAreaField("Description", validators=[DataRequired()])


class BucketlistItemForm(FlaskForm):
    """Form to CRUD a bucketlist item"""
    title = StringField("Title", validators=[DataRequired()])
    description = TextAreaField("Description", validators=[DataRequired()])
    status = BooleanField("Status", validators=[DataRequired()])
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, TextAreaField
from wtforms.validators import DataRequired, Length, Email


class SignupForm(FlaskForm):
    """Render and validate the signup form"""
    email = StringField("Email", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
    username = StringField("Username", validators=[DataRequired(), Length(2, 32)])
    password = PasswordField("Password", validators=[DataRequired(), Length(min=4, max=32)])


class LoginForm(FlaskForm):
    """Form to let users login"""
    email = StringField("Email", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
    password = PasswordField("Password", validators=[DataRequired(), Length(4, 32)])
    remember = BooleanField("Remember Me")


class BucketlistForm(FlaskForm):
    """Form to CRUD a bucketlist"""
    name = StringField("Name", validators=[DataRequired()])
    description = TextAreaField("Description", validators=[DataRequired()])

Add form to create a bucketlist itemfrom flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, TextAreaField
from wtforms.validators import DataRequired, Length, Email


class SignupForm(FlaskForm):
    """Render and validate the signup form"""
    email = StringField("Email", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
    username = StringField("Username", validators=[DataRequired(), Length(2, 32)])
    password = PasswordField("Password", validators=[DataRequired(), Length(min=4, max=32)])


class LoginForm(FlaskForm):
    """Form to let users login"""
    email = StringField("Email", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
    password = PasswordField("Password", validators=[DataRequired(), Length(4, 32)])
    remember = BooleanField("Remember Me")


class BucketlistForm(FlaskForm):
    """Form to CRUD a bucketlist"""
    name = StringField("Name", validators=[DataRequired()])
    description = TextAreaField("Description", validators=[DataRequired()])


class BucketlistItemForm(FlaskForm):
    """Form to CRUD a bucketlist item"""
    title = StringField("Title", validators=[DataRequired()])
    description = TextAreaField("Description", validators=[DataRequired()])
    status = BooleanField("Status", validators=[DataRequired()])
<commit_before>from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, TextAreaField
from wtforms.validators import DataRequired, Length, Email


class SignupForm(FlaskForm):
    """Render and validate the signup form"""
    email = StringField("Email", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
    username = StringField("Username", validators=[DataRequired(), Length(2, 32)])
    password = PasswordField("Password", validators=[DataRequired(), Length(min=4, max=32)])


class LoginForm(FlaskForm):
    """Form to let users login"""
    email = StringField("Email", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
    password = PasswordField("Password", validators=[DataRequired(), Length(4, 32)])
    remember = BooleanField("Remember Me")


class BucketlistForm(FlaskForm):
    """Form to CRUD a bucketlist"""
    name = StringField("Name", validators=[DataRequired()])
    description = TextAreaField("Description", validators=[DataRequired()])
<commit_msg>Add form to create a bucketlist item<commit_after>from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField, BooleanField, TextAreaField
from wtforms.validators import DataRequired, Length, Email


class SignupForm(FlaskForm):
    """Render and validate the signup form"""
    email = StringField("Email", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
    username = StringField("Username", validators=[DataRequired(), Length(2, 32)])
    password = PasswordField("Password", validators=[DataRequired(), Length(min=4, max=32)])


class LoginForm(FlaskForm):
    """Form to let users login"""
    email = StringField("Email", validators=[DataRequired(), Email(message="Invalid email format"), Length(max=32)])
    password = PasswordField("Password", validators=[DataRequired(), Length(4, 32)])
    remember = BooleanField("Remember Me")


class BucketlistForm(FlaskForm):
    """Form to CRUD a bucketlist"""
    name = StringField("Name", validators=[DataRequired()])
    description = TextAreaField("Description", validators=[DataRequired()])


class BucketlistItemForm(FlaskForm):
    """Form to CRUD a bucketlist item"""
    title = StringField("Title", validators=[DataRequired()])
    description = TextAreaField("Description", validators=[DataRequired()])
    status = BooleanField("Status", validators=[DataRequired()])
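A minimal sketch of wiring BucketlistItemForm into a Flask view; the route, endpoint names, template, and save_item helper are assumptions for illustration, not part of the commit. One semantic caveat: DataRequired() on a BooleanField rejects an unchecked checkbox, so wtforms.validators.Optional() is usually the better validator for a status flag.

from flask import Flask, redirect, render_template, url_for

from this_app.forms import BucketlistItemForm

app = Flask(__name__)
app.config["SECRET_KEY"] = "change-me"  # Flask-WTF needs this for CSRF tokens


def save_item(bucketlist_id, **fields):
    """Hypothetical persistence helper; the real storage layer isn't shown."""
    pass


@app.route("/bucketlists/<int:bucketlist_id>/items/new", methods=["GET", "POST"])
def new_item(bucketlist_id):
    form = BucketlistItemForm()
    if form.validate_on_submit():  # True only on POST with all validators passing
        save_item(
            bucketlist_id,
            title=form.title.data,
            description=form.description.data,
            status=form.status.data,
        )
        return redirect(url_for("show_bucketlist", bucketlist_id=bucketlist_id))
    return render_template("new_item.html", form=form)  # re-render with errors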
674491d8806ae2a56b747622f16f25e75732925e
db/util.py
db/util.py
import rethinkdb as r # TODO(alpert): Read port and db from app.config? def r_conn(box=[None]): if box[0] is None: box[0] = r.connect() box[0].use('vim_awesome') return box[0] def get_first(query): results = list(query.limit(1).run(r_conn())) return results[0] if results else None def ensure_table(table_name): """Creates a table if it doesn't exist.""" try: r.table_create(table_name).run(r_conn()) except r.RqlRuntimeError: pass # Ignore db already created def ensure_index(table_name, index_name, *args, **kwargs): """Creates an index if it doesn't exist.""" indices = r.table(table_name).index_list().run(r_conn()) if index_name not in indices: r.table(table_name).index_create(index_name, *args, **kwargs).run( r_conn())
import rethinkdb as r # TODO(alpert): Read port and db from app.config? def r_conn(box=[None]): if box[0] is None: box[0] = r.connect() box[0].use('vim_awesome') return box[0] def get_first(query): results = list(query.limit(1).run(r_conn())) return results[0] if results else None def ensure_table(table_name, *args, **kwargs): """Creates a table if it doesn't exist.""" try: r.table_create(table_name, *args, **kwargs).run(r_conn()) except r.RqlRuntimeError: pass # Ignore db already created def ensure_index(table_name, index_name, *args, **kwargs): """Creates an index if it doesn't exist.""" indices = r.table(table_name).index_list().run(r_conn()) if index_name not in indices: r.table(table_name).index_create(index_name, *args, **kwargs).run( r_conn())
Allow additional args for ensure_table
Allow additional args for ensure_table I had this at some point in some diff somewhere, but it got lost... that's worrying. Maybe in a stash somewhere.
Python
mit
shaialon/vim-awesome,divad12/vim-awesome,shaialon/vim-awesome,vim-awesome/vim-awesome,vim-awesome/vim-awesome,divad12/vim-awesome,vim-awesome/vim-awesome,starcraftman/vim-awesome,divad12/vim-awesome,jonafato/vim-awesome,starcraftman/vim-awesome,jonafato/vim-awesome,jonafato/vim-awesome,starcraftman/vim-awesome,shaialon/vim-awesome,starcraftman/vim-awesome,vim-awesome/vim-awesome,vim-awesome/vim-awesome,jonafato/vim-awesome,shaialon/vim-awesome,divad12/vim-awesome
import rethinkdb as r # TODO(alpert): Read port and db from app.config? def r_conn(box=[None]): if box[0] is None: box[0] = r.connect() box[0].use('vim_awesome') return box[0] def get_first(query): results = list(query.limit(1).run(r_conn())) return results[0] if results else None def ensure_table(table_name): """Creates a table if it doesn't exist.""" try: r.table_create(table_name).run(r_conn()) except r.RqlRuntimeError: pass # Ignore db already created def ensure_index(table_name, index_name, *args, **kwargs): """Creates an index if it doesn't exist.""" indices = r.table(table_name).index_list().run(r_conn()) if index_name not in indices: r.table(table_name).index_create(index_name, *args, **kwargs).run( r_conn()) Allow additional args for ensure_table I had this at some point in some diff somewhere, but it got lost... that's worrying. Maybe in a stash somewhere.
import rethinkdb as r # TODO(alpert): Read port and db from app.config? def r_conn(box=[None]): if box[0] is None: box[0] = r.connect() box[0].use('vim_awesome') return box[0] def get_first(query): results = list(query.limit(1).run(r_conn())) return results[0] if results else None def ensure_table(table_name, *args, **kwargs): """Creates a table if it doesn't exist.""" try: r.table_create(table_name, *args, **kwargs).run(r_conn()) except r.RqlRuntimeError: pass # Ignore db already created def ensure_index(table_name, index_name, *args, **kwargs): """Creates an index if it doesn't exist.""" indices = r.table(table_name).index_list().run(r_conn()) if index_name not in indices: r.table(table_name).index_create(index_name, *args, **kwargs).run( r_conn())
<commit_before>import rethinkdb as r # TODO(alpert): Read port and db from app.config? def r_conn(box=[None]): if box[0] is None: box[0] = r.connect() box[0].use('vim_awesome') return box[0] def get_first(query): results = list(query.limit(1).run(r_conn())) return results[0] if results else None def ensure_table(table_name): """Creates a table if it doesn't exist.""" try: r.table_create(table_name).run(r_conn()) except r.RqlRuntimeError: pass # Ignore db already created def ensure_index(table_name, index_name, *args, **kwargs): """Creates an index if it doesn't exist.""" indices = r.table(table_name).index_list().run(r_conn()) if index_name not in indices: r.table(table_name).index_create(index_name, *args, **kwargs).run( r_conn()) <commit_msg>Allow additional args for ensure_table I had this at some point in some diff somewhere, but it got lost... that's worrying. Maybe in a stash somewhere.<commit_after>
import rethinkdb as r # TODO(alpert): Read port and db from app.config? def r_conn(box=[None]): if box[0] is None: box[0] = r.connect() box[0].use('vim_awesome') return box[0] def get_first(query): results = list(query.limit(1).run(r_conn())) return results[0] if results else None def ensure_table(table_name, *args, **kwargs): """Creates a table if it doesn't exist.""" try: r.table_create(table_name, *args, **kwargs).run(r_conn()) except r.RqlRuntimeError: pass # Ignore db already created def ensure_index(table_name, index_name, *args, **kwargs): """Creates an index if it doesn't exist.""" indices = r.table(table_name).index_list().run(r_conn()) if index_name not in indices: r.table(table_name).index_create(index_name, *args, **kwargs).run( r_conn())
import rethinkdb as r # TODO(alpert): Read port and db from app.config? def r_conn(box=[None]): if box[0] is None: box[0] = r.connect() box[0].use('vim_awesome') return box[0] def get_first(query): results = list(query.limit(1).run(r_conn())) return results[0] if results else None def ensure_table(table_name): """Creates a table if it doesn't exist.""" try: r.table_create(table_name).run(r_conn()) except r.RqlRuntimeError: pass # Ignore db already created def ensure_index(table_name, index_name, *args, **kwargs): """Creates an index if it doesn't exist.""" indices = r.table(table_name).index_list().run(r_conn()) if index_name not in indices: r.table(table_name).index_create(index_name, *args, **kwargs).run( r_conn()) Allow additional args for ensure_table I had this at some point in some diff somewhere, but it got lost... that's worrying. Maybe in a stash somewhere.import rethinkdb as r # TODO(alpert): Read port and db from app.config? def r_conn(box=[None]): if box[0] is None: box[0] = r.connect() box[0].use('vim_awesome') return box[0] def get_first(query): results = list(query.limit(1).run(r_conn())) return results[0] if results else None def ensure_table(table_name, *args, **kwargs): """Creates a table if it doesn't exist.""" try: r.table_create(table_name, *args, **kwargs).run(r_conn()) except r.RqlRuntimeError: pass # Ignore db already created def ensure_index(table_name, index_name, *args, **kwargs): """Creates an index if it doesn't exist.""" indices = r.table(table_name).index_list().run(r_conn()) if index_name not in indices: r.table(table_name).index_create(index_name, *args, **kwargs).run( r_conn())
<commit_before>import rethinkdb as r # TODO(alpert): Read port and db from app.config? def r_conn(box=[None]): if box[0] is None: box[0] = r.connect() box[0].use('vim_awesome') return box[0] def get_first(query): results = list(query.limit(1).run(r_conn())) return results[0] if results else None def ensure_table(table_name): """Creates a table if it doesn't exist.""" try: r.table_create(table_name).run(r_conn()) except r.RqlRuntimeError: pass # Ignore db already created def ensure_index(table_name, index_name, *args, **kwargs): """Creates an index if it doesn't exist.""" indices = r.table(table_name).index_list().run(r_conn()) if index_name not in indices: r.table(table_name).index_create(index_name, *args, **kwargs).run( r_conn()) <commit_msg>Allow additional args for ensure_table I had this at some point in some diff somewhere, but it got lost... that's worrying. Maybe in a stash somewhere.<commit_after>import rethinkdb as r # TODO(alpert): Read port and db from app.config? def r_conn(box=[None]): if box[0] is None: box[0] = r.connect() box[0].use('vim_awesome') return box[0] def get_first(query): results = list(query.limit(1).run(r_conn())) return results[0] if results else None def ensure_table(table_name, *args, **kwargs): """Creates a table if it doesn't exist.""" try: r.table_create(table_name, *args, **kwargs).run(r_conn()) except r.RqlRuntimeError: pass # Ignore db already created def ensure_index(table_name, index_name, *args, **kwargs): """Creates an index if it doesn't exist.""" indices = r.table(table_name).index_list().run(r_conn()) if index_name not in indices: r.table(table_name).index_create(index_name, *args, **kwargs).run( r_conn())
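A sketch of what the widened signature buys: table options such as primary_key now pass straight through to r.table_create instead of being dropped. primary_key is a documented table_create option; the table names below are invented for illustration, and the import path assumes db/ is importable as a package.

from db.util import ensure_index, ensure_table

# Before this change, ensure_table() accepted only a name, so any options
# were silently lost. Now they reach r.table_create() unchanged.
ensure_table('plugins', primary_key='slug')
ensure_table('users')  # plain call still works; RethinkDB defaults to 'id'

# Companion helper from the same module, shown for comparison.
ensure_index('plugins', 'rating')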
73c842af63a09add43c0e33336dd4eb21153fda1
bin/database.py
bin/database.py
#!/usr/bin/env python import json from api import config CURRENT_DATABASE_VERSION = 1 # An int that is bumped when a new def confirm_schema_match(): """ Checks version of database schema Returns (0) if DB schema version matches requirements. Returns (42) if DB schema version does not match requirements and can be upgraded. Returns (43) if DB schema version does not match requirements and cannot be upgraded, perhaps because code is at lower version than the DB schema version. """ version = config.db.version.find_one({"_id": "version"}) if version is None or version.get('database', None) is None: return 42 # At version 0 db_version = version.get('database', 0) if not isinstance(db_version, int) or db_version > CURRENT_DATABASE_VERSION: return 43 elif db_version < CURRENT_DATABASE_VERSION: return 42 else: return 0 def upgrade_schema(): """ Upgrades db to the current schema version Returns (0) if upgrade is successful """ # In progress # db_version = version.get('database',0) # if db_version < 1: # # rename the metadata fields # config.db.container.update_many({}, {"$rename": {"metadata": "info"}}) # config.db.version.update_one({"_id": "version"}, {"$set": {"database": CURRENT_DATABASE_VERSION}}) return 0
#!/usr/bin/env python import json from api import config CURRENT_DATABASE_VERSION = 1 # An int that is bumped when a new schema change is made def confirm_schema_match(): """ Checks version of database schema Returns (0) if DB schema version matches requirements. Returns (42) if DB schema version does not match requirements and can be upgraded. Returns (43) if DB schema version does not match requirements and cannot be upgraded, perhaps because code is at lower version than the DB schema version. """ version = config.db.version.find_one({"_id": "version"}) if version is None or version.get('database', None) is None: return 42 # At version 0 db_version = version.get('database', 0) if not isinstance(db_version, int) or db_version > CURRENT_DATABASE_VERSION: return 43 elif db_version < CURRENT_DATABASE_VERSION: return 42 else: return 0 def upgrade_schema(): """ Upgrades db to the current schema version Returns (0) if upgrade is successful """ # In progress # db_version = version.get('database',0) # if db_version < 1: # # rename the metadata fields # config.db.container.update_many({}, {"$rename": {"metadata": "info"}}) # config.db.version.update_one({"_id": "version"}, {"$set": {"database": CURRENT_DATABASE_VERSION}}) return 0
Fix tab vs spaces issue
Fix tab vs spaces issue
Python
mit
scitran/api,scitran/api,scitran/core,scitran/core,scitran/core,scitran/core
#!/usr/bin/env python import json from api import config CURRENT_DATABASE_VERSION = 1 # An int that is bumped when a new def confirm_schema_match(): """ Checks version of database schema Returns (0) if DB schema version matches requirements. Returns (42) if DB schema version does not match requirements and can be upgraded. Returns (43) if DB schema version does not match requirements and cannot be upgraded, perhaps because code is at lower version than the DB schema version. """ version = config.db.version.find_one({"_id": "version"}) if version is None or version.get('database', None) is None: return 42 # At version 0 db_version = version.get('database', 0) if not isinstance(db_version, int) or db_version > CURRENT_DATABASE_VERSION: return 43 elif db_version < CURRENT_DATABASE_VERSION: return 42 else: return 0 def upgrade_schema(): """ Upgrades db to the current schema version Returns (0) if upgrade is successful """ # In progress # db_version = version.get('database',0) # if db_version < 1: # # rename the metadata fields # config.db.container.update_many({}, {"$rename": {"metadata": "info"}}) # config.db.version.update_one({"_id": "version"}, {"$set": {"database": CURRENT_DATABASE_VERSION}}) return 0 Fix tab vs spaces issue
#!/usr/bin/env python import json from api import config CURRENT_DATABASE_VERSION = 1 # An int that is bumped when a new schema change is made def confirm_schema_match(): """ Checks version of database schema Returns (0) if DB schema version matches requirements. Returns (42) if DB schema version does not match requirements and can be upgraded. Returns (43) if DB schema version does not match requirements and cannot be upgraded, perhaps because code is at lower version than the DB schema version. """ version = config.db.version.find_one({"_id": "version"}) if version is None or version.get('database', None) is None: return 42 # At version 0 db_version = version.get('database', 0) if not isinstance(db_version, int) or db_version > CURRENT_DATABASE_VERSION: return 43 elif db_version < CURRENT_DATABASE_VERSION: return 42 else: return 0 def upgrade_schema(): """ Upgrades db to the current schema version Returns (0) if upgrade is successful """ # In progress # db_version = version.get('database',0) # if db_version < 1: # # rename the metadata fields # config.db.container.update_many({}, {"$rename": {"metadata": "info"}}) # config.db.version.update_one({"_id": "version"}, {"$set": {"database": CURRENT_DATABASE_VERSION}}) return 0
<commit_before>#!/usr/bin/env python import json from api import config CURRENT_DATABASE_VERSION = 1 # An int that is bumped when a new def confirm_schema_match(): """ Checks version of database schema Returns (0) if DB schema version matches requirements. Returns (42) if DB schema version does not match requirements and can be upgraded. Returns (43) if DB schema version does not match requirements and cannot be upgraded, perhaps because code is at lower version than the DB schema version. """ version = config.db.version.find_one({"_id": "version"}) if version is None or version.get('database', None) is None: return 42 # At version 0 db_version = version.get('database', 0) if not isinstance(db_version, int) or db_version > CURRENT_DATABASE_VERSION: return 43 elif db_version < CURRENT_DATABASE_VERSION: return 42 else: return 0 def upgrade_schema(): """ Upgrades db to the current schema version Returns (0) if upgrade is successful """ # In progress # db_version = version.get('database',0) # if db_version < 1: # # rename the metadata fields # config.db.container.update_many({}, {"$rename": {"metadata": "info"}}) # config.db.version.update_one({"_id": "version"}, {"$set": {"database": CURRENT_DATABASE_VERSION}}) return 0 <commit_msg>Fix tab vs spaces issue<commit_after>
#!/usr/bin/env python import json from api import config CURRENT_DATABASE_VERSION = 1 # An int that is bumped when a new schema change is made def confirm_schema_match(): """ Checks version of database schema Returns (0) if DB schema version matches requirements. Returns (42) if DB schema version does not match requirements and can be upgraded. Returns (43) if DB schema version does not match requirements and cannot be upgraded, perhaps because code is at lower version than the DB schema version. """ version = config.db.version.find_one({"_id": "version"}) if version is None or version.get('database', None) is None: return 42 # At version 0 db_version = version.get('database', 0) if not isinstance(db_version, int) or db_version > CURRENT_DATABASE_VERSION: return 43 elif db_version < CURRENT_DATABASE_VERSION: return 42 else: return 0 def upgrade_schema(): """ Upgrades db to the current schema version Returns (0) if upgrade is successful """ # In progress # db_version = version.get('database',0) # if db_version < 1: # # rename the metadata fields # config.db.container.update_many({}, {"$rename": {"metadata": "info"}}) # config.db.version.update_one({"_id": "version"}, {"$set": {"database": CURRENT_DATABASE_VERSION}}) return 0
#!/usr/bin/env python import json from api import config CURRENT_DATABASE_VERSION = 1 # An int that is bumped when a new def confirm_schema_match(): """ Checks version of database schema Returns (0) if DB schema version matches requirements. Returns (42) if DB schema version does not match requirements and can be upgraded. Returns (43) if DB schema version does not match requirements and cannot be upgraded, perhaps because code is at lower version than the DB schema version. """ version = config.db.version.find_one({"_id": "version"}) if version is None or version.get('database', None) is None: return 42 # At version 0 db_version = version.get('database', 0) if not isinstance(db_version, int) or db_version > CURRENT_DATABASE_VERSION: return 43 elif db_version < CURRENT_DATABASE_VERSION: return 42 else: return 0 def upgrade_schema(): """ Upgrades db to the current schema version Returns (0) if upgrade is successful """ # In progress # db_version = version.get('database',0) # if db_version < 1: # # rename the metadata fields # config.db.container.update_many({}, {"$rename": {"metadata": "info"}}) # config.db.version.update_one({"_id": "version"}, {"$set": {"database": CURRENT_DATABASE_VERSION}}) return 0 Fix tab vs spaces issue#!/usr/bin/env python import json from api import config CURRENT_DATABASE_VERSION = 1 # An int that is bumped when a new schema change is made def confirm_schema_match(): """ Checks version of database schema Returns (0) if DB schema version matches requirements. Returns (42) if DB schema version does not match requirements and can be upgraded. Returns (43) if DB schema version does not match requirements and cannot be upgraded, perhaps because code is at lower version than the DB schema version. """ version = config.db.version.find_one({"_id": "version"}) if version is None or version.get('database', None) is None: return 42 # At version 0 db_version = version.get('database', 0) if not isinstance(db_version, int) or db_version > CURRENT_DATABASE_VERSION: return 43 elif db_version < CURRENT_DATABASE_VERSION: return 42 else: return 0 def upgrade_schema(): """ Upgrades db to the current schema version Returns (0) if upgrade is successful """ # In progress # db_version = version.get('database',0) # if db_version < 1: # # rename the metadata fields # config.db.container.update_many({}, {"$rename": {"metadata": "info"}}) # config.db.version.update_one({"_id": "version"}, {"$set": {"database": CURRENT_DATABASE_VERSION}}) return 0
<commit_before>#!/usr/bin/env python import json from api import config CURRENT_DATABASE_VERSION = 1 # An int that is bumped when a new def confirm_schema_match(): """ Checks version of database schema Returns (0) if DB schema version matches requirements. Returns (42) if DB schema version does not match requirements and can be upgraded. Returns (43) if DB schema version does not match requirements and cannot be upgraded, perhaps because code is at lower version than the DB schema version. """ version = config.db.version.find_one({"_id": "version"}) if version is None or version.get('database', None) is None: return 42 # At version 0 db_version = version.get('database', 0) if not isinstance(db_version, int) or db_version > CURRENT_DATABASE_VERSION: return 43 elif db_version < CURRENT_DATABASE_VERSION: return 42 else: return 0 def upgrade_schema(): """ Upgrades db to the current schema version Returns (0) if upgrade is successful """ # In progress # db_version = version.get('database',0) # if db_version < 1: # # rename the metadata fields # config.db.container.update_many({}, {"$rename": {"metadata": "info"}}) # config.db.version.update_one({"_id": "version"}, {"$set": {"database": CURRENT_DATABASE_VERSION}}) return 0 <commit_msg>Fix tab vs spaces issue<commit_after>#!/usr/bin/env python import json from api import config CURRENT_DATABASE_VERSION = 1 # An int that is bumped when a new schema change is made def confirm_schema_match(): """ Checks version of database schema Returns (0) if DB schema version matches requirements. Returns (42) if DB schema version does not match requirements and can be upgraded. Returns (43) if DB schema version does not match requirements and cannot be upgraded, perhaps because code is at lower version than the DB schema version. """ version = config.db.version.find_one({"_id": "version"}) if version is None or version.get('database', None) is None: return 42 # At version 0 db_version = version.get('database', 0) if not isinstance(db_version, int) or db_version > CURRENT_DATABASE_VERSION: return 43 elif db_version < CURRENT_DATABASE_VERSION: return 42 else: return 0 def upgrade_schema(): """ Upgrades db to the current schema version Returns (0) if upgrade is successful """ # In progress # db_version = version.get('database',0) # if db_version < 1: # # rename the metadata fields # config.db.container.update_many({}, {"$rename": {"metadata": "info"}}) # config.db.version.update_one({"_id": "version"}, {"$set": {"database": CURRENT_DATABASE_VERSION}}) return 0
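A sketch of how a deployment script might act on the documented return codes; only the 0/42/43 contract comes from the docstrings above, while the import path and wiring are assumptions.

import sys

from database import confirm_schema_match, upgrade_schema  # bin/ on sys.path assumed

status = confirm_schema_match()
if status == 0:
    pass  # schema already matches this release
elif status == 42:
    sys.exit(upgrade_schema())  # mismatch, but upgradeable in place
else:  # 43: the database schema is newer than this code understands
    sys.exit("Refusing to start: database schema is ahead of this release")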
95a5c3076dbe5f967a44988c469ca96d660d0679
muzicast/collection/fswatcher.py
muzicast/collection/fswatcher.py
from watchdog.events import FileSystemEventHandler from muzicast.meta import Track class CollectionEventHandler(FileSystemEventHandler): def __init__(self, scanner): FileSystemEventHandler.__init__(self) self.scanner = scanner def on_moved(self, event): # a move is simple to handle, search for track # with src and modify it to dest # TODO(nikhil) handle directory move print event.src_path, '->', event.dest_path entries = list(Track.selectBy(url='file://' + event.src_path)) if entries: for entry in entries: print entry def on_created(self, event): print "Created", dir(event), event.src_path def on_deleted(self, event): # instead of bothering with file/directory changes # we simply match path and drop all tracks. tracks = Track.select(Track.q.url.startswith('file://'+event.src_path)) [track.destroySelf() for track in tracks] def on_modified(self, event): print "Modified", dir(event), event.src_path
import os from watchdog.events import FileSystemEventHandler from muzicast.meta import Track class CollectionEventHandler(FileSystemEventHandler): def __init__(self, scanner): FileSystemEventHandler.__init__(self) self.scanner = scanner def on_moved(self, event): # a move is simple to handle, search for track # with src and modify it to dest # TODO(nikhil) handle directory move print event.src_path, '->', event.dest_path entries = list(Track.selectBy(url='file://' + event.src_path)) if entries: for entry in entries: print entry def on_created(self, event): # a created event is always followed by a modify event # due to the actual data write that occurs to the file # unless its a zero-byte file in which case we don't want # to scan it anyway. So on_created does not do any handling # it is done by on_modified pass def on_deleted(self, event): # instead of bothering with file/directory changes # we simply match path and drop all tracks. tracks = Track.select(Track.q.url.startswith('file://'+event.src_path)) [track.destroySelf() for track in tracks] def on_modified(self, event): print "Modified", dir(event), event.src_path if event.is_directory: self.scanner.scan_directory(event.src_path, False) else: self.scanner.scan_directory(os.path.dirname(event.src_path), False)
Update on file modification by running a scan on the directory
Update on file modification by running a scan on the directory
Python
mit
nikhilm/muzicast,nikhilm/muzicast
from watchdog.events import FileSystemEventHandler from muzicast.meta import Track class CollectionEventHandler(FileSystemEventHandler): def __init__(self, scanner): FileSystemEventHandler.__init__(self) self.scanner = scanner def on_moved(self, event): # a move is simple to handle, search for track # with src and modify it to dest # TODO(nikhil) handle directory move print event.src_path, '->', event.dest_path entries = list(Track.selectBy(url='file://' + event.src_path)) if entries: for entry in entries: print entry def on_created(self, event): print "Created", dir(event), event.src_path def on_deleted(self, event): # instead of bothering with file/directory changes # we simply match path and drop all tracks. tracks = Track.select(Track.q.url.startswith('file://'+event.src_path)) [track.destroySelf() for track in tracks] def on_modified(self, event): print "Modified", dir(event), event.src_path Update on file modification by running a scan on the directory
import os from watchdog.events import FileSystemEventHandler from muzicast.meta import Track class CollectionEventHandler(FileSystemEventHandler): def __init__(self, scanner): FileSystemEventHandler.__init__(self) self.scanner = scanner def on_moved(self, event): # a move is simple to handle, search for track # with src and modify it to dest # TODO(nikhil) handle directory move print event.src_path, '->', event.dest_path entries = list(Track.selectBy(url='file://' + event.src_path)) if entries: for entry in entries: print entry def on_created(self, event): # a created event is always followed by a modify event # due to the actual data write that occurs to the file # unless its a zero-byte file in which case we don't want # to scan it anyway. So on_created does not do any handling # it is done by on_modified pass def on_deleted(self, event): # instead of bothering with file/directory changes # we simply match path and drop all tracks. tracks = Track.select(Track.q.url.startswith('file://'+event.src_path)) [track.destroySelf() for track in tracks] def on_modified(self, event): print "Modified", dir(event), event.src_path if event.is_directory: self.scanner.scan_directory(event.src_path, False) else: self.scanner.scan_directory(os.path.dirname(event.src_path), False)
<commit_before>from watchdog.events import FileSystemEventHandler from muzicast.meta import Track class CollectionEventHandler(FileSystemEventHandler): def __init__(self, scanner): FileSystemEventHandler.__init__(self) self.scanner = scanner def on_moved(self, event): # a move is simple to handle, search for track # with src and modify it to dest # TODO(nikhil) handle directory move print event.src_path, '->', event.dest_path entries = list(Track.selectBy(url='file://' + event.src_path)) if entries: for entry in entries: print entry def on_created(self, event): print "Created", dir(event), event.src_path def on_deleted(self, event): # instead of bothering with file/directory changes # we simply match path and drop all tracks. tracks = Track.select(Track.q.url.startswith('file://'+event.src_path)) [track.destroySelf() for track in tracks] def on_modified(self, event): print "Modified", dir(event), event.src_path <commit_msg>Update on file modification by running a scan on the directory<commit_after>
import os from watchdog.events import FileSystemEventHandler from muzicast.meta import Track class CollectionEventHandler(FileSystemEventHandler): def __init__(self, scanner): FileSystemEventHandler.__init__(self) self.scanner = scanner def on_moved(self, event): # a move is simple to handle, search for track # with src and modify it to dest # TODO(nikhil) handle directory move print event.src_path, '->', event.dest_path entries = list(Track.selectBy(url='file://' + event.src_path)) if entries: for entry in entries: print entry def on_created(self, event): # a created event is always followed by a modify event # due to the actual data write that occurs to the file # unless its a zero-byte file in which case we don't want # to scan it anyway. So on_created does not do any handling # it is done by on_modified pass def on_deleted(self, event): # instead of bothering with file/directory changes # we simply match path and drop all tracks. tracks = Track.select(Track.q.url.startswith('file://'+event.src_path)) [track.destroySelf() for track in tracks] def on_modified(self, event): print "Modified", dir(event), event.src_path if event.is_directory: self.scanner.scan_directory(event.src_path, False) else: self.scanner.scan_directory(os.path.dirname(event.src_path), False)
from watchdog.events import FileSystemEventHandler from muzicast.meta import Track class CollectionEventHandler(FileSystemEventHandler): def __init__(self, scanner): FileSystemEventHandler.__init__(self) self.scanner = scanner def on_moved(self, event): # a move is simple to handle, search for track # with src and modify it to dest # TODO(nikhil) handle directory move print event.src_path, '->', event.dest_path entries = list(Track.selectBy(url='file://' + event.src_path)) if entries: for entry in entries: print entry def on_created(self, event): print "Created", dir(event), event.src_path def on_deleted(self, event): # instead of bothering with file/directory changes # we simply match path and drop all tracks. tracks = Track.select(Track.q.url.startswith('file://'+event.src_path)) [track.destroySelf() for track in tracks] def on_modified(self, event): print "Modified", dir(event), event.src_path Update on file modification by running a scan on the directoryimport os from watchdog.events import FileSystemEventHandler from muzicast.meta import Track class CollectionEventHandler(FileSystemEventHandler): def __init__(self, scanner): FileSystemEventHandler.__init__(self) self.scanner = scanner def on_moved(self, event): # a move is simple to handle, search for track # with src and modify it to dest # TODO(nikhil) handle directory move print event.src_path, '->', event.dest_path entries = list(Track.selectBy(url='file://' + event.src_path)) if entries: for entry in entries: print entry def on_created(self, event): # a created event is always followed by a modify event # due to the actual data write that occurs to the file # unless its a zero-byte file in which case we don't want # to scan it anyway. So on_created does not do any handling # it is done by on_modified pass def on_deleted(self, event): # instead of bothering with file/directory changes # we simply match path and drop all tracks. tracks = Track.select(Track.q.url.startswith('file://'+event.src_path)) [track.destroySelf() for track in tracks] def on_modified(self, event): print "Modified", dir(event), event.src_path if event.is_directory: self.scanner.scan_directory(event.src_path, False) else: self.scanner.scan_directory(os.path.dirname(event.src_path), False)
<commit_before>from watchdog.events import FileSystemEventHandler from muzicast.meta import Track class CollectionEventHandler(FileSystemEventHandler): def __init__(self, scanner): FileSystemEventHandler.__init__(self) self.scanner = scanner def on_moved(self, event): # a move is simple to handle, search for track # with src and modify it to dest # TODO(nikhil) handle directory move print event.src_path, '->', event.dest_path entries = list(Track.selectBy(url='file://' + event.src_path)) if entries: for entry in entries: print entry def on_created(self, event): print "Created", dir(event), event.src_path def on_deleted(self, event): # instead of bothering with file/directory changes # we simply match path and drop all tracks. tracks = Track.select(Track.q.url.startswith('file://'+event.src_path)) [track.destroySelf() for track in tracks] def on_modified(self, event): print "Modified", dir(event), event.src_path <commit_msg>Update on file modification by running a scan on the directory<commit_after>import os from watchdog.events import FileSystemEventHandler from muzicast.meta import Track class CollectionEventHandler(FileSystemEventHandler): def __init__(self, scanner): FileSystemEventHandler.__init__(self) self.scanner = scanner def on_moved(self, event): # a move is simple to handle, search for track # with src and modify it to dest # TODO(nikhil) handle directory move print event.src_path, '->', event.dest_path entries = list(Track.selectBy(url='file://' + event.src_path)) if entries: for entry in entries: print entry def on_created(self, event): # a created event is always followed by a modify event # due to the actual data write that occurs to the file # unless its a zero-byte file in which case we don't want # to scan it anyway. So on_created does not do any handling # it is done by on_modified pass def on_deleted(self, event): # instead of bothering with file/directory changes # we simply match path and drop all tracks. tracks = Track.select(Track.q.url.startswith('file://'+event.src_path)) [track.destroySelf() for track in tracks] def on_modified(self, event): print "Modified", dir(event), event.src_path if event.is_directory: self.scanner.scan_directory(event.src_path, False) else: self.scanner.scan_directory(os.path.dirname(event.src_path), False)
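A minimal sketch of attaching the handler to a watchdog Observer. The scanner is whatever object exposes the scan_directory(path, recursive) method the handler calls; its construction is not shown in the record, so a stub stands in for it here, and the watched path is made up.

import time

from watchdog.observers import Observer

from muzicast.collection.fswatcher import CollectionEventHandler


class StubScanner(object):
    """Placeholder for the real scanner the record assumes already exists."""
    def scan_directory(self, path, recursive):
        pass  # the real implementation walks `path` and updates Track rows


handler = CollectionEventHandler(StubScanner())
observer = Observer()
observer.schedule(handler, '/music/library', recursive=True)
observer.start()
try:
    while True:
        time.sleep(1)  # callbacks arrive on the observer's worker thread
except KeyboardInterrupt:
    observer.stop()
observer.join()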
dd65699f72ec951321ef0957dfdfc2d37f44b4d9
app/tests/settings.py
app/tests/settings.py
import os # Set environment variables before importing settings os.environ["PROTECTED_S3_CUSTOM_DOMAIN"] = "testserver/media" # noinspection PyUnresolvedReferences from config.settings import * """ Settings overrides for tests """ ALLOWED_HOSTS = [".testserver"] WHITENOISE_AUTOREFRESH = True PASSWORD_HASHERS = ["django.contrib.auth.hashers.MD5PasswordHasher"] EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend" # Disable debugging in tests DEBUG = False TEMPLATE_DEBUG = False DEBUG_LOGGING = False THUMBNAIL_DEBUG = False
import logging import os # Set environment variables before importing settings os.environ["PROTECTED_S3_CUSTOM_DOMAIN"] = "testserver/media" # noinspection PyUnresolvedReferences from config.settings import * """ Settings overrides for tests """ ALLOWED_HOSTS = [".testserver"] WHITENOISE_AUTOREFRESH = True PASSWORD_HASHERS = ["django.contrib.auth.hashers.MD5PasswordHasher"] EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend" # Disable debugging in tests DEBUG = False TEMPLATE_DEBUG = False DEBUG_LOGGING = False THUMBNAIL_DEBUG = False # Disable non-critical logging in tests logging.disable(logging.CRITICAL)
Disable non-critical logging in tests
Disable non-critical logging in tests
Python
apache-2.0
comic/comic-django,comic/comic-django,comic/comic-django,comic/comic-django,comic/comic-django
import os # Set environment variables before importing settings os.environ["PROTECTED_S3_CUSTOM_DOMAIN"] = "testserver/media" # noinspection PyUnresolvedReferences from config.settings import * """ Settings overrides for tests """ ALLOWED_HOSTS = [".testserver"] WHITENOISE_AUTOREFRESH = True PASSWORD_HASHERS = ["django.contrib.auth.hashers.MD5PasswordHasher"] EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend" # Disable debugging in tests DEBUG = False TEMPLATE_DEBUG = False DEBUG_LOGGING = False THUMBNAIL_DEBUG = False Disable non-critical logging in tests
import logging import os # Set environment variables before importing settings os.environ["PROTECTED_S3_CUSTOM_DOMAIN"] = "testserver/media" # noinspection PyUnresolvedReferences from config.settings import * """ Settings overrides for tests """ ALLOWED_HOSTS = [".testserver"] WHITENOISE_AUTOREFRESH = True PASSWORD_HASHERS = ["django.contrib.auth.hashers.MD5PasswordHasher"] EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend" # Disable debugging in tests DEBUG = False TEMPLATE_DEBUG = False DEBUG_LOGGING = False THUMBNAIL_DEBUG = False # Disable non-critical logging in tests logging.disable(logging.CRITICAL)
<commit_before>import os # Set environment variables before importing settings os.environ["PROTECTED_S3_CUSTOM_DOMAIN"] = "testserver/media" # noinspection PyUnresolvedReferences from config.settings import * """ Settings overrides for tests """ ALLOWED_HOSTS = [".testserver"] WHITENOISE_AUTOREFRESH = True PASSWORD_HASHERS = ["django.contrib.auth.hashers.MD5PasswordHasher"] EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend" # Disable debugging in tests DEBUG = False TEMPLATE_DEBUG = False DEBUG_LOGGING = False THUMBNAIL_DEBUG = False <commit_msg>Disable non-critical logging in tests<commit_after>
import logging import os # Set environment variables before importing settings os.environ["PROTECTED_S3_CUSTOM_DOMAIN"] = "testserver/media" # noinspection PyUnresolvedReferences from config.settings import * """ Settings overrides for tests """ ALLOWED_HOSTS = [".testserver"] WHITENOISE_AUTOREFRESH = True PASSWORD_HASHERS = ["django.contrib.auth.hashers.MD5PasswordHasher"] EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend" # Disable debugging in tests DEBUG = False TEMPLATE_DEBUG = False DEBUG_LOGGING = False THUMBNAIL_DEBUG = False # Disable non-critical logging in tests logging.disable(logging.CRITICAL)
import os # Set environment variables before importing settings os.environ["PROTECTED_S3_CUSTOM_DOMAIN"] = "testserver/media" # noinspection PyUnresolvedReferences from config.settings import * """ Settings overrides for tests """ ALLOWED_HOSTS = [".testserver"] WHITENOISE_AUTOREFRESH = True PASSWORD_HASHERS = ["django.contrib.auth.hashers.MD5PasswordHasher"] EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend" # Disable debugging in tests DEBUG = False TEMPLATE_DEBUG = False DEBUG_LOGGING = False THUMBNAIL_DEBUG = False Disable non-critical logging in testsimport logging import os # Set environment variables before importing settings os.environ["PROTECTED_S3_CUSTOM_DOMAIN"] = "testserver/media" # noinspection PyUnresolvedReferences from config.settings import * """ Settings overrides for tests """ ALLOWED_HOSTS = [".testserver"] WHITENOISE_AUTOREFRESH = True PASSWORD_HASHERS = ["django.contrib.auth.hashers.MD5PasswordHasher"] EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend" # Disable debugging in tests DEBUG = False TEMPLATE_DEBUG = False DEBUG_LOGGING = False THUMBNAIL_DEBUG = False # Disable non-critical logging in tests logging.disable(logging.CRITICAL)
<commit_before>import os # Set environment variables before importing settings os.environ["PROTECTED_S3_CUSTOM_DOMAIN"] = "testserver/media" # noinspection PyUnresolvedReferences from config.settings import * """ Settings overrides for tests """ ALLOWED_HOSTS = [".testserver"] WHITENOISE_AUTOREFRESH = True PASSWORD_HASHERS = ["django.contrib.auth.hashers.MD5PasswordHasher"] EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend" # Disable debugging in tests DEBUG = False TEMPLATE_DEBUG = False DEBUG_LOGGING = False THUMBNAIL_DEBUG = False <commit_msg>Disable non-critical logging in tests<commit_after>import logging import os # Set environment variables before importing settings os.environ["PROTECTED_S3_CUSTOM_DOMAIN"] = "testserver/media" # noinspection PyUnresolvedReferences from config.settings import * """ Settings overrides for tests """ ALLOWED_HOSTS = [".testserver"] WHITENOISE_AUTOREFRESH = True PASSWORD_HASHERS = ["django.contrib.auth.hashers.MD5PasswordHasher"] EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend" # Disable debugging in tests DEBUG = False TEMPLATE_DEBUG = False DEBUG_LOGGING = False THUMBNAIL_DEBUG = False # Disable non-critical logging in tests logging.disable(logging.CRITICAL)
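One detail worth knowing about the added line: logging.disable(logging.CRITICAL) suppresses records at CRITICAL severity and below (in practice, everything) process-wide until it is reset. A small self-contained demonstration:

import logging

logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger(__name__)

log.error("visible")              # emitted normally

logging.disable(logging.CRITICAL)
log.critical("silenced")          # suppressed: severity <= CRITICAL is disabled

logging.disable(logging.NOTSET)   # lift the process-wide override
log.error("visible again")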
03866f41c15a22e66a2ba43cf1b4b78f991e5d8c
app/versions.py
app/versions.py
from common.apiversion import APIVersion from common.application import Application class VersionsApp(Application): def APIVersionList(self, req, args): return ( [ version._api_version_detail(req) for version in APIVersion.version_classes ], None ) def APIVersionDetails(self, req, params): return ( self._api_version_detail(req), None ) def factory(global_config, **settings): return VersionsApp()
from common.apiversion import APIVersion from common.application import Application class VersionsApp(Application): def APIVersionList(self, req, args): return ( [ version._api_version_detail(req) for version in APIVersion.version_classes ], None ) def APIVersionDetails(self, req, params): return ( APIVersion._api_version_detail(req), None ) def factory(global_config, **settings): return VersionsApp()
Call APIVersion._api_version_detail not nonexistent self._api_version_detail.
Call APIVersion._api_version_detail not nonexistent self._api_version_detail.
Python
apache-2.0
NCI-Cloud/reporting-api,NeCTAR-RC/reporting-api,NCI-Cloud/reporting-api,NeCTAR-RC/reporting-api
from common.apiversion import APIVersion from common.application import Application class VersionsApp(Application): def APIVersionList(self, req, args): return ( [ version._api_version_detail(req) for version in APIVersion.version_classes ], None ) def APIVersionDetails(self, req, params): return ( self._api_version_detail(req), None ) def factory(global_config, **settings): return VersionsApp() Call APIVersion._api_version_detail not nonexistent self._api_version_detail.
from common.apiversion import APIVersion from common.application import Application class VersionsApp(Application): def APIVersionList(self, req, args): return ( [ version._api_version_detail(req) for version in APIVersion.version_classes ], None ) def APIVersionDetails(self, req, params): return ( APIVersion._api_version_detail(req), None ) def factory(global_config, **settings): return VersionsApp()
<commit_before>from common.apiversion import APIVersion from common.application import Application class VersionsApp(Application): def APIVersionList(self, req, args): return ( [ version._api_version_detail(req) for version in APIVersion.version_classes ], None ) def APIVersionDetails(self, req, params): return ( self._api_version_detail(req), None ) def factory(global_config, **settings): return VersionsApp() <commit_msg>Call APIVersion._api_version_detail not nonexistent self._api_version_detail.<commit_after>
from common.apiversion import APIVersion from common.application import Application class VersionsApp(Application): def APIVersionList(self, req, args): return ( [ version._api_version_detail(req) for version in APIVersion.version_classes ], None ) def APIVersionDetails(self, req, params): return ( APIVersion._api_version_detail(req), None ) def factory(global_config, **settings): return VersionsApp()
from common.apiversion import APIVersion from common.application import Application class VersionsApp(Application): def APIVersionList(self, req, args): return ( [ version._api_version_detail(req) for version in APIVersion.version_classes ], None ) def APIVersionDetails(self, req, params): return ( self._api_version_detail(req), None ) def factory(global_config, **settings): return VersionsApp() Call APIVersion._api_version_detail not nonexistent self._api_version_detail.from common.apiversion import APIVersion from common.application import Application class VersionsApp(Application): def APIVersionList(self, req, args): return ( [ version._api_version_detail(req) for version in APIVersion.version_classes ], None ) def APIVersionDetails(self, req, params): return ( APIVersion._api_version_detail(req), None ) def factory(global_config, **settings): return VersionsApp()
<commit_before>from common.apiversion import APIVersion from common.application import Application class VersionsApp(Application): def APIVersionList(self, req, args): return ( [ version._api_version_detail(req) for version in APIVersion.version_classes ], None ) def APIVersionDetails(self, req, params): return ( self._api_version_detail(req), None ) def factory(global_config, **settings): return VersionsApp() <commit_msg>Call APIVersion._api_version_detail not nonexistent self._api_version_detail.<commit_after>from common.apiversion import APIVersion from common.application import Application class VersionsApp(Application): def APIVersionList(self, req, args): return ( [ version._api_version_detail(req) for version in APIVersion.version_classes ], None ) def APIVersionDetails(self, req, params): return ( APIVersion._api_version_detail(req), None ) def factory(global_config, **settings): return VersionsApp()
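The bug fixed here is visible in miniature below: VersionsApp subclasses Application, not APIVersion, so self never had an _api_version_detail attribute. A stripped-down sketch; the real classes live in common.apiversion and common.application and are not shown in the record, and _api_version_detail is modeled as a classmethod only because the list comprehension invokes it on classes.

class Application(object):
    pass  # no _api_version_detail anywhere on this branch of the hierarchy


class APIVersion(object):
    version_classes = []  # concrete API versions register themselves here

    @classmethod
    def _api_version_detail(cls, req):
        return {'id': cls.__name__}  # placeholder payload


class VersionsApp(Application):
    def APIVersionDetails(self, req, params):
        # self._api_version_detail(req) raised AttributeError, since the
        # method lives on APIVersion, outside this class's ancestry.
        return (APIVersion._api_version_detail(req), None)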
a5200bf744b819deff5f2301f5affdc524754a9a
code/png/__init__.py
code/png/__init__.py
from png import * # Following methods are not parts of API and imports only for unittest from png import _main from png import strtobytes
try: from png import * # Following methods are not parts of API and imports only for unittest from png import _main from png import strtobytes except ImportError: _png = __import__(__name__ + '.png') _to_import = _png.png.__all__ _to_import.extend(('_main', 'strtobytes')) for it in _to_import: locals()[it] = eval('_png.png.' + it)
Fix compatibility with absolute import of Py3
Fix compatibility with absolute import of Py3
Python
mit
Scondo/purepng,Scondo/purepng
from png import * # Following methods are not parts of API and imports only for unittest from png import _main from png import strtobytesFix compatibility with absolute import of Py3
try: from png import * # Following methods are not parts of API and imports only for unittest from png import _main from png import strtobytes except ImportError: _png = __import__(__name__ + '.png') _to_import = _png.png.__all__ _to_import.extend(('_main', 'strtobytes')) for it in _to_import: locals()[it] = eval('_png.png.' + it)
<commit_before>from png import * # Following methods are not parts of API and imports only for unittest from png import _main from png import strtobytes<commit_msg>Fix compatibility with absolute import of Py3<commit_after>
try: from png import * # Following methods are not parts of API and imports only for unittest from png import _main from png import strtobytes except ImportError: _png = __import__(__name__ + '.png') _to_import = _png.png.__all__ _to_import.extend(('_main', 'strtobytes')) for it in _to_import: locals()[it] = eval('_png.png.' + it)
from png import * # Following methods are not parts of API and imports only for unittest from png import _main from png import strtobytesFix compatibility with absolute import of Py3try: from png import * # Following methods are not parts of API and imports only for unittest from png import _main from png import strtobytes except ImportError: _png = __import__(__name__ + '.png') _to_import = _png.png.__all__ _to_import.extend(('_main', 'strtobytes')) for it in _to_import: locals()[it] = eval('_png.png.' + it)
<commit_before>from png import * # Following methods are not parts of API and imports only for unittest from png import _main from png import strtobytes<commit_msg>Fix compatibility with absolute import of Py3<commit_after>try: from png import * # Following methods are not parts of API and imports only for unittest from png import _main from png import strtobytes except ImportError: _png = __import__(__name__ + '.png') _to_import = _png.png.__all__ _to_import.extend(('_main', 'strtobytes')) for it in _to_import: locals()[it] = eval('_png.png.' + it)
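The underlying incompatibility: a bare `import png` inside the png package is an implicit relative import, legal on Python 2 but an ImportError on Python 3, which is what the except branch papers over with eval. A sketch of the more conventional fix, assuming the submodule defines __all__ as the fallback branch implies:

from __future__ import absolute_import  # Python 3 import semantics on Python 2

from .png import *           # explicit relative import works on both versions
from .png import _main       # unittest helpers; not exported by the star import
from .png import strtobytes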
73c84754699a6f0803d0ceb3081988b45c9c76e7
contours/__init__.py
contours/__init__.py
# -* coding: utf-8 -*-
"""Contour calculations."""

# Python 2 support
# pylint: disable=redefined-builtin,unused-wildcard-import,wildcard-import
from __future__ import (absolute_import, division, print_function,
                        unicode_literals)
from builtins import *

from .core import numpy_formatter, matlab_formatter, shapely_formatter
from .quad import QuadContourGenerator

__version__ = '0.0.2.dev0'
# -* coding: utf-8 -*-
"""Contour calculations."""

# Python 2 support
from __future__ import absolute_import

from .core import numpy_formatter, matlab_formatter, shapely_formatter
from .quad import QuadContourGenerator

__version__ = '0.0.2.dev0'
Remove unneeded Python 2.7 compatibility imports.
Remove unneeded Python 2.7 compatibility imports.
Python
mit
ccarocean/python-contours
# -* coding: utf-8 -*-
"""Contour calculations."""

# Python 2 support
# pylint: disable=redefined-builtin,unused-wildcard-import,wildcard-import
from __future__ import (absolute_import, division, print_function,
                        unicode_literals)
from builtins import *

from .core import numpy_formatter, matlab_formatter, shapely_formatter
from .quad import QuadContourGenerator

__version__ = '0.0.2.dev0'
Remove unneeded Python 2.7 compatibility imports.
# -* coding: utf-8 -*-
"""Contour calculations."""

# Python 2 support
from __future__ import absolute_import

from .core import numpy_formatter, matlab_formatter, shapely_formatter
from .quad import QuadContourGenerator

__version__ = '0.0.2.dev0'
<commit_before># -* coding: utf-8 -*-
"""Contour calculations."""

# Python 2 support
# pylint: disable=redefined-builtin,unused-wildcard-import,wildcard-import
from __future__ import (absolute_import, division, print_function,
                        unicode_literals)
from builtins import *

from .core import numpy_formatter, matlab_formatter, shapely_formatter
from .quad import QuadContourGenerator

__version__ = '0.0.2.dev0'
<commit_msg>Remove unneeded Python 2.7 compatibility imports.<commit_after>
# -* coding: utf-8 -*-
"""Contour calculations."""

# Python 2 support
from __future__ import absolute_import

from .core import numpy_formatter, matlab_formatter, shapely_formatter
from .quad import QuadContourGenerator

__version__ = '0.0.2.dev0'
# -* coding: utf-8 -*-
"""Contour calculations."""

# Python 2 support
# pylint: disable=redefined-builtin,unused-wildcard-import,wildcard-import
from __future__ import (absolute_import, division, print_function,
                        unicode_literals)
from builtins import *

from .core import numpy_formatter, matlab_formatter, shapely_formatter
from .quad import QuadContourGenerator

__version__ = '0.0.2.dev0'
Remove unneeded Python 2.7 compatibility imports.# -* coding: utf-8 -*-
"""Contour calculations."""

# Python 2 support
from __future__ import absolute_import

from .core import numpy_formatter, matlab_formatter, shapely_formatter
from .quad import QuadContourGenerator

__version__ = '0.0.2.dev0'
<commit_before># -* coding: utf-8 -*-
"""Contour calculations."""

# Python 2 support
# pylint: disable=redefined-builtin,unused-wildcard-import,wildcard-import
from __future__ import (absolute_import, division, print_function,
                        unicode_literals)
from builtins import *

from .core import numpy_formatter, matlab_formatter, shapely_formatter
from .quad import QuadContourGenerator

__version__ = '0.0.2.dev0'
<commit_msg>Remove unneeded Python 2.7 compatibility imports.<commit_after># -* coding: utf-8 -*-
"""Contour calculations."""

# Python 2 support
from __future__ import absolute_import

from .core import numpy_formatter, matlab_formatter, shapely_formatter
from .quad import QuadContourGenerator

__version__ = '0.0.2.dev0'
2ad9cf280ee1743f1ad542d3c0c8d8365caea11e
condatestall.py
condatestall.py
""" Uses conda to run and test all supported python + numpy versions. """ from __future__ import print_function import itertools import subprocess import os import sys NPY = '16', '17' PY = '26', '27', '33' RECIPE_DIR = "./buildscripts/condarecipe.local" def main(): failfast = '-v' in sys.argv[1:] args = "conda build %s --no-binstar-upload" % RECIPE_DIR failures = [] for py, npy in itertools.product(PY, NPY): if py == '33' and npy == '16': # Skip python3 + numpy16 continue os.environ['CONDA_PY'] = py os.environ['CONDA_NPY'] = npy try: subprocess.check_call(args.split()) except subprocess.CalledProcessError as e: failures.append((py, npy, e)) if failfast: break print("=" * 80) if failures: for py, npy, err in failures: print("Test failed for python %s numpy %s" % (py, npy)) print(err) else: print("All Passed") if __name__ == '__main__': main()
""" Uses conda to run and test all supported python + numpy versions. """ from __future__ import print_function import itertools import subprocess import os import sys if '-q' in sys.argv[1:]: NPY = '18', else: NPY = '16', '17', '18' PY = '26', '27', '33' RECIPE_DIR = "./buildscripts/condarecipe.local" def main(): failfast = '-v' in sys.argv[1:] args = "conda build %s --no-binstar-upload" % RECIPE_DIR failures = [] for py, npy in itertools.product(PY, NPY): if py == '33' and npy == '16': # Skip python3 + numpy16 continue os.environ['CONDA_PY'] = py os.environ['CONDA_NPY'] = npy try: subprocess.check_call(args.split()) except subprocess.CalledProcessError as e: failures.append((py, npy, e)) if failfast: break print("=" * 80) if failures: for py, npy, err in failures: print("Test failed for python %s numpy %s" % (py, npy)) print(err) else: print("All Passed") if __name__ == '__main__': main()
Add option for quick test on all python version
Add option for quick test on all python version
Python
bsd-2-clause
pitrou/numba,GaZ3ll3/numba,pombredanne/numba,GaZ3ll3/numba,stuartarchibald/numba,numba/numba,cpcloud/numba,gmarkall/numba,cpcloud/numba,gdementen/numba,ssarangi/numba,seibert/numba,sklam/numba,gdementen/numba,jriehl/numba,gmarkall/numba,IntelLabs/numba,pombredanne/numba,stuartarchibald/numba,jriehl/numba,stuartarchibald/numba,gmarkall/numba,gmarkall/numba,gdementen/numba,stuartarchibald/numba,pombredanne/numba,pombredanne/numba,cpcloud/numba,numba/numba,ssarangi/numba,seibert/numba,cpcloud/numba,numba/numba,IntelLabs/numba,IntelLabs/numba,pitrou/numba,jriehl/numba,numba/numba,gmarkall/numba,jriehl/numba,ssarangi/numba,gdementen/numba,stonebig/numba,sklam/numba,gdementen/numba,IntelLabs/numba,seibert/numba,seibert/numba,stefanseefeld/numba,stefanseefeld/numba,IntelLabs/numba,GaZ3ll3/numba,stefanseefeld/numba,pombredanne/numba,jriehl/numba,ssarangi/numba,sklam/numba,numba/numba,GaZ3ll3/numba,stefanseefeld/numba,sklam/numba,ssarangi/numba,stonebig/numba,stuartarchibald/numba,stefanseefeld/numba,stonebig/numba,stonebig/numba,GaZ3ll3/numba,pitrou/numba,cpcloud/numba,stonebig/numba,seibert/numba,sklam/numba,pitrou/numba,pitrou/numba
""" Uses conda to run and test all supported python + numpy versions. """ from __future__ import print_function import itertools import subprocess import os import sys NPY = '16', '17' PY = '26', '27', '33' RECIPE_DIR = "./buildscripts/condarecipe.local" def main(): failfast = '-v' in sys.argv[1:] args = "conda build %s --no-binstar-upload" % RECIPE_DIR failures = [] for py, npy in itertools.product(PY, NPY): if py == '33' and npy == '16': # Skip python3 + numpy16 continue os.environ['CONDA_PY'] = py os.environ['CONDA_NPY'] = npy try: subprocess.check_call(args.split()) except subprocess.CalledProcessError as e: failures.append((py, npy, e)) if failfast: break print("=" * 80) if failures: for py, npy, err in failures: print("Test failed for python %s numpy %s" % (py, npy)) print(err) else: print("All Passed") if __name__ == '__main__': main() Add option for quick test on all python version
""" Uses conda to run and test all supported python + numpy versions. """ from __future__ import print_function import itertools import subprocess import os import sys if '-q' in sys.argv[1:]: NPY = '18', else: NPY = '16', '17', '18' PY = '26', '27', '33' RECIPE_DIR = "./buildscripts/condarecipe.local" def main(): failfast = '-v' in sys.argv[1:] args = "conda build %s --no-binstar-upload" % RECIPE_DIR failures = [] for py, npy in itertools.product(PY, NPY): if py == '33' and npy == '16': # Skip python3 + numpy16 continue os.environ['CONDA_PY'] = py os.environ['CONDA_NPY'] = npy try: subprocess.check_call(args.split()) except subprocess.CalledProcessError as e: failures.append((py, npy, e)) if failfast: break print("=" * 80) if failures: for py, npy, err in failures: print("Test failed for python %s numpy %s" % (py, npy)) print(err) else: print("All Passed") if __name__ == '__main__': main()
<commit_before>""" Uses conda to run and test all supported python + numpy versions. """ from __future__ import print_function import itertools import subprocess import os import sys NPY = '16', '17' PY = '26', '27', '33' RECIPE_DIR = "./buildscripts/condarecipe.local" def main(): failfast = '-v' in sys.argv[1:] args = "conda build %s --no-binstar-upload" % RECIPE_DIR failures = [] for py, npy in itertools.product(PY, NPY): if py == '33' and npy == '16': # Skip python3 + numpy16 continue os.environ['CONDA_PY'] = py os.environ['CONDA_NPY'] = npy try: subprocess.check_call(args.split()) except subprocess.CalledProcessError as e: failures.append((py, npy, e)) if failfast: break print("=" * 80) if failures: for py, npy, err in failures: print("Test failed for python %s numpy %s" % (py, npy)) print(err) else: print("All Passed") if __name__ == '__main__': main() <commit_msg>Add option for quick test on all python version<commit_after>
""" Uses conda to run and test all supported python + numpy versions. """ from __future__ import print_function import itertools import subprocess import os import sys if '-q' in sys.argv[1:]: NPY = '18', else: NPY = '16', '17', '18' PY = '26', '27', '33' RECIPE_DIR = "./buildscripts/condarecipe.local" def main(): failfast = '-v' in sys.argv[1:] args = "conda build %s --no-binstar-upload" % RECIPE_DIR failures = [] for py, npy in itertools.product(PY, NPY): if py == '33' and npy == '16': # Skip python3 + numpy16 continue os.environ['CONDA_PY'] = py os.environ['CONDA_NPY'] = npy try: subprocess.check_call(args.split()) except subprocess.CalledProcessError as e: failures.append((py, npy, e)) if failfast: break print("=" * 80) if failures: for py, npy, err in failures: print("Test failed for python %s numpy %s" % (py, npy)) print(err) else: print("All Passed") if __name__ == '__main__': main()
""" Uses conda to run and test all supported python + numpy versions. """ from __future__ import print_function import itertools import subprocess import os import sys NPY = '16', '17' PY = '26', '27', '33' RECIPE_DIR = "./buildscripts/condarecipe.local" def main(): failfast = '-v' in sys.argv[1:] args = "conda build %s --no-binstar-upload" % RECIPE_DIR failures = [] for py, npy in itertools.product(PY, NPY): if py == '33' and npy == '16': # Skip python3 + numpy16 continue os.environ['CONDA_PY'] = py os.environ['CONDA_NPY'] = npy try: subprocess.check_call(args.split()) except subprocess.CalledProcessError as e: failures.append((py, npy, e)) if failfast: break print("=" * 80) if failures: for py, npy, err in failures: print("Test failed for python %s numpy %s" % (py, npy)) print(err) else: print("All Passed") if __name__ == '__main__': main() Add option for quick test on all python version""" Uses conda to run and test all supported python + numpy versions. """ from __future__ import print_function import itertools import subprocess import os import sys if '-q' in sys.argv[1:]: NPY = '18', else: NPY = '16', '17', '18' PY = '26', '27', '33' RECIPE_DIR = "./buildscripts/condarecipe.local" def main(): failfast = '-v' in sys.argv[1:] args = "conda build %s --no-binstar-upload" % RECIPE_DIR failures = [] for py, npy in itertools.product(PY, NPY): if py == '33' and npy == '16': # Skip python3 + numpy16 continue os.environ['CONDA_PY'] = py os.environ['CONDA_NPY'] = npy try: subprocess.check_call(args.split()) except subprocess.CalledProcessError as e: failures.append((py, npy, e)) if failfast: break print("=" * 80) if failures: for py, npy, err in failures: print("Test failed for python %s numpy %s" % (py, npy)) print(err) else: print("All Passed") if __name__ == '__main__': main()
<commit_before>""" Uses conda to run and test all supported python + numpy versions. """ from __future__ import print_function import itertools import subprocess import os import sys NPY = '16', '17' PY = '26', '27', '33' RECIPE_DIR = "./buildscripts/condarecipe.local" def main(): failfast = '-v' in sys.argv[1:] args = "conda build %s --no-binstar-upload" % RECIPE_DIR failures = [] for py, npy in itertools.product(PY, NPY): if py == '33' and npy == '16': # Skip python3 + numpy16 continue os.environ['CONDA_PY'] = py os.environ['CONDA_NPY'] = npy try: subprocess.check_call(args.split()) except subprocess.CalledProcessError as e: failures.append((py, npy, e)) if failfast: break print("=" * 80) if failures: for py, npy, err in failures: print("Test failed for python %s numpy %s" % (py, npy)) print(err) else: print("All Passed") if __name__ == '__main__': main() <commit_msg>Add option for quick test on all python version<commit_after>""" Uses conda to run and test all supported python + numpy versions. """ from __future__ import print_function import itertools import subprocess import os import sys if '-q' in sys.argv[1:]: NPY = '18', else: NPY = '16', '17', '18' PY = '26', '27', '33' RECIPE_DIR = "./buildscripts/condarecipe.local" def main(): failfast = '-v' in sys.argv[1:] args = "conda build %s --no-binstar-upload" % RECIPE_DIR failures = [] for py, npy in itertools.product(PY, NPY): if py == '33' and npy == '16': # Skip python3 + numpy16 continue os.environ['CONDA_PY'] = py os.environ['CONDA_NPY'] = npy try: subprocess.check_call(args.split()) except subprocess.CalledProcessError as e: failures.append((py, npy, e)) if failfast: break print("=" * 80) if failures: for py, npy, err in failures: print("Test failed for python %s numpy %s" % (py, npy)) print(err) else: print("All Passed") if __name__ == '__main__': main()
a3e537dc7e91785bb45bfe4d5a788c26d52653b1
command_line/make_sphinx_html.py
command_line/make_sphinx_html.py
# LIBTBX_SET_DISPATCHER_NAME dev.xia2.make_sphinx_html

from __future__ import division
from libtbx import easy_run
import libtbx.load_env
import os.path as op
import shutil
import os
import sys

if (__name__ == "__main__") :
    xia2_dir = libtbx.env.find_in_repositories("xia2", optional=False)
    assert (xia2_dir is not None)
    dest_dir = op.join(xia2_dir, "html")
    if op.exists(dest_dir):
        shutil.rmtree(dest_dir)
    os.chdir(op.join(xia2_dir, "doc", "sphinx"))
    easy_run.call("make clean")
    easy_run.call("make html")
    print "Moving HTML pages to", dest_dir
    shutil.move("build/html", dest_dir)
# LIBTBX_SET_DISPATCHER_NAME dev.xia2.make_sphinx_html

from __future__ import division
import libtbx.load_env
from dials.util.procrunner import run_process
import shutil
import os

if (__name__ == "__main__") :
    xia2_dir = libtbx.env.find_in_repositories("xia2", optional=False)
    assert (xia2_dir is not None)
    dest_dir = os.path.join(xia2_dir, "html")
    if os.path.exists(dest_dir):
        shutil.rmtree(dest_dir)
    os.chdir(os.path.join(xia2_dir, "doc", "sphinx"))
    result = run_process(["make", "clean"])
    assert result['exitcode'] == 0, \
        'make clean failed with exit code %d' % result['exitcode']
    result = run_process(["make", "html"])
    assert result['exitcode'] == 0, \
        'make html failed with exit code %d' % result['exitcode']
    print "Moving HTML pages to", dest_dir
    shutil.move("build/html", dest_dir)
Check make exit codes and stop on error
Check make exit codes and stop on error
Python
bsd-3-clause
xia2/xia2,xia2/xia2
# LIBTBX_SET_DISPATCHER_NAME dev.xia2.make_sphinx_html

from __future__ import division
from libtbx import easy_run
import libtbx.load_env
import os.path as op
import shutil
import os
import sys

if (__name__ == "__main__") :
    xia2_dir = libtbx.env.find_in_repositories("xia2", optional=False)
    assert (xia2_dir is not None)
    dest_dir = op.join(xia2_dir, "html")
    if op.exists(dest_dir):
        shutil.rmtree(dest_dir)
    os.chdir(op.join(xia2_dir, "doc", "sphinx"))
    easy_run.call("make clean")
    easy_run.call("make html")
    print "Moving HTML pages to", dest_dir
    shutil.move("build/html", dest_dir)
Check make exit codes and stop on error
# LIBTBX_SET_DISPATCHER_NAME dev.xia2.make_sphinx_html

from __future__ import division
import libtbx.load_env
from dials.util.procrunner import run_process
import shutil
import os

if (__name__ == "__main__") :
    xia2_dir = libtbx.env.find_in_repositories("xia2", optional=False)
    assert (xia2_dir is not None)
    dest_dir = os.path.join(xia2_dir, "html")
    if os.path.exists(dest_dir):
        shutil.rmtree(dest_dir)
    os.chdir(os.path.join(xia2_dir, "doc", "sphinx"))
    result = run_process(["make", "clean"])
    assert result['exitcode'] == 0, \
        'make clean failed with exit code %d' % result['exitcode']
    result = run_process(["make", "html"])
    assert result['exitcode'] == 0, \
        'make html failed with exit code %d' % result['exitcode']
    print "Moving HTML pages to", dest_dir
    shutil.move("build/html", dest_dir)
<commit_before># LIBTBX_SET_DISPATCHER_NAME dev.xia2.make_sphinx_html

from __future__ import division
from libtbx import easy_run
import libtbx.load_env
import os.path as op
import shutil
import os
import sys

if (__name__ == "__main__") :
    xia2_dir = libtbx.env.find_in_repositories("xia2", optional=False)
    assert (xia2_dir is not None)
    dest_dir = op.join(xia2_dir, "html")
    if op.exists(dest_dir):
        shutil.rmtree(dest_dir)
    os.chdir(op.join(xia2_dir, "doc", "sphinx"))
    easy_run.call("make clean")
    easy_run.call("make html")
    print "Moving HTML pages to", dest_dir
    shutil.move("build/html", dest_dir)
<commit_msg>Check make exit codes and stop on error<commit_after>
# LIBTBX_SET_DISPATCHER_NAME dev.xia2.make_sphinx_html

from __future__ import division
import libtbx.load_env
from dials.util.procrunner import run_process
import shutil
import os

if (__name__ == "__main__") :
    xia2_dir = libtbx.env.find_in_repositories("xia2", optional=False)
    assert (xia2_dir is not None)
    dest_dir = os.path.join(xia2_dir, "html")
    if os.path.exists(dest_dir):
        shutil.rmtree(dest_dir)
    os.chdir(os.path.join(xia2_dir, "doc", "sphinx"))
    result = run_process(["make", "clean"])
    assert result['exitcode'] == 0, \
        'make clean failed with exit code %d' % result['exitcode']
    result = run_process(["make", "html"])
    assert result['exitcode'] == 0, \
        'make html failed with exit code %d' % result['exitcode']
    print "Moving HTML pages to", dest_dir
    shutil.move("build/html", dest_dir)
# LIBTBX_SET_DISPATCHER_NAME dev.xia2.make_sphinx_html

from __future__ import division
from libtbx import easy_run
import libtbx.load_env
import os.path as op
import shutil
import os
import sys

if (__name__ == "__main__") :
    xia2_dir = libtbx.env.find_in_repositories("xia2", optional=False)
    assert (xia2_dir is not None)
    dest_dir = op.join(xia2_dir, "html")
    if op.exists(dest_dir):
        shutil.rmtree(dest_dir)
    os.chdir(op.join(xia2_dir, "doc", "sphinx"))
    easy_run.call("make clean")
    easy_run.call("make html")
    print "Moving HTML pages to", dest_dir
    shutil.move("build/html", dest_dir)
Check make exit codes and stop on error# LIBTBX_SET_DISPATCHER_NAME dev.xia2.make_sphinx_html

from __future__ import division
import libtbx.load_env
from dials.util.procrunner import run_process
import shutil
import os

if (__name__ == "__main__") :
    xia2_dir = libtbx.env.find_in_repositories("xia2", optional=False)
    assert (xia2_dir is not None)
    dest_dir = os.path.join(xia2_dir, "html")
    if os.path.exists(dest_dir):
        shutil.rmtree(dest_dir)
    os.chdir(os.path.join(xia2_dir, "doc", "sphinx"))
    result = run_process(["make", "clean"])
    assert result['exitcode'] == 0, \
        'make clean failed with exit code %d' % result['exitcode']
    result = run_process(["make", "html"])
    assert result['exitcode'] == 0, \
        'make html failed with exit code %d' % result['exitcode']
    print "Moving HTML pages to", dest_dir
    shutil.move("build/html", dest_dir)
<commit_before># LIBTBX_SET_DISPATCHER_NAME dev.xia2.make_sphinx_html

from __future__ import division
from libtbx import easy_run
import libtbx.load_env
import os.path as op
import shutil
import os
import sys

if (__name__ == "__main__") :
    xia2_dir = libtbx.env.find_in_repositories("xia2", optional=False)
    assert (xia2_dir is not None)
    dest_dir = op.join(xia2_dir, "html")
    if op.exists(dest_dir):
        shutil.rmtree(dest_dir)
    os.chdir(op.join(xia2_dir, "doc", "sphinx"))
    easy_run.call("make clean")
    easy_run.call("make html")
    print "Moving HTML pages to", dest_dir
    shutil.move("build/html", dest_dir)
<commit_msg>Check make exit codes and stop on error<commit_after># LIBTBX_SET_DISPATCHER_NAME dev.xia2.make_sphinx_html

from __future__ import division
import libtbx.load_env
from dials.util.procrunner import run_process
import shutil
import os

if (__name__ == "__main__") :
    xia2_dir = libtbx.env.find_in_repositories("xia2", optional=False)
    assert (xia2_dir is not None)
    dest_dir = os.path.join(xia2_dir, "html")
    if os.path.exists(dest_dir):
        shutil.rmtree(dest_dir)
    os.chdir(os.path.join(xia2_dir, "doc", "sphinx"))
    result = run_process(["make", "clean"])
    assert result['exitcode'] == 0, \
        'make clean failed with exit code %d' % result['exitcode']
    result = run_process(["make", "html"])
    assert result['exitcode'] == 0, \
        'make html failed with exit code %d' % result['exitcode']
    print "Moving HTML pages to", dest_dir
    shutil.move("build/html", dest_dir)
72d45d64cace23950ef32e670e074ab45ec4d25b
designatedashboard/enabled/_1720_project_dns_panel.py
designatedashboard/enabled/_1720_project_dns_panel.py
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from designatedashboard import exceptions

# The name of the panel to be added to HORIZON_CONFIG. Required.
PANEL = 'domains'

# The name of the dashboard the PANEL associated with. Required.
PANEL_DASHBOARD = 'project'

# The name of the panel group the PANEL is associated with.
PANEL_GROUP = 'dns'

ADD_EXCEPTIONS = {
    'recoverable': exceptions.RECOVERABLE,
    'not_found': exceptions.NOT_FOUND,
    'unauthorized': exceptions.UNAUTHORIZED,
}

# Python panel class of the PANEL to be added.
ADD_PANEL = (
    'designatedashboard.dashboards.project.dns_domains.panel.DNSDomains')
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from designatedashboard import exceptions

# The name of the panel to be added to HORIZON_CONFIG. Required.
PANEL = 'domains'

# The name of the dashboard the PANEL associated with. Required.
PANEL_DASHBOARD = 'project'

# The name of the panel group the PANEL is associated with.
PANEL_GROUP = 'dns'

ADD_INSTALLED_APPS = ['designatedashboard']

ADD_EXCEPTIONS = {
    'recoverable': exceptions.RECOVERABLE,
    'not_found': exceptions.NOT_FOUND,
    'unauthorized': exceptions.UNAUTHORIZED,
}

# Python panel class of the PANEL to be added.
ADD_PANEL = (
    'designatedashboard.dashboards.project.dns_domains.panel.DNSDomains')
Add ADD_INSTALLED_APPS to 'enabled' file
Add ADD_INSTALLED_APPS to 'enabled' file

Django looks for translation catalogs from directories in INSTALLED_APPS.
To display translations for designate-dashboard, 'designatedashboard'
needs to be registered to INSTALLED_APPS.

(cherry picked from commit 1ed7893eb2ae10172a2f664fc05428c28c29099e)

Change-Id: Id5f0f0cb9cba455fededa622da04ed7bee313218
Closes-Bug: #1561202
Python
apache-2.0
openstack/designate-dashboard,openstack/designate-dashboard,openstack/designate-dashboard
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from designatedashboard import exceptions

# The name of the panel to be added to HORIZON_CONFIG. Required.
PANEL = 'domains'

# The name of the dashboard the PANEL associated with. Required.
PANEL_DASHBOARD = 'project'

# The name of the panel group the PANEL is associated with.
PANEL_GROUP = 'dns'

ADD_EXCEPTIONS = {
    'recoverable': exceptions.RECOVERABLE,
    'not_found': exceptions.NOT_FOUND,
    'unauthorized': exceptions.UNAUTHORIZED,
}

# Python panel class of the PANEL to be added.
ADD_PANEL = (
    'designatedashboard.dashboards.project.dns_domains.panel.DNSDomains')
Add ADD_INSTALLED_APPS to 'enabled' file

Django looks for translation catalogs from directories in INSTALLED_APPS.
To display translations for designate-dashboard, 'designatedashboard'
needs to be registered to INSTALLED_APPS.

(cherry picked from commit 1ed7893eb2ae10172a2f664fc05428c28c29099e)

Change-Id: Id5f0f0cb9cba455fededa622da04ed7bee313218
Closes-Bug: #1561202
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from designatedashboard import exceptions

# The name of the panel to be added to HORIZON_CONFIG. Required.
PANEL = 'domains'

# The name of the dashboard the PANEL associated with. Required.
PANEL_DASHBOARD = 'project'

# The name of the panel group the PANEL is associated with.
PANEL_GROUP = 'dns'

ADD_INSTALLED_APPS = ['designatedashboard']

ADD_EXCEPTIONS = {
    'recoverable': exceptions.RECOVERABLE,
    'not_found': exceptions.NOT_FOUND,
    'unauthorized': exceptions.UNAUTHORIZED,
}

# Python panel class of the PANEL to be added.
ADD_PANEL = (
    'designatedashboard.dashboards.project.dns_domains.panel.DNSDomains')
<commit_before># Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from designatedashboard import exceptions

# The name of the panel to be added to HORIZON_CONFIG. Required.
PANEL = 'domains'

# The name of the dashboard the PANEL associated with. Required.
PANEL_DASHBOARD = 'project'

# The name of the panel group the PANEL is associated with.
PANEL_GROUP = 'dns'

ADD_EXCEPTIONS = {
    'recoverable': exceptions.RECOVERABLE,
    'not_found': exceptions.NOT_FOUND,
    'unauthorized': exceptions.UNAUTHORIZED,
}

# Python panel class of the PANEL to be added.
ADD_PANEL = (
    'designatedashboard.dashboards.project.dns_domains.panel.DNSDomains')
<commit_msg>Add ADD_INSTALLED_APPS to 'enabled' file

Django looks for translation catalogs from directories in INSTALLED_APPS.
To display translations for designate-dashboard, 'designatedashboard'
needs to be registered to INSTALLED_APPS.

(cherry picked from commit 1ed7893eb2ae10172a2f664fc05428c28c29099e)

Change-Id: Id5f0f0cb9cba455fededa622da04ed7bee313218
Closes-Bug: #1561202<commit_after>
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from designatedashboard import exceptions

# The name of the panel to be added to HORIZON_CONFIG. Required.
PANEL = 'domains'

# The name of the dashboard the PANEL associated with. Required.
PANEL_DASHBOARD = 'project'

# The name of the panel group the PANEL is associated with.
PANEL_GROUP = 'dns'

ADD_INSTALLED_APPS = ['designatedashboard']

ADD_EXCEPTIONS = {
    'recoverable': exceptions.RECOVERABLE,
    'not_found': exceptions.NOT_FOUND,
    'unauthorized': exceptions.UNAUTHORIZED,
}

# Python panel class of the PANEL to be added.
ADD_PANEL = (
    'designatedashboard.dashboards.project.dns_domains.panel.DNSDomains')
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from designatedashboard import exceptions

# The name of the panel to be added to HORIZON_CONFIG. Required.
PANEL = 'domains'

# The name of the dashboard the PANEL associated with. Required.
PANEL_DASHBOARD = 'project'

# The name of the panel group the PANEL is associated with.
PANEL_GROUP = 'dns'

ADD_EXCEPTIONS = {
    'recoverable': exceptions.RECOVERABLE,
    'not_found': exceptions.NOT_FOUND,
    'unauthorized': exceptions.UNAUTHORIZED,
}

# Python panel class of the PANEL to be added.
ADD_PANEL = (
    'designatedashboard.dashboards.project.dns_domains.panel.DNSDomains')
Add ADD_INSTALLED_APPS to 'enabled' file

Django looks for translation catalogs from directories in INSTALLED_APPS.
To display translations for designate-dashboard, 'designatedashboard'
needs to be registered to INSTALLED_APPS.

(cherry picked from commit 1ed7893eb2ae10172a2f664fc05428c28c29099e)

Change-Id: Id5f0f0cb9cba455fededa622da04ed7bee313218
Closes-Bug: #1561202# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from designatedashboard import exceptions

# The name of the panel to be added to HORIZON_CONFIG. Required.
PANEL = 'domains'

# The name of the dashboard the PANEL associated with. Required.
PANEL_DASHBOARD = 'project'

# The name of the panel group the PANEL is associated with.
PANEL_GROUP = 'dns'

ADD_INSTALLED_APPS = ['designatedashboard']

ADD_EXCEPTIONS = {
    'recoverable': exceptions.RECOVERABLE,
    'not_found': exceptions.NOT_FOUND,
    'unauthorized': exceptions.UNAUTHORIZED,
}

# Python panel class of the PANEL to be added.
ADD_PANEL = (
    'designatedashboard.dashboards.project.dns_domains.panel.DNSDomains')
<commit_before># Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from designatedashboard import exceptions

# The name of the panel to be added to HORIZON_CONFIG. Required.
PANEL = 'domains'

# The name of the dashboard the PANEL associated with. Required.
PANEL_DASHBOARD = 'project'

# The name of the panel group the PANEL is associated with.
PANEL_GROUP = 'dns'

ADD_EXCEPTIONS = {
    'recoverable': exceptions.RECOVERABLE,
    'not_found': exceptions.NOT_FOUND,
    'unauthorized': exceptions.UNAUTHORIZED,
}

# Python panel class of the PANEL to be added.
ADD_PANEL = (
    'designatedashboard.dashboards.project.dns_domains.panel.DNSDomains')
<commit_msg>Add ADD_INSTALLED_APPS to 'enabled' file

Django looks for translation catalogs from directories in INSTALLED_APPS.
To display translations for designate-dashboard, 'designatedashboard'
needs to be registered to INSTALLED_APPS.

(cherry picked from commit 1ed7893eb2ae10172a2f664fc05428c28c29099e)

Change-Id: Id5f0f0cb9cba455fededa622da04ed7bee313218
Closes-Bug: #1561202<commit_after># Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from designatedashboard import exceptions

# The name of the panel to be added to HORIZON_CONFIG. Required.
PANEL = 'domains'

# The name of the dashboard the PANEL associated with. Required.
PANEL_DASHBOARD = 'project'

# The name of the panel group the PANEL is associated with.
PANEL_GROUP = 'dns'

ADD_INSTALLED_APPS = ['designatedashboard']

ADD_EXCEPTIONS = {
    'recoverable': exceptions.RECOVERABLE,
    'not_found': exceptions.NOT_FOUND,
    'unauthorized': exceptions.UNAUTHORIZED,
}

# Python panel class of the PANEL to be added.
ADD_PANEL = (
    'designatedashboard.dashboards.project.dns_domains.panel.DNSDomains')
14b0ddc6fccf54a430caffdb10bad3a8cbbd2bc1
ereuse_devicehub/scripts/updates/snapshot_software.py
ereuse_devicehub/scripts/updates/snapshot_software.py
from pydash import find

from ereuse_devicehub.resources.device.domain import DeviceDomain
from ereuse_devicehub.resources.event.device import DeviceEventDomain
from ereuse_devicehub.scripts.updates.update import Update


class SnapshotSoftware(Update):
    """
    Changes the values of SnapshotSoftware and adds it to the materialized one in devices
    """

    def execute(self, database):
        SNAPSHOT_SOFTWARE = {
            'DDI': 'Workbench',
            'Scan': 'AndroidApp',
            'DeviceHubClient': 'Web'
        }
        for snapshot in DeviceEventDomain.get({'@type': "devices:Snapshot"}):
            snapshot['snapshotSoftware'] = SNAPSHOT_SOFTWARE[snapshot.get('snapshotSoftware', 'DDI')]
            DeviceEventDomain.update_one_raw(snapshot['_id'], {'$set': {'snapshotSoftware': snapshot['snapshotSoftware']}})
            for device in DeviceDomain.get({'events._id': snapshot['_id']}):
                materialized_snapshot = find(device['events'], lambda event: event['_id'] == snapshot['_id'])
                materialized_snapshot['snapshotSoftware'] = snapshot['snapshotSoftware']
                DeviceDomain.update_one_raw(device['_id'], {'$set': {'events': device['events']}})
from contextlib import suppress

from pydash import find

from ereuse_devicehub.resources.device.domain import DeviceDomain
from ereuse_devicehub.resources.event.device import DeviceEventDomain
from ereuse_devicehub.scripts.updates.update import Update


class SnapshotSoftware(Update):
    """
    Changes the values of SnapshotSoftware and adds it to the materialized one in devices
    """

    def execute(self, database):
        SNAPSHOT_SOFTWARE = {
            'DDI': 'Workbench',
            'Scan': 'AndroidApp',
            'DeviceHubClient': 'Web'
        }
        for snapshot in DeviceEventDomain.get({'@type': "devices:Snapshot"}):
            with suppress(KeyError):
                snapshot['snapshotSoftware'] = SNAPSHOT_SOFTWARE[snapshot.get('snapshotSoftware', 'DDI')]
                DeviceEventDomain.update_one_raw(snapshot['_id'], {'$set': {'snapshotSoftware': snapshot['snapshotSoftware']}})
                for device in DeviceDomain.get({'events._id': snapshot['_id']}):
                    materialized_snapshot = find(device['events'], lambda event: event['_id'] == snapshot['_id'])
                    materialized_snapshot['snapshotSoftware'] = snapshot['snapshotSoftware']
                    DeviceDomain.update_one_raw(device['_id'], {'$set': {'events': device['events']}})
Fix getting snapshotsoftware on old snapshots
Fix getting snapshotsoftware on old snapshots
Python
agpl-3.0
eReuse/DeviceHub,eReuse/DeviceHub
from pydash import find

from ereuse_devicehub.resources.device.domain import DeviceDomain
from ereuse_devicehub.resources.event.device import DeviceEventDomain
from ereuse_devicehub.scripts.updates.update import Update


class SnapshotSoftware(Update):
    """
    Changes the values of SnapshotSoftware and adds it to the materialized one in devices
    """

    def execute(self, database):
        SNAPSHOT_SOFTWARE = {
            'DDI': 'Workbench',
            'Scan': 'AndroidApp',
            'DeviceHubClient': 'Web'
        }
        for snapshot in DeviceEventDomain.get({'@type': "devices:Snapshot"}):
            snapshot['snapshotSoftware'] = SNAPSHOT_SOFTWARE[snapshot.get('snapshotSoftware', 'DDI')]
            DeviceEventDomain.update_one_raw(snapshot['_id'], {'$set': {'snapshotSoftware': snapshot['snapshotSoftware']}})
            for device in DeviceDomain.get({'events._id': snapshot['_id']}):
                materialized_snapshot = find(device['events'], lambda event: event['_id'] == snapshot['_id'])
                materialized_snapshot['snapshotSoftware'] = snapshot['snapshotSoftware']
                DeviceDomain.update_one_raw(device['_id'], {'$set': {'events': device['events']}})
Fix getting snapshotsoftware on old snapshots
from contextlib import suppress

from pydash import find

from ereuse_devicehub.resources.device.domain import DeviceDomain
from ereuse_devicehub.resources.event.device import DeviceEventDomain
from ereuse_devicehub.scripts.updates.update import Update


class SnapshotSoftware(Update):
    """
    Changes the values of SnapshotSoftware and adds it to the materialized one in devices
    """

    def execute(self, database):
        SNAPSHOT_SOFTWARE = {
            'DDI': 'Workbench',
            'Scan': 'AndroidApp',
            'DeviceHubClient': 'Web'
        }
        for snapshot in DeviceEventDomain.get({'@type': "devices:Snapshot"}):
            with suppress(KeyError):
                snapshot['snapshotSoftware'] = SNAPSHOT_SOFTWARE[snapshot.get('snapshotSoftware', 'DDI')]
                DeviceEventDomain.update_one_raw(snapshot['_id'], {'$set': {'snapshotSoftware': snapshot['snapshotSoftware']}})
                for device in DeviceDomain.get({'events._id': snapshot['_id']}):
                    materialized_snapshot = find(device['events'], lambda event: event['_id'] == snapshot['_id'])
                    materialized_snapshot['snapshotSoftware'] = snapshot['snapshotSoftware']
                    DeviceDomain.update_one_raw(device['_id'], {'$set': {'events': device['events']}})
<commit_before>from pydash import find

from ereuse_devicehub.resources.device.domain import DeviceDomain
from ereuse_devicehub.resources.event.device import DeviceEventDomain
from ereuse_devicehub.scripts.updates.update import Update


class SnapshotSoftware(Update):
    """
    Changes the values of SnapshotSoftware and adds it to the materialized one in devices
    """

    def execute(self, database):
        SNAPSHOT_SOFTWARE = {
            'DDI': 'Workbench',
            'Scan': 'AndroidApp',
            'DeviceHubClient': 'Web'
        }
        for snapshot in DeviceEventDomain.get({'@type': "devices:Snapshot"}):
            snapshot['snapshotSoftware'] = SNAPSHOT_SOFTWARE[snapshot.get('snapshotSoftware', 'DDI')]
            DeviceEventDomain.update_one_raw(snapshot['_id'], {'$set': {'snapshotSoftware': snapshot['snapshotSoftware']}})
            for device in DeviceDomain.get({'events._id': snapshot['_id']}):
                materialized_snapshot = find(device['events'], lambda event: event['_id'] == snapshot['_id'])
                materialized_snapshot['snapshotSoftware'] = snapshot['snapshotSoftware']
                DeviceDomain.update_one_raw(device['_id'], {'$set': {'events': device['events']}})
<commit_msg>Fix getting snapshotsoftware on old snapshots<commit_after>
from contextlib import suppress

from pydash import find

from ereuse_devicehub.resources.device.domain import DeviceDomain
from ereuse_devicehub.resources.event.device import DeviceEventDomain
from ereuse_devicehub.scripts.updates.update import Update


class SnapshotSoftware(Update):
    """
    Changes the values of SnapshotSoftware and adds it to the materialized one in devices
    """

    def execute(self, database):
        SNAPSHOT_SOFTWARE = {
            'DDI': 'Workbench',
            'Scan': 'AndroidApp',
            'DeviceHubClient': 'Web'
        }
        for snapshot in DeviceEventDomain.get({'@type': "devices:Snapshot"}):
            with suppress(KeyError):
                snapshot['snapshotSoftware'] = SNAPSHOT_SOFTWARE[snapshot.get('snapshotSoftware', 'DDI')]
                DeviceEventDomain.update_one_raw(snapshot['_id'], {'$set': {'snapshotSoftware': snapshot['snapshotSoftware']}})
                for device in DeviceDomain.get({'events._id': snapshot['_id']}):
                    materialized_snapshot = find(device['events'], lambda event: event['_id'] == snapshot['_id'])
                    materialized_snapshot['snapshotSoftware'] = snapshot['snapshotSoftware']
                    DeviceDomain.update_one_raw(device['_id'], {'$set': {'events': device['events']}})
from pydash import find

from ereuse_devicehub.resources.device.domain import DeviceDomain
from ereuse_devicehub.resources.event.device import DeviceEventDomain
from ereuse_devicehub.scripts.updates.update import Update


class SnapshotSoftware(Update):
    """
    Changes the values of SnapshotSoftware and adds it to the materialized one in devices
    """

    def execute(self, database):
        SNAPSHOT_SOFTWARE = {
            'DDI': 'Workbench',
            'Scan': 'AndroidApp',
            'DeviceHubClient': 'Web'
        }
        for snapshot in DeviceEventDomain.get({'@type': "devices:Snapshot"}):
            snapshot['snapshotSoftware'] = SNAPSHOT_SOFTWARE[snapshot.get('snapshotSoftware', 'DDI')]
            DeviceEventDomain.update_one_raw(snapshot['_id'], {'$set': {'snapshotSoftware': snapshot['snapshotSoftware']}})
            for device in DeviceDomain.get({'events._id': snapshot['_id']}):
                materialized_snapshot = find(device['events'], lambda event: event['_id'] == snapshot['_id'])
                materialized_snapshot['snapshotSoftware'] = snapshot['snapshotSoftware']
                DeviceDomain.update_one_raw(device['_id'], {'$set': {'events': device['events']}})
Fix getting snapshotsoftware on old snapshotsfrom contextlib import suppress

from pydash import find

from ereuse_devicehub.resources.device.domain import DeviceDomain
from ereuse_devicehub.resources.event.device import DeviceEventDomain
from ereuse_devicehub.scripts.updates.update import Update


class SnapshotSoftware(Update):
    """
    Changes the values of SnapshotSoftware and adds it to the materialized one in devices
    """

    def execute(self, database):
        SNAPSHOT_SOFTWARE = {
            'DDI': 'Workbench',
            'Scan': 'AndroidApp',
            'DeviceHubClient': 'Web'
        }
        for snapshot in DeviceEventDomain.get({'@type': "devices:Snapshot"}):
            with suppress(KeyError):
                snapshot['snapshotSoftware'] = SNAPSHOT_SOFTWARE[snapshot.get('snapshotSoftware', 'DDI')]
                DeviceEventDomain.update_one_raw(snapshot['_id'], {'$set': {'snapshotSoftware': snapshot['snapshotSoftware']}})
                for device in DeviceDomain.get({'events._id': snapshot['_id']}):
                    materialized_snapshot = find(device['events'], lambda event: event['_id'] == snapshot['_id'])
                    materialized_snapshot['snapshotSoftware'] = snapshot['snapshotSoftware']
                    DeviceDomain.update_one_raw(device['_id'], {'$set': {'events': device['events']}})
<commit_before>from pydash import find

from ereuse_devicehub.resources.device.domain import DeviceDomain
from ereuse_devicehub.resources.event.device import DeviceEventDomain
from ereuse_devicehub.scripts.updates.update import Update


class SnapshotSoftware(Update):
    """
    Changes the values of SnapshotSoftware and adds it to the materialized one in devices
    """

    def execute(self, database):
        SNAPSHOT_SOFTWARE = {
            'DDI': 'Workbench',
            'Scan': 'AndroidApp',
            'DeviceHubClient': 'Web'
        }
        for snapshot in DeviceEventDomain.get({'@type': "devices:Snapshot"}):
            snapshot['snapshotSoftware'] = SNAPSHOT_SOFTWARE[snapshot.get('snapshotSoftware', 'DDI')]
            DeviceEventDomain.update_one_raw(snapshot['_id'], {'$set': {'snapshotSoftware': snapshot['snapshotSoftware']}})
            for device in DeviceDomain.get({'events._id': snapshot['_id']}):
                materialized_snapshot = find(device['events'], lambda event: event['_id'] == snapshot['_id'])
                materialized_snapshot['snapshotSoftware'] = snapshot['snapshotSoftware']
                DeviceDomain.update_one_raw(device['_id'], {'$set': {'events': device['events']}})
<commit_msg>Fix getting snapshotsoftware on old snapshots<commit_after>from contextlib import suppress

from pydash import find

from ereuse_devicehub.resources.device.domain import DeviceDomain
from ereuse_devicehub.resources.event.device import DeviceEventDomain
from ereuse_devicehub.scripts.updates.update import Update


class SnapshotSoftware(Update):
    """
    Changes the values of SnapshotSoftware and adds it to the materialized one in devices
    """

    def execute(self, database):
        SNAPSHOT_SOFTWARE = {
            'DDI': 'Workbench',
            'Scan': 'AndroidApp',
            'DeviceHubClient': 'Web'
        }
        for snapshot in DeviceEventDomain.get({'@type': "devices:Snapshot"}):
            with suppress(KeyError):
                snapshot['snapshotSoftware'] = SNAPSHOT_SOFTWARE[snapshot.get('snapshotSoftware', 'DDI')]
                DeviceEventDomain.update_one_raw(snapshot['_id'], {'$set': {'snapshotSoftware': snapshot['snapshotSoftware']}})
                for device in DeviceDomain.get({'events._id': snapshot['_id']}):
                    materialized_snapshot = find(device['events'], lambda event: event['_id'] == snapshot['_id'])
                    materialized_snapshot['snapshotSoftware'] = snapshot['snapshotSoftware']
                    DeviceDomain.update_one_raw(device['_id'], {'$set': {'events': device['events']}})
ae89d2c6a93929ea77fdd1cf0c7685c75f84fd54
roman-numerals/roman_numerals.py
roman-numerals/roman_numerals.py
# File: roman_numerals.py
# Purpose: Function to convert from normal numbers to Roman Numerals.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Saturday 17 September 2016, 03:30 PM

numerals = tuple(zip(
    (1000, 900, 500, 400, 100, 90, 50, 40, 10, 9, 5, 4, 1),
    ('M', 'CM', 'D', 'CD', 'C', 'XC', 'L', 'XL', 'X', 'IX', 'V', 'IV', 'I')
))


def roman_numerals(num):
    roman = []
    for number, numeral in numerals:
        swing = num // number
        roman.append(numeral * swing)
        num -= number * swing
    return ''.join(roman)
# File: roman_numerals.py
# Purpose: Function to convert from normal numbers to Roman Numerals.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Saturday 17 September 2016, 03:30 PM

numerals = tuple(zip(
    (1000, 900, 500, 400, 100, 90, 50, 40, 10, 9, 5, 4, 1),
    ('M', 'CM', 'D', 'CD', 'C', 'XC', 'L', 'XL', 'X', 'IX', 'V', 'IV', 'I')
))


def numeral(num):
    roman = []
    for number, numeral in numerals:
        swing = num // number
        roman.append(numeral * swing)
        num -= number * swing
    return ''.join(roman)
Rectify AttributeError: 'module' object has no attribute 'numeral'
Rectify AttributeError: 'module' object has no attribute 'numeral'
Python
mit
amalshehu/exercism-python
# File: roman_numerals.py
# Purpose: Function to convert from normal numbers to Roman Numerals.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Saturday 17 September 2016, 03:30 PM

numerals = tuple(zip(
    (1000, 900, 500, 400, 100, 90, 50, 40, 10, 9, 5, 4, 1),
    ('M', 'CM', 'D', 'CD', 'C', 'XC', 'L', 'XL', 'X', 'IX', 'V', 'IV', 'I')
))


def roman_numerals(num):
    roman = []
    for number, numeral in numerals:
        swing = num // number
        roman.append(numeral * swing)
        num -= number * swing
    return ''.join(roman)
Rectify AttributeError: 'module' object has no attribute 'numeral'
# File: roman_numerals.py
# Purpose: Function to convert from normal numbers to Roman Numerals.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Saturday 17 September 2016, 03:30 PM

numerals = tuple(zip(
    (1000, 900, 500, 400, 100, 90, 50, 40, 10, 9, 5, 4, 1),
    ('M', 'CM', 'D', 'CD', 'C', 'XC', 'L', 'XL', 'X', 'IX', 'V', 'IV', 'I')
))


def numeral(num):
    roman = []
    for number, numeral in numerals:
        swing = num // number
        roman.append(numeral * swing)
        num -= number * swing
    return ''.join(roman)
<commit_before># File: roman_numerals.py
# Purpose: Function to convert from normal numbers to Roman Numerals.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Saturday 17 September 2016, 03:30 PM

numerals = tuple(zip(
    (1000, 900, 500, 400, 100, 90, 50, 40, 10, 9, 5, 4, 1),
    ('M', 'CM', 'D', 'CD', 'C', 'XC', 'L', 'XL', 'X', 'IX', 'V', 'IV', 'I')
))


def roman_numerals(num):
    roman = []
    for number, numeral in numerals:
        swing = num // number
        roman.append(numeral * swing)
        num -= number * swing
    return ''.join(roman)
<commit_msg>Rectify AttributeError: 'module' object has no attribute 'numeral'<commit_after>
# File: roman_numerals.py
# Purpose: Function to convert from normal numbers to Roman Numerals.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Saturday 17 September 2016, 03:30 PM

numerals = tuple(zip(
    (1000, 900, 500, 400, 100, 90, 50, 40, 10, 9, 5, 4, 1),
    ('M', 'CM', 'D', 'CD', 'C', 'XC', 'L', 'XL', 'X', 'IX', 'V', 'IV', 'I')
))


def numeral(num):
    roman = []
    for number, numeral in numerals:
        swing = num // number
        roman.append(numeral * swing)
        num -= number * swing
    return ''.join(roman)
# File: roman_numerals.py
# Purpose: Function to convert from normal numbers to Roman Numerals.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Saturday 17 September 2016, 03:30 PM

numerals = tuple(zip(
    (1000, 900, 500, 400, 100, 90, 50, 40, 10, 9, 5, 4, 1),
    ('M', 'CM', 'D', 'CD', 'C', 'XC', 'L', 'XL', 'X', 'IX', 'V', 'IV', 'I')
))


def roman_numerals(num):
    roman = []
    for number, numeral in numerals:
        swing = num // number
        roman.append(numeral * swing)
        num -= number * swing
    return ''.join(roman)
Rectify AttributeError: 'module' object has no attribute 'numeral'# File: roman_numerals.py
# Purpose: Function to convert from normal numbers to Roman Numerals.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Saturday 17 September 2016, 03:30 PM

numerals = tuple(zip(
    (1000, 900, 500, 400, 100, 90, 50, 40, 10, 9, 5, 4, 1),
    ('M', 'CM', 'D', 'CD', 'C', 'XC', 'L', 'XL', 'X', 'IX', 'V', 'IV', 'I')
))


def numeral(num):
    roman = []
    for number, numeral in numerals:
        swing = num // number
        roman.append(numeral * swing)
        num -= number * swing
    return ''.join(roman)
<commit_before># File: roman_numerals.py
# Purpose: Function to convert from normal numbers to Roman Numerals.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Saturday 17 September 2016, 03:30 PM

numerals = tuple(zip(
    (1000, 900, 500, 400, 100, 90, 50, 40, 10, 9, 5, 4, 1),
    ('M', 'CM', 'D', 'CD', 'C', 'XC', 'L', 'XL', 'X', 'IX', 'V', 'IV', 'I')
))


def roman_numerals(num):
    roman = []
    for number, numeral in numerals:
        swing = num // number
        roman.append(numeral * swing)
        num -= number * swing
    return ''.join(roman)
<commit_msg>Rectify AttributeError: 'module' object has no attribute 'numeral'<commit_after># File: roman_numerals.py
# Purpose: Function to convert from normal numbers to Roman Numerals.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Saturday 17 September 2016, 03:30 PM

numerals = tuple(zip(
    (1000, 900, 500, 400, 100, 90, 50, 40, 10, 9, 5, 4, 1),
    ('M', 'CM', 'D', 'CD', 'C', 'XC', 'L', 'XL', 'X', 'IX', 'V', 'IV', 'I')
))


def numeral(num):
    roman = []
    for number, numeral in numerals:
        swing = num // number
        roman.append(numeral * swing)
        num -= number * swing
    return ''.join(roman)
35325168839234efe98a927fda76548de553d666
test/test_pipeline.py
test/test_pipeline.py
from pype.lexer import lexer
from pype.pipeline import Pipeline

example_error_ppl='test/samples/example_error.ppl'
example0_ppl='test/samples/example0.ppl'
example0_token='test/samples/example0.tokens'
example1_ppl='test/samples/example1.ppl'
example1_token='test/samples/example1.tokens'

def test_lexer():
    lexer.input(open(example1_ppl).read())
    output=open(example1_token)
    for token, line in zip(lexer, output):
        assert str(token) == line.strip()
    lexer.input(open(example_error_ppl).read())
    for token in lexer:
        print (token)

def test_example0():
    t=Pipeline(example0_ppl)

def test_example1():
    t=Pipeline(example1_ppl)
from pytest import raises

from pype.lexer import lexer
from pype.pipeline import Pipeline

example_error_ppl='test/samples/example_error.ppl'
example0_ppl='test/samples/example0.ppl'
example0_token='test/samples/example0.tokens'
example1_ppl='test/samples/example1.ppl'
example1_token='test/samples/example1.tokens'

def test_lexer():
    lexer.input(open(example1_ppl).read())
    output=open(example1_token)
    for token, line in zip(lexer, output):
        assert str(token) == line.strip()
    lexer.input(open(example_error_ppl).read())
    for token in lexer:
        print (token)

def test_example0():
    with raises(PypeSyntaxError):
        t=Pipeline(example0_ppl)

def test_example1():
    with raises(PypeSyntaxError):
        t=Pipeline(example1_ppl)
Raise pypeSyntaxError in pype test
Raise pypeSyntaxError in pype test
Python
mit
cs207-project/TimeSeries,cs207-project/TimeSeries,cs207-project/TimeSeries,cs207-project/TimeSeries
from pype.lexer import lexer
from pype.pipeline import Pipeline

example_error_ppl='test/samples/example_error.ppl'
example0_ppl='test/samples/example0.ppl'
example0_token='test/samples/example0.tokens'
example1_ppl='test/samples/example1.ppl'
example1_token='test/samples/example1.tokens'

def test_lexer():
    lexer.input(open(example1_ppl).read())
    output=open(example1_token)
    for token, line in zip(lexer, output):
        assert str(token) == line.strip()
    lexer.input(open(example_error_ppl).read())
    for token in lexer:
        print (token)

def test_example0():
    t=Pipeline(example0_ppl)

def test_example1():
    t=Pipeline(example1_ppl)
Raise pypeSyntaxError in pype test
from pytest import raises

from pype.lexer import lexer
from pype.pipeline import Pipeline

example_error_ppl='test/samples/example_error.ppl'
example0_ppl='test/samples/example0.ppl'
example0_token='test/samples/example0.tokens'
example1_ppl='test/samples/example1.ppl'
example1_token='test/samples/example1.tokens'

def test_lexer():
    lexer.input(open(example1_ppl).read())
    output=open(example1_token)
    for token, line in zip(lexer, output):
        assert str(token) == line.strip()
    lexer.input(open(example_error_ppl).read())
    for token in lexer:
        print (token)

def test_example0():
    with raises(PypeSyntaxError):
        t=Pipeline(example0_ppl)

def test_example1():
    with raises(PypeSyntaxError):
        t=Pipeline(example1_ppl)
<commit_before>from pype.lexer import lexer
from pype.pipeline import Pipeline

example_error_ppl='test/samples/example_error.ppl'
example0_ppl='test/samples/example0.ppl'
example0_token='test/samples/example0.tokens'
example1_ppl='test/samples/example1.ppl'
example1_token='test/samples/example1.tokens'

def test_lexer():
    lexer.input(open(example1_ppl).read())
    output=open(example1_token)
    for token, line in zip(lexer, output):
        assert str(token) == line.strip()
    lexer.input(open(example_error_ppl).read())
    for token in lexer:
        print (token)

def test_example0():
    t=Pipeline(example0_ppl)

def test_example1():
    t=Pipeline(example1_ppl)
<commit_msg>Raise pypeSyntaxError in pype test<commit_after>
from pytest import raises

from pype.lexer import lexer
from pype.pipeline import Pipeline

example_error_ppl='test/samples/example_error.ppl'
example0_ppl='test/samples/example0.ppl'
example0_token='test/samples/example0.tokens'
example1_ppl='test/samples/example1.ppl'
example1_token='test/samples/example1.tokens'

def test_lexer():
    lexer.input(open(example1_ppl).read())
    output=open(example1_token)
    for token, line in zip(lexer, output):
        assert str(token) == line.strip()
    lexer.input(open(example_error_ppl).read())
    for token in lexer:
        print (token)

def test_example0():
    with raises(PypeSyntaxError):
        t=Pipeline(example0_ppl)

def test_example1():
    with raises(PypeSyntaxError):
        t=Pipeline(example1_ppl)
from pype.lexer import lexer
from pype.pipeline import Pipeline

example_error_ppl='test/samples/example_error.ppl'
example0_ppl='test/samples/example0.ppl'
example0_token='test/samples/example0.tokens'
example1_ppl='test/samples/example1.ppl'
example1_token='test/samples/example1.tokens'

def test_lexer():
    lexer.input(open(example1_ppl).read())
    output=open(example1_token)
    for token, line in zip(lexer, output):
        assert str(token) == line.strip()
    lexer.input(open(example_error_ppl).read())
    for token in lexer:
        print (token)

def test_example0():
    t=Pipeline(example0_ppl)

def test_example1():
    t=Pipeline(example1_ppl)
Raise pypeSyntaxError in pype testfrom pytest import raises

from pype.lexer import lexer
from pype.pipeline import Pipeline

example_error_ppl='test/samples/example_error.ppl'
example0_ppl='test/samples/example0.ppl'
example0_token='test/samples/example0.tokens'
example1_ppl='test/samples/example1.ppl'
example1_token='test/samples/example1.tokens'

def test_lexer():
    lexer.input(open(example1_ppl).read())
    output=open(example1_token)
    for token, line in zip(lexer, output):
        assert str(token) == line.strip()
    lexer.input(open(example_error_ppl).read())
    for token in lexer:
        print (token)

def test_example0():
    with raises(PypeSyntaxError):
        t=Pipeline(example0_ppl)

def test_example1():
    with raises(PypeSyntaxError):
        t=Pipeline(example1_ppl)
<commit_before>from pype.lexer import lexer
from pype.pipeline import Pipeline

example_error_ppl='test/samples/example_error.ppl'
example0_ppl='test/samples/example0.ppl'
example0_token='test/samples/example0.tokens'
example1_ppl='test/samples/example1.ppl'
example1_token='test/samples/example1.tokens'

def test_lexer():
    lexer.input(open(example1_ppl).read())
    output=open(example1_token)
    for token, line in zip(lexer, output):
        assert str(token) == line.strip()
    lexer.input(open(example_error_ppl).read())
    for token in lexer:
        print (token)

def test_example0():
    t=Pipeline(example0_ppl)

def test_example1():
    t=Pipeline(example1_ppl)
<commit_msg>Raise pypeSyntaxError in pype test<commit_after>from pytest import raises

from pype.lexer import lexer
from pype.pipeline import Pipeline

example_error_ppl='test/samples/example_error.ppl'
example0_ppl='test/samples/example0.ppl'
example0_token='test/samples/example0.tokens'
example1_ppl='test/samples/example1.ppl'
example1_token='test/samples/example1.tokens'

def test_lexer():
    lexer.input(open(example1_ppl).read())
    output=open(example1_token)
    for token, line in zip(lexer, output):
        assert str(token) == line.strip()
    lexer.input(open(example_error_ppl).read())
    for token in lexer:
        print (token)

def test_example0():
    with raises(PypeSyntaxError):
        t=Pipeline(example0_ppl)

def test_example1():
    with raises(PypeSyntaxError):
        t=Pipeline(example1_ppl)
ee68ed703786afadface47a3f276cefae17c583d
test/unit/conftest.py
test/unit/conftest.py
# pyOCD debugger
# Copyright (c) 2016 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import pytest
import logging

from .mockcore import MockCore

@pytest.fixture(scope='function')
def mockcore():
    return MockCore()

# Ignore semihosting test that currently crashes on Travis
collect_ignore = ["test_semihosting.py"]

# pyOCD debugger
# Copyright (c) 2016 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import pytest
import logging

from .mockcore import MockCore

@pytest.fixture(scope='function')
def mockcore():
    return MockCore()

# Ignore semihosting test that currently crashes on Travis
collect_ignore = [
    "test_semihosting.py",
    "test_pack.py"
]
Disable test_pack.py unit test until it works on Travis-CI and for Python 2.7.
Disable test_pack.py unit test until it works on Travis-CI and for Python 2.7.
Python
apache-2.0
pyocd/pyOCD,mbedmicro/pyOCD,mesheven/pyOCD,mbedmicro/pyOCD,flit/pyOCD,mesheven/pyOCD,flit/pyOCD,pyocd/pyOCD,mbedmicro/pyOCD,mesheven/pyOCD
# pyOCD debugger
# Copyright (c) 2016 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import pytest
import logging

from .mockcore import MockCore

@pytest.fixture(scope='function')
def mockcore():
    return MockCore()

# Ignore semihosting test that currently crashes on Travis
collect_ignore = ["test_semihosting.py"]

Disable test_pack.py unit test until it works on Travis-CI and for Python 2.7.

# pyOCD debugger
# Copyright (c) 2016 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import pytest
import logging

from .mockcore import MockCore

@pytest.fixture(scope='function')
def mockcore():
    return MockCore()

# Ignore semihosting test that currently crashes on Travis
collect_ignore = [
    "test_semihosting.py",
    "test_pack.py"
]

<commit_before># pyOCD debugger
# Copyright (c) 2016 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import pytest
import logging

from .mockcore import MockCore

@pytest.fixture(scope='function')
def mockcore():
    return MockCore()

# Ignore semihosting test that currently crashes on Travis
collect_ignore = ["test_semihosting.py"]
<commit_msg>Disable test_pack.py unit test until it works on Travis-CI and for Python 2.7.<commit_after>

# pyOCD debugger
# Copyright (c) 2016 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import pytest
import logging

from .mockcore import MockCore

@pytest.fixture(scope='function')
def mockcore():
    return MockCore()

# Ignore semihosting test that currently crashes on Travis
collect_ignore = [
    "test_semihosting.py",
    "test_pack.py"
]

# pyOCD debugger
# Copyright (c) 2016 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import pytest
import logging

from .mockcore import MockCore

@pytest.fixture(scope='function')
def mockcore():
    return MockCore()

# Ignore semihosting test that currently crashes on Travis
collect_ignore = ["test_semihosting.py"]

Disable test_pack.py unit test until it works on Travis-CI and for Python 2.7.
# pyOCD debugger
# Copyright (c) 2016 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import pytest
import logging

from .mockcore import MockCore

@pytest.fixture(scope='function')
def mockcore():
    return MockCore()

# Ignore semihosting test that currently crashes on Travis
collect_ignore = [
    "test_semihosting.py",
    "test_pack.py"
]

<commit_before># pyOCD debugger
# Copyright (c) 2016 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import pytest
import logging

from .mockcore import MockCore

@pytest.fixture(scope='function')
def mockcore():
    return MockCore()

# Ignore semihosting test that currently crashes on Travis
collect_ignore = ["test_semihosting.py"]
<commit_msg>Disable test_pack.py unit test until it works on Travis-CI and for Python 2.7.<commit_after># pyOCD debugger
# Copyright (c) 2016 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import pytest
import logging

from .mockcore import MockCore

@pytest.fixture(scope='function')
def mockcore():
    return MockCore()

# Ignore semihosting test that currently crashes on Travis
collect_ignore = [
    "test_semihosting.py",
    "test_pack.py"
]
000bcc94cd97de849c3989e69bc5006de130b01e
tests/test_bgw_dl.py
tests/test_bgw_dl.py
# -*- coding: utf-8 -*-

"""
test_bgw_dl
----------------------------------

Tests for `bgw_dl` module.
"""

import pytest

import bgw_dl


class TestBgw-dl(object):

    @classmethod
    def set_up(self):
        pass

    def test_something(self):
        pass

    @classmethod
    def tear_down(self):
        pass

# -*- coding: utf-8 -*-

"""
test_bgw_dl
----------------------------------

Tests for `bgw_dl` module.
"""

import pytest

import bgw_dl


class TestBgw_dl(object):

    @classmethod
    def set_up(self):
        pass

    def test_something(self):
        pass

    @classmethod
    def tear_down(self):
        pass
Replace hyphen on test class name
Replace hyphen on test class name
Python
mit
vonpupp/bgw-dl
# -*- coding: utf-8 -*-

"""
test_bgw_dl
----------------------------------

Tests for `bgw_dl` module.
"""

import pytest

import bgw_dl


class TestBgw-dl(object):

    @classmethod
    def set_up(self):
        pass

    def test_something(self):
        pass

    @classmethod
    def tear_down(self):
        pass

Replace hyphen on test class name

# -*- coding: utf-8 -*-

"""
test_bgw_dl
----------------------------------

Tests for `bgw_dl` module.
"""

import pytest

import bgw_dl


class TestBgw_dl(object):

    @classmethod
    def set_up(self):
        pass

    def test_something(self):
        pass

    @classmethod
    def tear_down(self):
        pass

<commit_before># -*- coding: utf-8 -*-

"""
test_bgw_dl
----------------------------------

Tests for `bgw_dl` module.
"""

import pytest

import bgw_dl


class TestBgw-dl(object):

    @classmethod
    def set_up(self):
        pass

    def test_something(self):
        pass

    @classmethod
    def tear_down(self):
        pass
<commit_msg>Replace hyphen on test class name<commit_after>

# -*- coding: utf-8 -*-

"""
test_bgw_dl
----------------------------------

Tests for `bgw_dl` module.
"""

import pytest

import bgw_dl


class TestBgw_dl(object):

    @classmethod
    def set_up(self):
        pass

    def test_something(self):
        pass

    @classmethod
    def tear_down(self):
        pass

# -*- coding: utf-8 -*-

"""
test_bgw_dl
----------------------------------

Tests for `bgw_dl` module.
"""

import pytest

import bgw_dl


class TestBgw-dl(object):

    @classmethod
    def set_up(self):
        pass

    def test_something(self):
        pass

    @classmethod
    def tear_down(self):
        pass

Replace hyphen on test class name
# -*- coding: utf-8 -*-

"""
test_bgw_dl
----------------------------------

Tests for `bgw_dl` module.
"""

import pytest

import bgw_dl


class TestBgw_dl(object):

    @classmethod
    def set_up(self):
        pass

    def test_something(self):
        pass

    @classmethod
    def tear_down(self):
        pass

<commit_before># -*- coding: utf-8 -*-

"""
test_bgw_dl
----------------------------------

Tests for `bgw_dl` module.
"""

import pytest

import bgw_dl


class TestBgw-dl(object):

    @classmethod
    def set_up(self):
        pass

    def test_something(self):
        pass

    @classmethod
    def tear_down(self):
        pass
<commit_msg>Replace hyphen on test class name<commit_after># -*- coding: utf-8 -*-

"""
test_bgw_dl
----------------------------------

Tests for `bgw_dl` module.
"""

import pytest

import bgw_dl


class TestBgw_dl(object):

    @classmethod
    def set_up(self):
        pass

    def test_something(self):
        pass

    @classmethod
    def tear_down(self):
        pass
d84a47b875af42da3491c771e461b0a8ca5556db
tests/test_models.py
tests/test_models.py
import pytest


@pytest.mark.django_db
def test_tinycontent_str(simple_content):
    assert "foobar" == str(simple_content)


@pytest.mark.django_db
def test_tinycontentfile_str(file_upload):
    assert "Foobar" == str(file_upload)

import pytest


@pytest.mark.django_db
def test_tinycontent_str(simple_content):
    assert "foobar" == str(simple_content)


@pytest.mark.django_db
def test_tinycontentfile_str(file_upload):
    assert "Foobar" == str(file_upload)


@pytest.mark.django_db
def test_tinycontentfile_slug(file_upload):
    assert "foobar" == file_upload.slug
Test the slug field is generated correctly
Test the slug field is generated correctly
Python
bsd-3-clause
dominicrodger/django-tinycontent,ad-m/django-tinycontent,watchdogpolska/django-tinycontent,ad-m/django-tinycontent,dominicrodger/django-tinycontent,watchdogpolska/django-tinycontent
import pytest


@pytest.mark.django_db
def test_tinycontent_str(simple_content):
    assert "foobar" == str(simple_content)


@pytest.mark.django_db
def test_tinycontentfile_str(file_upload):
    assert "Foobar" == str(file_upload)

Test the slug field is generated correctly

import pytest


@pytest.mark.django_db
def test_tinycontent_str(simple_content):
    assert "foobar" == str(simple_content)


@pytest.mark.django_db
def test_tinycontentfile_str(file_upload):
    assert "Foobar" == str(file_upload)


@pytest.mark.django_db
def test_tinycontentfile_slug(file_upload):
    assert "foobar" == file_upload.slug

<commit_before>import pytest


@pytest.mark.django_db
def test_tinycontent_str(simple_content):
    assert "foobar" == str(simple_content)


@pytest.mark.django_db
def test_tinycontentfile_str(file_upload):
    assert "Foobar" == str(file_upload)
<commit_msg>Test the slug field is generated correctly<commit_after>

import pytest


@pytest.mark.django_db
def test_tinycontent_str(simple_content):
    assert "foobar" == str(simple_content)


@pytest.mark.django_db
def test_tinycontentfile_str(file_upload):
    assert "Foobar" == str(file_upload)


@pytest.mark.django_db
def test_tinycontentfile_slug(file_upload):
    assert "foobar" == file_upload.slug

import pytest


@pytest.mark.django_db
def test_tinycontent_str(simple_content):
    assert "foobar" == str(simple_content)


@pytest.mark.django_db
def test_tinycontentfile_str(file_upload):
    assert "Foobar" == str(file_upload)

Test the slug field is generated correctly
import pytest


@pytest.mark.django_db
def test_tinycontent_str(simple_content):
    assert "foobar" == str(simple_content)


@pytest.mark.django_db
def test_tinycontentfile_str(file_upload):
    assert "Foobar" == str(file_upload)


@pytest.mark.django_db
def test_tinycontentfile_slug(file_upload):
    assert "foobar" == file_upload.slug

<commit_before>import pytest


@pytest.mark.django_db
def test_tinycontent_str(simple_content):
    assert "foobar" == str(simple_content)


@pytest.mark.django_db
def test_tinycontentfile_str(file_upload):
    assert "Foobar" == str(file_upload)
<commit_msg>Test the slug field is generated correctly<commit_after>import pytest


@pytest.mark.django_db
def test_tinycontent_str(simple_content):
    assert "foobar" == str(simple_content)


@pytest.mark.django_db
def test_tinycontentfile_str(file_upload):
    assert "Foobar" == str(file_upload)


@pytest.mark.django_db
def test_tinycontentfile_slug(file_upload):
    assert "foobar" == file_upload.slug
ea2383175456257384e625bb1113d98536b78a92
tests/test_shutil.py
tests/test_shutil.py
#!/usr/bin/env python

__author__ = 'Shyue Ping Ong'
__copyright__ = 'Copyright 2014, The Materials Virtual Lab'
__version__ = '0.1'
__maintainer__ = 'Shyue Ping Ong'
__email__ = 'ongsp@ucsd.edu'
__date__ = '1/24/14'

import unittest
import os
import shutil

from monty.shutil import copy_r


class CopyRTest(unittest.TestCase):

    def setUp(self):
        os.mkdir("cpr_src")
        with open(os.path.join("cpr_src", "test"), "w") as f:
            f.write("what")

    def test_recursive_copy(self):
        copy_r(".", "cpr_dst")
        self.assertTrue(os.path.exists(os.path.join("cpr_dst", "cpr_src",
                                                    "test")))

    def tearDown(self):
        shutil.rmtree("cpr_src")
        shutil.rmtree("cpr_dst")


if __name__ == "__main__":
    unittest.main()

#!/usr/bin/env python

__author__ = 'Shyue Ping Ong'
__copyright__ = 'Copyright 2014, The Materials Virtual Lab'
__version__ = '0.1'
__maintainer__ = 'Shyue Ping Ong'
__email__ = 'ongsp@ucsd.edu'
__date__ = '1/24/14'

import unittest
import os
import shutil

from monty.shutil import copy_r


# class CopyRTest(unittest.TestCase):
#
#     def setUp(self):
#         os.mkdir("cpr_src")
#         with open(os.path.join("cpr_src", "test"), "w") as f:
#             f.write("what")
#
#     def test_recursive_copy(self):
#         copy_r(".", "cpr_dst")
#         self.assertTrue(os.path.exists(os.path.join("cpr_dst", "cpr_src",
#                                                     "test")))
#
#     def tearDown(self):
#         shutil.rmtree("cpr_src")
#         shutil.rmtree("cpr_dst")


if __name__ == "__main__":
    unittest.main()
Comment out CopyR test for now.
Comment out CopyR test for now.
Python
mit
yanikou19/monty,davidwaroquiers/monty,materialsvirtuallab/monty,davidwaroquiers/monty,materialsvirtuallab/monty,gpetretto/monty,gmatteo/monty,gmatteo/monty
#!/usr/bin/env python

__author__ = 'Shyue Ping Ong'
__copyright__ = 'Copyright 2014, The Materials Virtual Lab'
__version__ = '0.1'
__maintainer__ = 'Shyue Ping Ong'
__email__ = 'ongsp@ucsd.edu'
__date__ = '1/24/14'

import unittest
import os
import shutil

from monty.shutil import copy_r


class CopyRTest(unittest.TestCase):

    def setUp(self):
        os.mkdir("cpr_src")
        with open(os.path.join("cpr_src", "test"), "w") as f:
            f.write("what")

    def test_recursive_copy(self):
        copy_r(".", "cpr_dst")
        self.assertTrue(os.path.exists(os.path.join("cpr_dst", "cpr_src",
                                                    "test")))

    def tearDown(self):
        shutil.rmtree("cpr_src")
        shutil.rmtree("cpr_dst")


if __name__ == "__main__":
    unittest.main()

Comment out CopyR test for now.

#!/usr/bin/env python

__author__ = 'Shyue Ping Ong'
__copyright__ = 'Copyright 2014, The Materials Virtual Lab'
__version__ = '0.1'
__maintainer__ = 'Shyue Ping Ong'
__email__ = 'ongsp@ucsd.edu'
__date__ = '1/24/14'

import unittest
import os
import shutil

from monty.shutil import copy_r


# class CopyRTest(unittest.TestCase):
#
#     def setUp(self):
#         os.mkdir("cpr_src")
#         with open(os.path.join("cpr_src", "test"), "w") as f:
#             f.write("what")
#
#     def test_recursive_copy(self):
#         copy_r(".", "cpr_dst")
#         self.assertTrue(os.path.exists(os.path.join("cpr_dst", "cpr_src",
#                                                     "test")))
#
#     def tearDown(self):
#         shutil.rmtree("cpr_src")
#         shutil.rmtree("cpr_dst")


if __name__ == "__main__":
    unittest.main()

<commit_before>#!/usr/bin/env python

__author__ = 'Shyue Ping Ong'
__copyright__ = 'Copyright 2014, The Materials Virtual Lab'
__version__ = '0.1'
__maintainer__ = 'Shyue Ping Ong'
__email__ = 'ongsp@ucsd.edu'
__date__ = '1/24/14'

import unittest
import os
import shutil

from monty.shutil import copy_r


class CopyRTest(unittest.TestCase):

    def setUp(self):
        os.mkdir("cpr_src")
        with open(os.path.join("cpr_src", "test"), "w") as f:
            f.write("what")

    def test_recursive_copy(self):
        copy_r(".", "cpr_dst")
        self.assertTrue(os.path.exists(os.path.join("cpr_dst", "cpr_src",
                                                    "test")))

    def tearDown(self):
        shutil.rmtree("cpr_src")
        shutil.rmtree("cpr_dst")


if __name__ == "__main__":
    unittest.main()
<commit_msg>Comment out CopyR test for now.<commit_after>

#!/usr/bin/env python

__author__ = 'Shyue Ping Ong'
__copyright__ = 'Copyright 2014, The Materials Virtual Lab'
__version__ = '0.1'
__maintainer__ = 'Shyue Ping Ong'
__email__ = 'ongsp@ucsd.edu'
__date__ = '1/24/14'

import unittest
import os
import shutil

from monty.shutil import copy_r


# class CopyRTest(unittest.TestCase):
#
#     def setUp(self):
#         os.mkdir("cpr_src")
#         with open(os.path.join("cpr_src", "test"), "w") as f:
#             f.write("what")
#
#     def test_recursive_copy(self):
#         copy_r(".", "cpr_dst")
#         self.assertTrue(os.path.exists(os.path.join("cpr_dst", "cpr_src",
#                                                     "test")))
#
#     def tearDown(self):
#         shutil.rmtree("cpr_src")
#         shutil.rmtree("cpr_dst")


if __name__ == "__main__":
    unittest.main()

#!/usr/bin/env python

__author__ = 'Shyue Ping Ong'
__copyright__ = 'Copyright 2014, The Materials Virtual Lab'
__version__ = '0.1'
__maintainer__ = 'Shyue Ping Ong'
__email__ = 'ongsp@ucsd.edu'
__date__ = '1/24/14'

import unittest
import os
import shutil

from monty.shutil import copy_r


class CopyRTest(unittest.TestCase):

    def setUp(self):
        os.mkdir("cpr_src")
        with open(os.path.join("cpr_src", "test"), "w") as f:
            f.write("what")

    def test_recursive_copy(self):
        copy_r(".", "cpr_dst")
        self.assertTrue(os.path.exists(os.path.join("cpr_dst", "cpr_src",
                                                    "test")))

    def tearDown(self):
        shutil.rmtree("cpr_src")
        shutil.rmtree("cpr_dst")


if __name__ == "__main__":
    unittest.main()

Comment out CopyR test for now.
#!/usr/bin/env python

__author__ = 'Shyue Ping Ong'
__copyright__ = 'Copyright 2014, The Materials Virtual Lab'
__version__ = '0.1'
__maintainer__ = 'Shyue Ping Ong'
__email__ = 'ongsp@ucsd.edu'
__date__ = '1/24/14'

import unittest
import os
import shutil

from monty.shutil import copy_r


# class CopyRTest(unittest.TestCase):
#
#     def setUp(self):
#         os.mkdir("cpr_src")
#         with open(os.path.join("cpr_src", "test"), "w") as f:
#             f.write("what")
#
#     def test_recursive_copy(self):
#         copy_r(".", "cpr_dst")
#         self.assertTrue(os.path.exists(os.path.join("cpr_dst", "cpr_src",
#                                                     "test")))
#
#     def tearDown(self):
#         shutil.rmtree("cpr_src")
#         shutil.rmtree("cpr_dst")


if __name__ == "__main__":
    unittest.main()

<commit_before>#!/usr/bin/env python

__author__ = 'Shyue Ping Ong'
__copyright__ = 'Copyright 2014, The Materials Virtual Lab'
__version__ = '0.1'
__maintainer__ = 'Shyue Ping Ong'
__email__ = 'ongsp@ucsd.edu'
__date__ = '1/24/14'

import unittest
import os
import shutil

from monty.shutil import copy_r


class CopyRTest(unittest.TestCase):

    def setUp(self):
        os.mkdir("cpr_src")
        with open(os.path.join("cpr_src", "test"), "w") as f:
            f.write("what")

    def test_recursive_copy(self):
        copy_r(".", "cpr_dst")
        self.assertTrue(os.path.exists(os.path.join("cpr_dst", "cpr_src",
                                                    "test")))

    def tearDown(self):
        shutil.rmtree("cpr_src")
        shutil.rmtree("cpr_dst")


if __name__ == "__main__":
    unittest.main()
<commit_msg>Comment out CopyR test for now.<commit_after>#!/usr/bin/env python

__author__ = 'Shyue Ping Ong'
__copyright__ = 'Copyright 2014, The Materials Virtual Lab'
__version__ = '0.1'
__maintainer__ = 'Shyue Ping Ong'
__email__ = 'ongsp@ucsd.edu'
__date__ = '1/24/14'

import unittest
import os
import shutil

from monty.shutil import copy_r


# class CopyRTest(unittest.TestCase):
#
#     def setUp(self):
#         os.mkdir("cpr_src")
#         with open(os.path.join("cpr_src", "test"), "w") as f:
#             f.write("what")
#
#     def test_recursive_copy(self):
#         copy_r(".", "cpr_dst")
#         self.assertTrue(os.path.exists(os.path.join("cpr_dst", "cpr_src",
#                                                     "test")))
#
#     def tearDown(self):
#         shutil.rmtree("cpr_src")
#         shutil.rmtree("cpr_dst")


if __name__ == "__main__":
    unittest.main()
0cc408e04c0f321bf486d6063b11e9b0762ef8fc
tests/test_tokens.py
tests/test_tokens.py
""" NOTE: There are no tests that check for data validation at this point since the interpreter doesn't have any data validation as a feature. """ import pytest from calc import INTEGER, Token def test_no_defaults(): # There's no valid defaults at the moment. with pytest.raises(TypeError): Token() def test_known_type(): # There's no valid defaults at the moment. token = Token(type=INTEGER, value=2) assert token.value == 2 assert token.type == INTEGER def test_str_non_string_value(): token = Token(type=INTEGER, value=2) expected_result = "Token(type=INTEGER, value=2)" assert token.__str__() == expected_result def test_repr(): token = Token(type=INTEGER, value=2) expected_result = "Token(type=INTEGER, value=2)" assert token.__repr__() == expected_result
""" NOTE: There are no tests that check for data validation at this point since the interpreter doesn't have any data validation as a feature. """ import pytest from calc import INTEGER, Token def test_no_defaults(): # There's no valid defaults at the moment. with pytest.raises(TypeError): Token() def test_known_type(): # There's no valid defaults at the moment. token = Token(type=INTEGER, value=2) assert token.value == 2 assert token.type == INTEGER def test_str_non_string_value(): token = Token(type=INTEGER, value=2) expected_result = "Token(type=INTEGER, value=2)" assert str(token) == expected_result def test_repr(): token = Token(type=INTEGER, value=2) expected_result = "Token(type=INTEGER, value=2)" assert repr(token) == expected_result
Use str and repr functions instead of magic methods
Use str and repr functions instead of magic methods
Python
isc
bike-barn/red-green-refactor
""" NOTE: There are no tests that check for data validation at this point since the interpreter doesn't have any data validation as a feature. """ import pytest from calc import INTEGER, Token def test_no_defaults(): # There's no valid defaults at the moment. with pytest.raises(TypeError): Token() def test_known_type(): # There's no valid defaults at the moment. token = Token(type=INTEGER, value=2) assert token.value == 2 assert token.type == INTEGER def test_str_non_string_value(): token = Token(type=INTEGER, value=2) expected_result = "Token(type=INTEGER, value=2)" assert token.__str__() == expected_result def test_repr(): token = Token(type=INTEGER, value=2) expected_result = "Token(type=INTEGER, value=2)" assert token.__repr__() == expected_result Use str and repr functions instead of magic methods
""" NOTE: There are no tests that check for data validation at this point since the interpreter doesn't have any data validation as a feature. """ import pytest from calc import INTEGER, Token def test_no_defaults(): # There's no valid defaults at the moment. with pytest.raises(TypeError): Token() def test_known_type(): # There's no valid defaults at the moment. token = Token(type=INTEGER, value=2) assert token.value == 2 assert token.type == INTEGER def test_str_non_string_value(): token = Token(type=INTEGER, value=2) expected_result = "Token(type=INTEGER, value=2)" assert str(token) == expected_result def test_repr(): token = Token(type=INTEGER, value=2) expected_result = "Token(type=INTEGER, value=2)" assert repr(token) == expected_result
<commit_before>""" NOTE: There are no tests that check for data validation at this point since the interpreter doesn't have any data validation as a feature. """ import pytest from calc import INTEGER, Token def test_no_defaults(): # There's no valid defaults at the moment. with pytest.raises(TypeError): Token() def test_known_type(): # There's no valid defaults at the moment. token = Token(type=INTEGER, value=2) assert token.value == 2 assert token.type == INTEGER def test_str_non_string_value(): token = Token(type=INTEGER, value=2) expected_result = "Token(type=INTEGER, value=2)" assert token.__str__() == expected_result def test_repr(): token = Token(type=INTEGER, value=2) expected_result = "Token(type=INTEGER, value=2)" assert token.__repr__() == expected_result <commit_msg>Use str and repr functions instead of magic methods<commit_after>
""" NOTE: There are no tests that check for data validation at this point since the interpreter doesn't have any data validation as a feature. """ import pytest from calc import INTEGER, Token def test_no_defaults(): # There's no valid defaults at the moment. with pytest.raises(TypeError): Token() def test_known_type(): # There's no valid defaults at the moment. token = Token(type=INTEGER, value=2) assert token.value == 2 assert token.type == INTEGER def test_str_non_string_value(): token = Token(type=INTEGER, value=2) expected_result = "Token(type=INTEGER, value=2)" assert str(token) == expected_result def test_repr(): token = Token(type=INTEGER, value=2) expected_result = "Token(type=INTEGER, value=2)" assert repr(token) == expected_result
""" NOTE: There are no tests that check for data validation at this point since the interpreter doesn't have any data validation as a feature. """ import pytest from calc import INTEGER, Token def test_no_defaults(): # There's no valid defaults at the moment. with pytest.raises(TypeError): Token() def test_known_type(): # There's no valid defaults at the moment. token = Token(type=INTEGER, value=2) assert token.value == 2 assert token.type == INTEGER def test_str_non_string_value(): token = Token(type=INTEGER, value=2) expected_result = "Token(type=INTEGER, value=2)" assert token.__str__() == expected_result def test_repr(): token = Token(type=INTEGER, value=2) expected_result = "Token(type=INTEGER, value=2)" assert token.__repr__() == expected_result Use str and repr functions instead of magic methods""" NOTE: There are no tests that check for data validation at this point since the interpreter doesn't have any data validation as a feature. """ import pytest from calc import INTEGER, Token def test_no_defaults(): # There's no valid defaults at the moment. with pytest.raises(TypeError): Token() def test_known_type(): # There's no valid defaults at the moment. token = Token(type=INTEGER, value=2) assert token.value == 2 assert token.type == INTEGER def test_str_non_string_value(): token = Token(type=INTEGER, value=2) expected_result = "Token(type=INTEGER, value=2)" assert str(token) == expected_result def test_repr(): token = Token(type=INTEGER, value=2) expected_result = "Token(type=INTEGER, value=2)" assert repr(token) == expected_result
<commit_before>""" NOTE: There are no tests that check for data validation at this point since the interpreter doesn't have any data validation as a feature. """ import pytest from calc import INTEGER, Token def test_no_defaults(): # There's no valid defaults at the moment. with pytest.raises(TypeError): Token() def test_known_type(): # There's no valid defaults at the moment. token = Token(type=INTEGER, value=2) assert token.value == 2 assert token.type == INTEGER def test_str_non_string_value(): token = Token(type=INTEGER, value=2) expected_result = "Token(type=INTEGER, value=2)" assert token.__str__() == expected_result def test_repr(): token = Token(type=INTEGER, value=2) expected_result = "Token(type=INTEGER, value=2)" assert token.__repr__() == expected_result <commit_msg>Use str and repr functions instead of magic methods<commit_after>""" NOTE: There are no tests that check for data validation at this point since the interpreter doesn't have any data validation as a feature. """ import pytest from calc import INTEGER, Token def test_no_defaults(): # There's no valid defaults at the moment. with pytest.raises(TypeError): Token() def test_known_type(): # There's no valid defaults at the moment. token = Token(type=INTEGER, value=2) assert token.value == 2 assert token.type == INTEGER def test_str_non_string_value(): token = Token(type=INTEGER, value=2) expected_result = "Token(type=INTEGER, value=2)" assert str(token) == expected_result def test_repr(): token = Token(type=INTEGER, value=2) expected_result = "Token(type=INTEGER, value=2)" assert repr(token) == expected_result
6c4883d6e4e65c9d6618244d821ca44c59ca5d58
tests/test_prepare.py
tests/test_prepare.py
from asyncpg import _testbase as tb


class TestPrepare(tb.ConnectedTestCase):

    async def test_prepare_1(self):
        st = await self.con.prepare('SELECT 1 = $1 AS test')
        rec = (await st.execute(1))[0]
        self.assertTrue(rec['test'])
        self.assertEqual(len(rec), 1)
        self.assertEqual(tuple(rec), (True,))
        self.assertEqual(False, (await st.execute(10))[0][0])

    async def test_prepare_2(self):
        with self.assertRaisesRegex(Exception, 'column "a" does not exist'):
            await self.con.prepare('SELECT a')

    async def test_prepare_3(self):
        st = await self.con.prepare('''
            SELECT CASE WHEN $1::text IS NULL THEN 'NULL'
            ELSE $1::text END''')
        self.assertEqual((await st.execute('aaa'))[0][0], 'aaa')
        self.assertEqual((await st.execute(None))[0][0], 'NULL')

from asyncpg import _testbase as tb


class TestPrepare(tb.ConnectedTestCase):

    async def test_prepare_1(self):
        st = await self.con.prepare('SELECT 1 = $1 AS test')
        rec = (await st.execute(1))[0]
        self.assertTrue(rec['test'])
        self.assertEqual(len(rec), 1)
        self.assertEqual(tuple(rec), (True,))
        self.assertEqual(False, (await st.execute(10))[0][0])

    async def test_prepare_2(self):
        with self.assertRaisesRegex(Exception, 'column "a" does not exist'):
            await self.con.prepare('SELECT a')

    async def test_prepare_3(self):
        cases = [
            ('text', ("'NULL'", 'NULL'), [
                'aaa',
                None
            ]),
            ('decimal', ('0', 0), [
                123,
                123.5,
                None
            ])
        ]

        for type, (none_name, none_val), vals in cases:
            st = await self.con.prepare('''
                SELECT CASE WHEN $1::{type} IS NULL THEN {default}
                ELSE $1::{type} END'''.format(
                    type=type, default=none_name))

            for val in vals:
                with self.subTest(type=type, value=val):
                    res = (await st.execute(val))[0][0]
                    if val is None:
                        self.assertEqual(res, none_val)
                    else:
                        self.assertEqual(res, val)
Test that we handle None->NULL conversion for TEXT and BINARY
tests: Test that we handle None->NULL conversion for TEXT and BINARY
Python
apache-2.0
MagicStack/asyncpg,MagicStack/asyncpg
from asyncpg import _testbase as tb


class TestPrepare(tb.ConnectedTestCase):

    async def test_prepare_1(self):
        st = await self.con.prepare('SELECT 1 = $1 AS test')
        rec = (await st.execute(1))[0]
        self.assertTrue(rec['test'])
        self.assertEqual(len(rec), 1)
        self.assertEqual(tuple(rec), (True,))
        self.assertEqual(False, (await st.execute(10))[0][0])

    async def test_prepare_2(self):
        with self.assertRaisesRegex(Exception, 'column "a" does not exist'):
            await self.con.prepare('SELECT a')

    async def test_prepare_3(self):
        st = await self.con.prepare('''
            SELECT CASE WHEN $1::text IS NULL THEN 'NULL'
            ELSE $1::text END''')
        self.assertEqual((await st.execute('aaa'))[0][0], 'aaa')
        self.assertEqual((await st.execute(None))[0][0], 'NULL')

tests: Test that we handle None->NULL conversion for TEXT and BINARY

from asyncpg import _testbase as tb


class TestPrepare(tb.ConnectedTestCase):

    async def test_prepare_1(self):
        st = await self.con.prepare('SELECT 1 = $1 AS test')
        rec = (await st.execute(1))[0]
        self.assertTrue(rec['test'])
        self.assertEqual(len(rec), 1)
        self.assertEqual(tuple(rec), (True,))
        self.assertEqual(False, (await st.execute(10))[0][0])

    async def test_prepare_2(self):
        with self.assertRaisesRegex(Exception, 'column "a" does not exist'):
            await self.con.prepare('SELECT a')

    async def test_prepare_3(self):
        cases = [
            ('text', ("'NULL'", 'NULL'), [
                'aaa',
                None
            ]),
            ('decimal', ('0', 0), [
                123,
                123.5,
                None
            ])
        ]

        for type, (none_name, none_val), vals in cases:
            st = await self.con.prepare('''
                SELECT CASE WHEN $1::{type} IS NULL THEN {default}
                ELSE $1::{type} END'''.format(
                    type=type, default=none_name))

            for val in vals:
                with self.subTest(type=type, value=val):
                    res = (await st.execute(val))[0][0]
                    if val is None:
                        self.assertEqual(res, none_val)
                    else:
                        self.assertEqual(res, val)

<commit_before>from asyncpg import _testbase as tb


class TestPrepare(tb.ConnectedTestCase):

    async def test_prepare_1(self):
        st = await self.con.prepare('SELECT 1 = $1 AS test')
        rec = (await st.execute(1))[0]
        self.assertTrue(rec['test'])
        self.assertEqual(len(rec), 1)
        self.assertEqual(tuple(rec), (True,))
        self.assertEqual(False, (await st.execute(10))[0][0])

    async def test_prepare_2(self):
        with self.assertRaisesRegex(Exception, 'column "a" does not exist'):
            await self.con.prepare('SELECT a')

    async def test_prepare_3(self):
        st = await self.con.prepare('''
            SELECT CASE WHEN $1::text IS NULL THEN 'NULL'
            ELSE $1::text END''')
        self.assertEqual((await st.execute('aaa'))[0][0], 'aaa')
        self.assertEqual((await st.execute(None))[0][0], 'NULL')
<commit_msg>tests: Test that we handle None->NULL conversion for TEXT and BINARY<commit_after>

from asyncpg import _testbase as tb


class TestPrepare(tb.ConnectedTestCase):

    async def test_prepare_1(self):
        st = await self.con.prepare('SELECT 1 = $1 AS test')
        rec = (await st.execute(1))[0]
        self.assertTrue(rec['test'])
        self.assertEqual(len(rec), 1)
        self.assertEqual(tuple(rec), (True,))
        self.assertEqual(False, (await st.execute(10))[0][0])

    async def test_prepare_2(self):
        with self.assertRaisesRegex(Exception, 'column "a" does not exist'):
            await self.con.prepare('SELECT a')

    async def test_prepare_3(self):
        cases = [
            ('text', ("'NULL'", 'NULL'), [
                'aaa',
                None
            ]),
            ('decimal', ('0', 0), [
                123,
                123.5,
                None
            ])
        ]

        for type, (none_name, none_val), vals in cases:
            st = await self.con.prepare('''
                SELECT CASE WHEN $1::{type} IS NULL THEN {default}
                ELSE $1::{type} END'''.format(
                    type=type, default=none_name))

            for val in vals:
                with self.subTest(type=type, value=val):
                    res = (await st.execute(val))[0][0]
                    if val is None:
                        self.assertEqual(res, none_val)
                    else:
                        self.assertEqual(res, val)

from asyncpg import _testbase as tb


class TestPrepare(tb.ConnectedTestCase):

    async def test_prepare_1(self):
        st = await self.con.prepare('SELECT 1 = $1 AS test')
        rec = (await st.execute(1))[0]
        self.assertTrue(rec['test'])
        self.assertEqual(len(rec), 1)
        self.assertEqual(tuple(rec), (True,))
        self.assertEqual(False, (await st.execute(10))[0][0])

    async def test_prepare_2(self):
        with self.assertRaisesRegex(Exception, 'column "a" does not exist'):
            await self.con.prepare('SELECT a')

    async def test_prepare_3(self):
        st = await self.con.prepare('''
            SELECT CASE WHEN $1::text IS NULL THEN 'NULL'
            ELSE $1::text END''')
        self.assertEqual((await st.execute('aaa'))[0][0], 'aaa')
        self.assertEqual((await st.execute(None))[0][0], 'NULL')

tests: Test that we handle None->NULL conversion for TEXT and BINARY
from asyncpg import _testbase as tb


class TestPrepare(tb.ConnectedTestCase):

    async def test_prepare_1(self):
        st = await self.con.prepare('SELECT 1 = $1 AS test')
        rec = (await st.execute(1))[0]
        self.assertTrue(rec['test'])
        self.assertEqual(len(rec), 1)
        self.assertEqual(tuple(rec), (True,))
        self.assertEqual(False, (await st.execute(10))[0][0])

    async def test_prepare_2(self):
        with self.assertRaisesRegex(Exception, 'column "a" does not exist'):
            await self.con.prepare('SELECT a')

    async def test_prepare_3(self):
        cases = [
            ('text', ("'NULL'", 'NULL'), [
                'aaa',
                None
            ]),
            ('decimal', ('0', 0), [
                123,
                123.5,
                None
            ])
        ]

        for type, (none_name, none_val), vals in cases:
            st = await self.con.prepare('''
                SELECT CASE WHEN $1::{type} IS NULL THEN {default}
                ELSE $1::{type} END'''.format(
                    type=type, default=none_name))

            for val in vals:
                with self.subTest(type=type, value=val):
                    res = (await st.execute(val))[0][0]
                    if val is None:
                        self.assertEqual(res, none_val)
                    else:
                        self.assertEqual(res, val)

<commit_before>from asyncpg import _testbase as tb


class TestPrepare(tb.ConnectedTestCase):

    async def test_prepare_1(self):
        st = await self.con.prepare('SELECT 1 = $1 AS test')
        rec = (await st.execute(1))[0]
        self.assertTrue(rec['test'])
        self.assertEqual(len(rec), 1)
        self.assertEqual(tuple(rec), (True,))
        self.assertEqual(False, (await st.execute(10))[0][0])

    async def test_prepare_2(self):
        with self.assertRaisesRegex(Exception, 'column "a" does not exist'):
            await self.con.prepare('SELECT a')

    async def test_prepare_3(self):
        st = await self.con.prepare('''
            SELECT CASE WHEN $1::text IS NULL THEN 'NULL'
            ELSE $1::text END''')
        self.assertEqual((await st.execute('aaa'))[0][0], 'aaa')
        self.assertEqual((await st.execute(None))[0][0], 'NULL')
<commit_msg>tests: Test that we handle None->NULL conversion for TEXT and BINARY<commit_after>from asyncpg import _testbase as tb


class TestPrepare(tb.ConnectedTestCase):

    async def test_prepare_1(self):
        st = await self.con.prepare('SELECT 1 = $1 AS test')
        rec = (await st.execute(1))[0]
        self.assertTrue(rec['test'])
        self.assertEqual(len(rec), 1)
        self.assertEqual(tuple(rec), (True,))
        self.assertEqual(False, (await st.execute(10))[0][0])

    async def test_prepare_2(self):
        with self.assertRaisesRegex(Exception, 'column "a" does not exist'):
            await self.con.prepare('SELECT a')

    async def test_prepare_3(self):
        cases = [
            ('text', ("'NULL'", 'NULL'), [
                'aaa',
                None
            ]),
            ('decimal', ('0', 0), [
                123,
                123.5,
                None
            ])
        ]

        for type, (none_name, none_val), vals in cases:
            st = await self.con.prepare('''
                SELECT CASE WHEN $1::{type} IS NULL THEN {default}
                ELSE $1::{type} END'''.format(
                    type=type, default=none_name))

            for val in vals:
                with self.subTest(type=type, value=val):
                    res = (await st.execute(val))[0][0]
                    if val is None:
                        self.assertEqual(res, none_val)
                    else:
                        self.assertEqual(res, val)
a0392d693c238cb4548fa6aa2b7f10b6c818b648
currencies/utils.py
currencies/utils.py
from decimal import *

from django.conf import settings

from currencies.models import Currency


def calculate_price(price, currency):
    try:
        factor = Currency.objects.get(code__exact=currency).factor
    except Currency.DoesNotExist:
        if settings.DEBUG:
            raise Currency.DoesNotExist
        else:
            factor = Decimal('0.0')

    new_price = Decimal(price) * factor
    return new_price.quantize(Decimal('.01'), rounding=ROUND_UP)

from decimal import *

from django.conf import settings

from currencies.models import Currency


def calculate_price(price, currency):
    try:
        factor = Currency.objects.get(code__exact=currency).factor
    except Currency.DoesNotExist:
        if settings.DEBUG:
            raise
        else:
            factor = Decimal('0.0')

    new_price = Decimal(price) * factor
    return new_price.quantize(Decimal('.01'), rounding=ROUND_UP)
Simplify a raise in debug mode
Simplify a raise in debug mode
Python
bsd-3-clause
marcosalcazar/django-currencies,jmp0xf/django-currencies,bashu/django-simple-currencies,mysociety/django-currencies,mysociety/django-currencies,racitup/django-currencies,panosl/django-currencies,ydaniv/django-currencies,pathakamit88/django-currencies,marcosalcazar/django-currencies,racitup/django-currencies,bashu/django-simple-currencies,ydaniv/django-currencies,pathakamit88/django-currencies,panosl/django-currencies,barseghyanartur/django-currencies
from decimal import *

from django.conf import settings

from currencies.models import Currency


def calculate_price(price, currency):
    try:
        factor = Currency.objects.get(code__exact=currency).factor
    except Currency.DoesNotExist:
        if settings.DEBUG:
            raise Currency.DoesNotExist
        else:
            factor = Decimal('0.0')

    new_price = Decimal(price) * factor
    return new_price.quantize(Decimal('.01'), rounding=ROUND_UP)

Simplify a raise in debug mode

from decimal import *

from django.conf import settings

from currencies.models import Currency


def calculate_price(price, currency):
    try:
        factor = Currency.objects.get(code__exact=currency).factor
    except Currency.DoesNotExist:
        if settings.DEBUG:
            raise
        else:
            factor = Decimal('0.0')

    new_price = Decimal(price) * factor
    return new_price.quantize(Decimal('.01'), rounding=ROUND_UP)

<commit_before>from decimal import *

from django.conf import settings

from currencies.models import Currency


def calculate_price(price, currency):
    try:
        factor = Currency.objects.get(code__exact=currency).factor
    except Currency.DoesNotExist:
        if settings.DEBUG:
            raise Currency.DoesNotExist
        else:
            factor = Decimal('0.0')

    new_price = Decimal(price) * factor
    return new_price.quantize(Decimal('.01'), rounding=ROUND_UP)
<commit_msg>Simplify a raise in debug mode<commit_after>

from decimal import *

from django.conf import settings

from currencies.models import Currency


def calculate_price(price, currency):
    try:
        factor = Currency.objects.get(code__exact=currency).factor
    except Currency.DoesNotExist:
        if settings.DEBUG:
            raise
        else:
            factor = Decimal('0.0')

    new_price = Decimal(price) * factor
    return new_price.quantize(Decimal('.01'), rounding=ROUND_UP)

from decimal import *

from django.conf import settings

from currencies.models import Currency


def calculate_price(price, currency):
    try:
        factor = Currency.objects.get(code__exact=currency).factor
    except Currency.DoesNotExist:
        if settings.DEBUG:
            raise Currency.DoesNotExist
        else:
            factor = Decimal('0.0')

    new_price = Decimal(price) * factor
    return new_price.quantize(Decimal('.01'), rounding=ROUND_UP)

Simplify a raise in debug mode
from decimal import *

from django.conf import settings

from currencies.models import Currency


def calculate_price(price, currency):
    try:
        factor = Currency.objects.get(code__exact=currency).factor
    except Currency.DoesNotExist:
        if settings.DEBUG:
            raise
        else:
            factor = Decimal('0.0')

    new_price = Decimal(price) * factor
    return new_price.quantize(Decimal('.01'), rounding=ROUND_UP)

<commit_before>from decimal import *

from django.conf import settings

from currencies.models import Currency


def calculate_price(price, currency):
    try:
        factor = Currency.objects.get(code__exact=currency).factor
    except Currency.DoesNotExist:
        if settings.DEBUG:
            raise Currency.DoesNotExist
        else:
            factor = Decimal('0.0')

    new_price = Decimal(price) * factor
    return new_price.quantize(Decimal('.01'), rounding=ROUND_UP)
<commit_msg>Simplify a raise in debug mode<commit_after>from decimal import *

from django.conf import settings

from currencies.models import Currency


def calculate_price(price, currency):
    try:
        factor = Currency.objects.get(code__exact=currency).factor
    except Currency.DoesNotExist:
        if settings.DEBUG:
            raise
        else:
            factor = Decimal('0.0')

    new_price = Decimal(price) * factor
    return new_price.quantize(Decimal('.01'), rounding=ROUND_UP)
37eb125c2b68c0c0c271325aab9cb4863dc6ea55
cte-collation-poc/extractmath.py
cte-collation-poc/extractmath.py
#!/usr/bin/env python

from __future__ import print_function

import sys
import argparse

from lxml import etree

NS = {'x': 'http://www.w3.org/1999/xhtml',
      'mml':'http://www.w3.org/1998/Math/MathML'}

body_xpath = etree.XPath('//x:body', namespaces=NS)
math_xpath = etree.XPath("//mml:math", namespaces=NS)


def main(html_in, html_out=sys.stdout):
    """Extract math nodes from book html file"""
    html = etree.parse(html_in)
    body = body_xpath(html)[0]
    math = math_xpath(body)
    for c in body.iterchildren():
        body.remove(c)
    for m in math:
        body.append(m)
    print(etree.tostring(html), file=html_out)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="Extract only math nodes from book html file")
    parser.add_argument("html_in", type=argparse.FileType('r'),
                        help="assembled fullbook HTML file")
    parser.add_argument("html_out", nargs="?", type=argparse.FileType('w'),
                        help="math-only HTML file output (default stdout)",
                        default=sys.stdout)
    args = parser.parse_args()
    main(args.html_in, args.html_out)

#!/usr/bin/env python

from __future__ import print_function

import sys
import argparse

from lxml import etree

NS = {'x': 'http://www.w3.org/1999/xhtml',
      'mml':'http://www.w3.org/1998/Math/MathML'}

body_xpath = etree.XPath('//x:body', namespaces=NS)
math_xpath = etree.XPath("//mml:math", namespaces=NS)


def main(html_in, html_out=sys.stdout):
    """Extract math nodes from book html file"""
    html = etree.parse(html_in)
    body = body_xpath(html)[0]
    math = math_xpath(body)
    body.clear()
    for m in math:
        body.append(m)
    print(etree.tostring(html), file=html_out)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="Extract only math nodes from book html file")
    parser.add_argument("html_in", type=argparse.FileType('r'),
                        help="assembled fullbook HTML file")
    parser.add_argument("html_out", nargs="?", type=argparse.FileType('w'),
                        help="math-only HTML file output (default stdout)",
                        default=sys.stdout)
    args = parser.parse_args()
    main(args.html_in, args.html_out)
Use clear() instead of manually deleting all elements in the body
Use clear() instead of manually deleting all elements in the body
Python
lgpl-2.1
Connexions/cnx-recipes,Connexions/cnx-recipes,Connexions/cnx-recipes,Connexions/cnx-rulesets,Connexions/cnx-rulesets,Connexions/cnx-rulesets,Connexions/cnx-recipes,Connexions/cnx-recipes,Connexions/cnx-rulesets,Connexions/cte,Connexions/cte
#!/usr/bin/env python

from __future__ import print_function

import sys
import argparse

from lxml import etree

NS = {'x': 'http://www.w3.org/1999/xhtml',
      'mml':'http://www.w3.org/1998/Math/MathML'}

body_xpath = etree.XPath('//x:body', namespaces=NS)
math_xpath = etree.XPath("//mml:math", namespaces=NS)


def main(html_in, html_out=sys.stdout):
    """Extract math nodes from book html file"""
    html = etree.parse(html_in)
    body = body_xpath(html)[0]
    math = math_xpath(body)
    for c in body.iterchildren():
        body.remove(c)
    for m in math:
        body.append(m)
    print(etree.tostring(html), file=html_out)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="Extract only math nodes from book html file")
    parser.add_argument("html_in", type=argparse.FileType('r'),
                        help="assembled fullbook HTML file")
    parser.add_argument("html_out", nargs="?", type=argparse.FileType('w'),
                        help="math-only HTML file output (default stdout)",
                        default=sys.stdout)
    args = parser.parse_args()
    main(args.html_in, args.html_out)

Use clear() instead of manually deleting all elements in the body

#!/usr/bin/env python

from __future__ import print_function

import sys
import argparse

from lxml import etree

NS = {'x': 'http://www.w3.org/1999/xhtml',
      'mml':'http://www.w3.org/1998/Math/MathML'}

body_xpath = etree.XPath('//x:body', namespaces=NS)
math_xpath = etree.XPath("//mml:math", namespaces=NS)


def main(html_in, html_out=sys.stdout):
    """Extract math nodes from book html file"""
    html = etree.parse(html_in)
    body = body_xpath(html)[0]
    math = math_xpath(body)
    body.clear()
    for m in math:
        body.append(m)
    print(etree.tostring(html), file=html_out)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="Extract only math nodes from book html file")
    parser.add_argument("html_in", type=argparse.FileType('r'),
                        help="assembled fullbook HTML file")
    parser.add_argument("html_out", nargs="?", type=argparse.FileType('w'),
                        help="math-only HTML file output (default stdout)",
                        default=sys.stdout)
    args = parser.parse_args()
    main(args.html_in, args.html_out)

<commit_before>#!/usr/bin/env python

from __future__ import print_function

import sys
import argparse

from lxml import etree

NS = {'x': 'http://www.w3.org/1999/xhtml',
      'mml':'http://www.w3.org/1998/Math/MathML'}

body_xpath = etree.XPath('//x:body', namespaces=NS)
math_xpath = etree.XPath("//mml:math", namespaces=NS)


def main(html_in, html_out=sys.stdout):
    """Extract math nodes from book html file"""
    html = etree.parse(html_in)
    body = body_xpath(html)[0]
    math = math_xpath(body)
    for c in body.iterchildren():
        body.remove(c)
    for m in math:
        body.append(m)
    print(etree.tostring(html), file=html_out)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="Extract only math nodes from book html file")
    parser.add_argument("html_in", type=argparse.FileType('r'),
                        help="assembled fullbook HTML file")
    parser.add_argument("html_out", nargs="?", type=argparse.FileType('w'),
                        help="math-only HTML file output (default stdout)",
                        default=sys.stdout)
    args = parser.parse_args()
    main(args.html_in, args.html_out)
<commit_msg>Use clear() instead of manually deleting all elements in the body<commit_after>

#!/usr/bin/env python

from __future__ import print_function

import sys
import argparse

from lxml import etree

NS = {'x': 'http://www.w3.org/1999/xhtml',
      'mml':'http://www.w3.org/1998/Math/MathML'}

body_xpath = etree.XPath('//x:body', namespaces=NS)
math_xpath = etree.XPath("//mml:math", namespaces=NS)


def main(html_in, html_out=sys.stdout):
    """Extract math nodes from book html file"""
    html = etree.parse(html_in)
    body = body_xpath(html)[0]
    math = math_xpath(body)
    body.clear()
    for m in math:
        body.append(m)
    print(etree.tostring(html), file=html_out)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="Extract only math nodes from book html file")
    parser.add_argument("html_in", type=argparse.FileType('r'),
                        help="assembled fullbook HTML file")
    parser.add_argument("html_out", nargs="?", type=argparse.FileType('w'),
                        help="math-only HTML file output (default stdout)",
                        default=sys.stdout)
    args = parser.parse_args()
    main(args.html_in, args.html_out)

#!/usr/bin/env python

from __future__ import print_function

import sys
import argparse

from lxml import etree

NS = {'x': 'http://www.w3.org/1999/xhtml',
      'mml':'http://www.w3.org/1998/Math/MathML'}

body_xpath = etree.XPath('//x:body', namespaces=NS)
math_xpath = etree.XPath("//mml:math", namespaces=NS)


def main(html_in, html_out=sys.stdout):
    """Extract math nodes from book html file"""
    html = etree.parse(html_in)
    body = body_xpath(html)[0]
    math = math_xpath(body)
    for c in body.iterchildren():
        body.remove(c)
    for m in math:
        body.append(m)
    print(etree.tostring(html), file=html_out)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="Extract only math nodes from book html file")
    parser.add_argument("html_in", type=argparse.FileType('r'),
                        help="assembled fullbook HTML file")
    parser.add_argument("html_out", nargs="?", type=argparse.FileType('w'),
                        help="math-only HTML file output (default stdout)",
                        default=sys.stdout)
    args = parser.parse_args()
    main(args.html_in, args.html_out)

Use clear() instead of manually deleting all elements in the body
#!/usr/bin/env python

from __future__ import print_function

import sys
import argparse

from lxml import etree

NS = {'x': 'http://www.w3.org/1999/xhtml',
      'mml':'http://www.w3.org/1998/Math/MathML'}

body_xpath = etree.XPath('//x:body', namespaces=NS)
math_xpath = etree.XPath("//mml:math", namespaces=NS)


def main(html_in, html_out=sys.stdout):
    """Extract math nodes from book html file"""
    html = etree.parse(html_in)
    body = body_xpath(html)[0]
    math = math_xpath(body)
    body.clear()
    for m in math:
        body.append(m)
    print(etree.tostring(html), file=html_out)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="Extract only math nodes from book html file")
    parser.add_argument("html_in", type=argparse.FileType('r'),
                        help="assembled fullbook HTML file")
    parser.add_argument("html_out", nargs="?", type=argparse.FileType('w'),
                        help="math-only HTML file output (default stdout)",
                        default=sys.stdout)
    args = parser.parse_args()
    main(args.html_in, args.html_out)

<commit_before>#!/usr/bin/env python

from __future__ import print_function

import sys
import argparse

from lxml import etree

NS = {'x': 'http://www.w3.org/1999/xhtml',
      'mml':'http://www.w3.org/1998/Math/MathML'}

body_xpath = etree.XPath('//x:body', namespaces=NS)
math_xpath = etree.XPath("//mml:math", namespaces=NS)


def main(html_in, html_out=sys.stdout):
    """Extract math nodes from book html file"""
    html = etree.parse(html_in)
    body = body_xpath(html)[0]
    math = math_xpath(body)
    for c in body.iterchildren():
        body.remove(c)
    for m in math:
        body.append(m)
    print(etree.tostring(html), file=html_out)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="Extract only math nodes from book html file")
    parser.add_argument("html_in", type=argparse.FileType('r'),
                        help="assembled fullbook HTML file")
    parser.add_argument("html_out", nargs="?", type=argparse.FileType('w'),
                        help="math-only HTML file output (default stdout)",
                        default=sys.stdout)
    args = parser.parse_args()
    main(args.html_in, args.html_out)
<commit_msg>Use clear() instead of manually deleting all elements in the body<commit_after>#!/usr/bin/env python

from __future__ import print_function

import sys
import argparse

from lxml import etree

NS = {'x': 'http://www.w3.org/1999/xhtml',
      'mml':'http://www.w3.org/1998/Math/MathML'}

body_xpath = etree.XPath('//x:body', namespaces=NS)
math_xpath = etree.XPath("//mml:math", namespaces=NS)


def main(html_in, html_out=sys.stdout):
    """Extract math nodes from book html file"""
    html = etree.parse(html_in)
    body = body_xpath(html)[0]
    math = math_xpath(body)
    body.clear()
    for m in math:
        body.append(m)
    print(etree.tostring(html), file=html_out)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="Extract only math nodes from book html file")
    parser.add_argument("html_in", type=argparse.FileType('r'),
                        help="assembled fullbook HTML file")
    parser.add_argument("html_out", nargs="?", type=argparse.FileType('w'),
                        help="math-only HTML file output (default stdout)",
                        default=sys.stdout)
    args = parser.parse_args()
    main(args.html_in, args.html_out)