| column | type | lengths / classes |
|---|---|---|
| commit | stringlengths | 40 – 40 |
| old_file | stringlengths | 4 – 118 |
| new_file | stringlengths | 4 – 118 |
| old_contents | stringlengths | 0 – 2.94k |
| new_contents | stringlengths | 1 – 4.43k |
| subject | stringlengths | 15 – 444 |
| message | stringlengths | 16 – 3.45k |
| lang | stringclasses | 1 value |
| license | stringclasses | 13 values |
| repos | stringlengths | 5 – 43.2k |
| prompt | stringlengths | 17 – 4.58k |
| response | stringlengths | 1 – 4.43k |
| prompt_tagged | stringlengths | 58 – 4.62k |
| response_tagged | stringlengths | 1 – 4.43k |
| text | stringlengths | 132 – 7.29k |
| text_tagged | stringlengths | 173 – 7.33k |
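The columns above describe a commit-rewrite dataset: each row pairs a file's contents before and after a commit (`old_contents` / `new_contents`) with the commit `subject` and `message`, plus pre-assembled `prompt`, `response`, and tagged `text` variants that wrap the same material in `<commit_before>` / `<commit_msg>` / `<commit_after>` markers. Below is a minimal sketch of loading and inspecting rows with this schema using the `datasets` library; the repository ID is a placeholder, not the dataset's actual location.

```python
# Minimal sketch: load and inspect a dataset with the schema above.
# "user/commit-rewrite-dataset" is a placeholder ID, not the real location of this data.
from datasets import load_dataset

ds = load_dataset("user/commit-rewrite-dataset", split="train")

row = ds[0]
print(row["commit"])         # 40-character commit hash
print(row["subject"])        # one-line commit subject
print(row["lang"])           # language of the changed file (single class: Python)
print(row["prompt"][:80])    # old file contents followed by the commit subject
print(row["response"][:80])  # new file contents after the commit
```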
5875baf754d3bcc911f828fc3ecb302ac6da967f
tagcache/lock.py
tagcache/lock.py
# -*- encoding: utf-8 -*- import os import fcntl from tagcache.utils import open_file class FileLock(object): def __init__(self, path): self.path = path self.fd = None def acquire(self, ex=False, nb=False): """ Acquire a lock on a path. :param ex (optional): default False, acquire a exclusive lock if True :param nb (optional): default False, non blocking if True :return: True on success :raise: raise RuntimeError if a lock has been acquired """ if self.fd is not None: raise RuntimeError("A lock has been held") try: # open or create the lock file self.fd = open_file(self.path, os.O_RDWR|os.O_CREAT) lock_flags = fcntl.LOCK_EX if ex else fcntl.LOCK_SH if nb: lock_flags |= fcntl.LOCK_NB fcntl.flock(self.fd, lock_flags) return True except Exception, e: if self.fd is not None: os.close(self.fd) self.fd = None return False def release(self): """ Release the lock. """ if self.fd is None: return fcntl.flock(self.fd, fcntl.LOCK_UN) self.fd = None
# -*- encoding: utf-8 -*- import os import fcntl from tagcache.utils import open_file class FileLock(object): def __init__(self, path): self.path = path self.fd = None @property def is_acquired(self): return self.fd is not None def acquire(self, ex=False, nb=False): """ Acquire a lock on a path. :param ex (optional): default False, acquire a exclusive lock if True :param nb (optional): default False, non blocking if True :return: True on success :raise: raise RuntimeError if a lock has been acquired """ if self.fd is not None: raise RuntimeError("A lock has been held") try: # open or create the lock file self.fd = open_file(self.path, os.O_RDWR|os.O_CREAT) lock_flags = fcntl.LOCK_EX if ex else fcntl.LOCK_SH if nb: lock_flags |= fcntl.LOCK_NB fcntl.flock(self.fd, lock_flags) return True except Exception, e: if self.fd is not None: os.close(self.fd) self.fd = None return False def release(self): """ Release the lock. """ if self.fd is None: return fcntl.flock(self.fd, fcntl.LOCK_UN) self.fd = None
Add `is_acquired` property to FileLock
Add `is_acquired` property to FileLock
Python
mit
huangjunwen/tagcache
# -*- encoding: utf-8 -*- import os import fcntl from tagcache.utils import open_file class FileLock(object): def __init__(self, path): self.path = path self.fd = None def acquire(self, ex=False, nb=False): """ Acquire a lock on a path. :param ex (optional): default False, acquire a exclusive lock if True :param nb (optional): default False, non blocking if True :return: True on success :raise: raise RuntimeError if a lock has been acquired """ if self.fd is not None: raise RuntimeError("A lock has been held") try: # open or create the lock file self.fd = open_file(self.path, os.O_RDWR|os.O_CREAT) lock_flags = fcntl.LOCK_EX if ex else fcntl.LOCK_SH if nb: lock_flags |= fcntl.LOCK_NB fcntl.flock(self.fd, lock_flags) return True except Exception, e: if self.fd is not None: os.close(self.fd) self.fd = None return False def release(self): """ Release the lock. """ if self.fd is None: return fcntl.flock(self.fd, fcntl.LOCK_UN) self.fd = None Add `is_acquired` property to FileLock
# -*- encoding: utf-8 -*- import os import fcntl from tagcache.utils import open_file class FileLock(object): def __init__(self, path): self.path = path self.fd = None @property def is_acquired(self): return self.fd is not None def acquire(self, ex=False, nb=False): """ Acquire a lock on a path. :param ex (optional): default False, acquire a exclusive lock if True :param nb (optional): default False, non blocking if True :return: True on success :raise: raise RuntimeError if a lock has been acquired """ if self.fd is not None: raise RuntimeError("A lock has been held") try: # open or create the lock file self.fd = open_file(self.path, os.O_RDWR|os.O_CREAT) lock_flags = fcntl.LOCK_EX if ex else fcntl.LOCK_SH if nb: lock_flags |= fcntl.LOCK_NB fcntl.flock(self.fd, lock_flags) return True except Exception, e: if self.fd is not None: os.close(self.fd) self.fd = None return False def release(self): """ Release the lock. """ if self.fd is None: return fcntl.flock(self.fd, fcntl.LOCK_UN) self.fd = None
<commit_before># -*- encoding: utf-8 -*- import os import fcntl from tagcache.utils import open_file class FileLock(object): def __init__(self, path): self.path = path self.fd = None def acquire(self, ex=False, nb=False): """ Acquire a lock on a path. :param ex (optional): default False, acquire a exclusive lock if True :param nb (optional): default False, non blocking if True :return: True on success :raise: raise RuntimeError if a lock has been acquired """ if self.fd is not None: raise RuntimeError("A lock has been held") try: # open or create the lock file self.fd = open_file(self.path, os.O_RDWR|os.O_CREAT) lock_flags = fcntl.LOCK_EX if ex else fcntl.LOCK_SH if nb: lock_flags |= fcntl.LOCK_NB fcntl.flock(self.fd, lock_flags) return True except Exception, e: if self.fd is not None: os.close(self.fd) self.fd = None return False def release(self): """ Release the lock. """ if self.fd is None: return fcntl.flock(self.fd, fcntl.LOCK_UN) self.fd = None <commit_msg>Add `is_acquired` property to FileLock<commit_after>
# -*- encoding: utf-8 -*- import os import fcntl from tagcache.utils import open_file class FileLock(object): def __init__(self, path): self.path = path self.fd = None @property def is_acquired(self): return self.fd is not None def acquire(self, ex=False, nb=False): """ Acquire a lock on a path. :param ex (optional): default False, acquire a exclusive lock if True :param nb (optional): default False, non blocking if True :return: True on success :raise: raise RuntimeError if a lock has been acquired """ if self.fd is not None: raise RuntimeError("A lock has been held") try: # open or create the lock file self.fd = open_file(self.path, os.O_RDWR|os.O_CREAT) lock_flags = fcntl.LOCK_EX if ex else fcntl.LOCK_SH if nb: lock_flags |= fcntl.LOCK_NB fcntl.flock(self.fd, lock_flags) return True except Exception, e: if self.fd is not None: os.close(self.fd) self.fd = None return False def release(self): """ Release the lock. """ if self.fd is None: return fcntl.flock(self.fd, fcntl.LOCK_UN) self.fd = None
# -*- encoding: utf-8 -*- import os import fcntl from tagcache.utils import open_file class FileLock(object): def __init__(self, path): self.path = path self.fd = None def acquire(self, ex=False, nb=False): """ Acquire a lock on a path. :param ex (optional): default False, acquire a exclusive lock if True :param nb (optional): default False, non blocking if True :return: True on success :raise: raise RuntimeError if a lock has been acquired """ if self.fd is not None: raise RuntimeError("A lock has been held") try: # open or create the lock file self.fd = open_file(self.path, os.O_RDWR|os.O_CREAT) lock_flags = fcntl.LOCK_EX if ex else fcntl.LOCK_SH if nb: lock_flags |= fcntl.LOCK_NB fcntl.flock(self.fd, lock_flags) return True except Exception, e: if self.fd is not None: os.close(self.fd) self.fd = None return False def release(self): """ Release the lock. """ if self.fd is None: return fcntl.flock(self.fd, fcntl.LOCK_UN) self.fd = None Add `is_acquired` property to FileLock# -*- encoding: utf-8 -*- import os import fcntl from tagcache.utils import open_file class FileLock(object): def __init__(self, path): self.path = path self.fd = None @property def is_acquired(self): return self.fd is not None def acquire(self, ex=False, nb=False): """ Acquire a lock on a path. :param ex (optional): default False, acquire a exclusive lock if True :param nb (optional): default False, non blocking if True :return: True on success :raise: raise RuntimeError if a lock has been acquired """ if self.fd is not None: raise RuntimeError("A lock has been held") try: # open or create the lock file self.fd = open_file(self.path, os.O_RDWR|os.O_CREAT) lock_flags = fcntl.LOCK_EX if ex else fcntl.LOCK_SH if nb: lock_flags |= fcntl.LOCK_NB fcntl.flock(self.fd, lock_flags) return True except Exception, e: if self.fd is not None: os.close(self.fd) self.fd = None return False def release(self): """ Release the lock. """ if self.fd is None: return fcntl.flock(self.fd, fcntl.LOCK_UN) self.fd = None
<commit_before># -*- encoding: utf-8 -*- import os import fcntl from tagcache.utils import open_file class FileLock(object): def __init__(self, path): self.path = path self.fd = None def acquire(self, ex=False, nb=False): """ Acquire a lock on a path. :param ex (optional): default False, acquire a exclusive lock if True :param nb (optional): default False, non blocking if True :return: True on success :raise: raise RuntimeError if a lock has been acquired """ if self.fd is not None: raise RuntimeError("A lock has been held") try: # open or create the lock file self.fd = open_file(self.path, os.O_RDWR|os.O_CREAT) lock_flags = fcntl.LOCK_EX if ex else fcntl.LOCK_SH if nb: lock_flags |= fcntl.LOCK_NB fcntl.flock(self.fd, lock_flags) return True except Exception, e: if self.fd is not None: os.close(self.fd) self.fd = None return False def release(self): """ Release the lock. """ if self.fd is None: return fcntl.flock(self.fd, fcntl.LOCK_UN) self.fd = None <commit_msg>Add `is_acquired` property to FileLock<commit_after># -*- encoding: utf-8 -*- import os import fcntl from tagcache.utils import open_file class FileLock(object): def __init__(self, path): self.path = path self.fd = None @property def is_acquired(self): return self.fd is not None def acquire(self, ex=False, nb=False): """ Acquire a lock on a path. :param ex (optional): default False, acquire a exclusive lock if True :param nb (optional): default False, non blocking if True :return: True on success :raise: raise RuntimeError if a lock has been acquired """ if self.fd is not None: raise RuntimeError("A lock has been held") try: # open or create the lock file self.fd = open_file(self.path, os.O_RDWR|os.O_CREAT) lock_flags = fcntl.LOCK_EX if ex else fcntl.LOCK_SH if nb: lock_flags |= fcntl.LOCK_NB fcntl.flock(self.fd, lock_flags) return True except Exception, e: if self.fd is not None: os.close(self.fd) self.fd = None return False def release(self): """ Release the lock. """ if self.fd is None: return fcntl.flock(self.fd, fcntl.LOCK_UN) self.fd = None
a56108990e2cda8694f7b5c4fe3c615966c4cd6c
python/powers_of_two.py
python/powers_of_two.py
def powers_of_two(limit): value = 1 while value < limit: yield value value += value # Use the generator for i in powers_of_two(70): print(i) # Explore the mechanism print(type(powers_of_two)) # <class 'function'> g = powers_of_two(100) print(type(g)) # <class 'generator'> print(g.__next__()) # 1 print(g.__next__()) # 2 print(next(g)) # 4
def powers_of_two(limit): value = 1 while value < limit: yield value value += value # Use the generator for i in powers_of_two(70): print(i) # Explore the mechanism g = powers_of_two(100) assert(str(type(powers_of_two)) == "<class 'function'>") assert(str(type(g)) == "<class 'generator'>") assert(g.__next__() == 1) assert(g.__next__() == 2) assert(next(g) == 4)
Use asserts instead of prints
Use asserts instead of prints
Python
mit
rtoal/ple,rtoal/ple,rtoal/ple,rtoal/polyglot,rtoal/ple,rtoal/ple,rtoal/ple,rtoal/polyglot,rtoal/polyglot,rtoal/ple,rtoal/polyglot,rtoal/polyglot,rtoal/ple,rtoal/polyglot,rtoal/ple,rtoal/ple,rtoal/ple,rtoal/polyglot,rtoal/ple,rtoal/ple,rtoal/polyglot,rtoal/polyglot,rtoal/polyglot,rtoal/ple,rtoal/polyglot,rtoal/polyglot,rtoal/polyglot,rtoal/polyglot,rtoal/ple,rtoal/polyglot,rtoal/ple
def powers_of_two(limit): value = 1 while value < limit: yield value value += value # Use the generator for i in powers_of_two(70): print(i) # Explore the mechanism print(type(powers_of_two)) # <class 'function'> g = powers_of_two(100) print(type(g)) # <class 'generator'> print(g.__next__()) # 1 print(g.__next__()) # 2 print(next(g)) # 4 Use asserts instead of prints
def powers_of_two(limit): value = 1 while value < limit: yield value value += value # Use the generator for i in powers_of_two(70): print(i) # Explore the mechanism g = powers_of_two(100) assert(str(type(powers_of_two)) == "<class 'function'>") assert(str(type(g)) == "<class 'generator'>") assert(g.__next__() == 1) assert(g.__next__() == 2) assert(next(g) == 4)
<commit_before>def powers_of_two(limit): value = 1 while value < limit: yield value value += value # Use the generator for i in powers_of_two(70): print(i) # Explore the mechanism print(type(powers_of_two)) # <class 'function'> g = powers_of_two(100) print(type(g)) # <class 'generator'> print(g.__next__()) # 1 print(g.__next__()) # 2 print(next(g)) # 4 <commit_msg>Use asserts instead of prints<commit_after>
def powers_of_two(limit): value = 1 while value < limit: yield value value += value # Use the generator for i in powers_of_two(70): print(i) # Explore the mechanism g = powers_of_two(100) assert(str(type(powers_of_two)) == "<class 'function'>") assert(str(type(g)) == "<class 'generator'>") assert(g.__next__() == 1) assert(g.__next__() == 2) assert(next(g) == 4)
def powers_of_two(limit): value = 1 while value < limit: yield value value += value # Use the generator for i in powers_of_two(70): print(i) # Explore the mechanism print(type(powers_of_two)) # <class 'function'> g = powers_of_two(100) print(type(g)) # <class 'generator'> print(g.__next__()) # 1 print(g.__next__()) # 2 print(next(g)) # 4 Use asserts instead of printsdef powers_of_two(limit): value = 1 while value < limit: yield value value += value # Use the generator for i in powers_of_two(70): print(i) # Explore the mechanism g = powers_of_two(100) assert(str(type(powers_of_two)) == "<class 'function'>") assert(str(type(g)) == "<class 'generator'>") assert(g.__next__() == 1) assert(g.__next__() == 2) assert(next(g) == 4)
<commit_before>def powers_of_two(limit): value = 1 while value < limit: yield value value += value # Use the generator for i in powers_of_two(70): print(i) # Explore the mechanism print(type(powers_of_two)) # <class 'function'> g = powers_of_two(100) print(type(g)) # <class 'generator'> print(g.__next__()) # 1 print(g.__next__()) # 2 print(next(g)) # 4 <commit_msg>Use asserts instead of prints<commit_after>def powers_of_two(limit): value = 1 while value < limit: yield value value += value # Use the generator for i in powers_of_two(70): print(i) # Explore the mechanism g = powers_of_two(100) assert(str(type(powers_of_two)) == "<class 'function'>") assert(str(type(g)) == "<class 'generator'>") assert(g.__next__() == 1) assert(g.__next__() == 2) assert(next(g) == 4)
55a1f6197800249b3ad13ec7c5358e907ea04c46
comics/comics/treadingground.py
comics/comics/treadingground.py
from comics.aggregator.crawler import CrawlerBase, CrawlerImage from comics.meta.base import MetaBase class Meta(MetaBase): name = 'Treading Ground' language = 'en' url = 'http://www.treadingground.com/' start_date = '2003-10-12' rights = 'Nick Wright' class Crawler(CrawlerBase): history_capable_days = 30 schedule = 'Mo,We,Fr' time_zone = -5 def crawl(self, pub_date): pass
from comics.aggregator.crawler import CrawlerBase, CrawlerImage from comics.meta.base import MetaBase class Meta(MetaBase): name = 'Treading Ground' language = 'en' url = 'http://www.treadingground.com/' start_date = '2003-10-12' rights = 'Nick Wright' class Crawler(CrawlerBase): schedule = None def crawl(self, pub_date): pass # Comic no longer published
Remove schedule for ended comic
Remove schedule for ended comic
Python
agpl-3.0
klette/comics,datagutten/comics,klette/comics,jodal/comics,datagutten/comics,klette/comics,jodal/comics,jodal/comics,jodal/comics,datagutten/comics,datagutten/comics
from comics.aggregator.crawler import CrawlerBase, CrawlerImage from comics.meta.base import MetaBase class Meta(MetaBase): name = 'Treading Ground' language = 'en' url = 'http://www.treadingground.com/' start_date = '2003-10-12' rights = 'Nick Wright' class Crawler(CrawlerBase): history_capable_days = 30 schedule = 'Mo,We,Fr' time_zone = -5 def crawl(self, pub_date): pass Remove schedule for ended comic
from comics.aggregator.crawler import CrawlerBase, CrawlerImage from comics.meta.base import MetaBase class Meta(MetaBase): name = 'Treading Ground' language = 'en' url = 'http://www.treadingground.com/' start_date = '2003-10-12' rights = 'Nick Wright' class Crawler(CrawlerBase): schedule = None def crawl(self, pub_date): pass # Comic no longer published
<commit_before>from comics.aggregator.crawler import CrawlerBase, CrawlerImage from comics.meta.base import MetaBase class Meta(MetaBase): name = 'Treading Ground' language = 'en' url = 'http://www.treadingground.com/' start_date = '2003-10-12' rights = 'Nick Wright' class Crawler(CrawlerBase): history_capable_days = 30 schedule = 'Mo,We,Fr' time_zone = -5 def crawl(self, pub_date): pass <commit_msg>Remove schedule for ended comic<commit_after>
from comics.aggregator.crawler import CrawlerBase, CrawlerImage from comics.meta.base import MetaBase class Meta(MetaBase): name = 'Treading Ground' language = 'en' url = 'http://www.treadingground.com/' start_date = '2003-10-12' rights = 'Nick Wright' class Crawler(CrawlerBase): schedule = None def crawl(self, pub_date): pass # Comic no longer published
from comics.aggregator.crawler import CrawlerBase, CrawlerImage from comics.meta.base import MetaBase class Meta(MetaBase): name = 'Treading Ground' language = 'en' url = 'http://www.treadingground.com/' start_date = '2003-10-12' rights = 'Nick Wright' class Crawler(CrawlerBase): history_capable_days = 30 schedule = 'Mo,We,Fr' time_zone = -5 def crawl(self, pub_date): pass Remove schedule for ended comicfrom comics.aggregator.crawler import CrawlerBase, CrawlerImage from comics.meta.base import MetaBase class Meta(MetaBase): name = 'Treading Ground' language = 'en' url = 'http://www.treadingground.com/' start_date = '2003-10-12' rights = 'Nick Wright' class Crawler(CrawlerBase): schedule = None def crawl(self, pub_date): pass # Comic no longer published
<commit_before>from comics.aggregator.crawler import CrawlerBase, CrawlerImage from comics.meta.base import MetaBase class Meta(MetaBase): name = 'Treading Ground' language = 'en' url = 'http://www.treadingground.com/' start_date = '2003-10-12' rights = 'Nick Wright' class Crawler(CrawlerBase): history_capable_days = 30 schedule = 'Mo,We,Fr' time_zone = -5 def crawl(self, pub_date): pass <commit_msg>Remove schedule for ended comic<commit_after>from comics.aggregator.crawler import CrawlerBase, CrawlerImage from comics.meta.base import MetaBase class Meta(MetaBase): name = 'Treading Ground' language = 'en' url = 'http://www.treadingground.com/' start_date = '2003-10-12' rights = 'Nick Wright' class Crawler(CrawlerBase): schedule = None def crawl(self, pub_date): pass # Comic no longer published
0795ffe195798461961fc41329fb7df30ec429c3
lisa/server/tests/test_plugins.py
lisa/server/tests/test_plugins.py
from lisa.server.plugins.PluginManager import PluginManagerSingleton from twisted.trial import unittest import json class LisaClientTestCase_Plugin(unittest.TestCase): def setUp(self): self.pluginManager = PluginManagerSingleton.get() def test_a_install_plugin(self): answer = self.pluginManager.installPlugin(plugin_name="ChatterBot", test_mode=True) self.assertEqual(answer['status'], "success") def test_b_disable_plugin(self): answer = self.pluginManager.disablePlugin(plugin_name="ChatterBot") self.assertEqual(answer['status'], "success") def test_c_enable_plugin(self): answer = self.pluginManager.enablePlugin(plugin_name="ChatterBot") self.assertEqual(answer['status'], "success") def test_d_uninstall_plugin(self): answer = self.pluginManager.uninstallPlugin(plugin_name="ChatterBot") self.assertEqual(answer['status'], "success")
from lisa.server.plugins.PluginManager import PluginManagerSingleton from twisted.trial import unittest import json class LisaClientTestCase_Plugin(unittest.TestCase): def setUp(self): self.pluginManager = PluginManagerSingleton.get() def test_a_install_plugin(self): answer = self.pluginManager.installPlugin(plugin_name="UnitTest", test_mode=True) self.assertEqual(answer['status'], "success") def test_b_disable_plugin(self): answer = self.pluginManager.disablePlugin(plugin_name="UnitTest") self.assertEqual(answer['status'], "success") def test_c_enable_plugin(self): answer = self.pluginManager.enablePlugin(plugin_name="UnitTest") self.assertEqual(answer['status'], "success") def test_d_uninstall_plugin(self): answer = self.pluginManager.uninstallPlugin(plugin_name="UnitTest") self.assertEqual(answer['status'], "success")
Test with a dedicated plugin now
Test with a dedicated plugin now
Python
mit
Seraf/LISA,Seraf/LISA,Seraf/LISA,Seraf/LISA
from lisa.server.plugins.PluginManager import PluginManagerSingleton from twisted.trial import unittest import json class LisaClientTestCase_Plugin(unittest.TestCase): def setUp(self): self.pluginManager = PluginManagerSingleton.get() def test_a_install_plugin(self): answer = self.pluginManager.installPlugin(plugin_name="ChatterBot", test_mode=True) self.assertEqual(answer['status'], "success") def test_b_disable_plugin(self): answer = self.pluginManager.disablePlugin(plugin_name="ChatterBot") self.assertEqual(answer['status'], "success") def test_c_enable_plugin(self): answer = self.pluginManager.enablePlugin(plugin_name="ChatterBot") self.assertEqual(answer['status'], "success") def test_d_uninstall_plugin(self): answer = self.pluginManager.uninstallPlugin(plugin_name="ChatterBot") self.assertEqual(answer['status'], "success") Test with a dedicated plugin now
from lisa.server.plugins.PluginManager import PluginManagerSingleton from twisted.trial import unittest import json class LisaClientTestCase_Plugin(unittest.TestCase): def setUp(self): self.pluginManager = PluginManagerSingleton.get() def test_a_install_plugin(self): answer = self.pluginManager.installPlugin(plugin_name="UnitTest", test_mode=True) self.assertEqual(answer['status'], "success") def test_b_disable_plugin(self): answer = self.pluginManager.disablePlugin(plugin_name="UnitTest") self.assertEqual(answer['status'], "success") def test_c_enable_plugin(self): answer = self.pluginManager.enablePlugin(plugin_name="UnitTest") self.assertEqual(answer['status'], "success") def test_d_uninstall_plugin(self): answer = self.pluginManager.uninstallPlugin(plugin_name="UnitTest") self.assertEqual(answer['status'], "success")
<commit_before>from lisa.server.plugins.PluginManager import PluginManagerSingleton from twisted.trial import unittest import json class LisaClientTestCase_Plugin(unittest.TestCase): def setUp(self): self.pluginManager = PluginManagerSingleton.get() def test_a_install_plugin(self): answer = self.pluginManager.installPlugin(plugin_name="ChatterBot", test_mode=True) self.assertEqual(answer['status'], "success") def test_b_disable_plugin(self): answer = self.pluginManager.disablePlugin(plugin_name="ChatterBot") self.assertEqual(answer['status'], "success") def test_c_enable_plugin(self): answer = self.pluginManager.enablePlugin(plugin_name="ChatterBot") self.assertEqual(answer['status'], "success") def test_d_uninstall_plugin(self): answer = self.pluginManager.uninstallPlugin(plugin_name="ChatterBot") self.assertEqual(answer['status'], "success") <commit_msg>Test with a dedicated plugin now<commit_after>
from lisa.server.plugins.PluginManager import PluginManagerSingleton from twisted.trial import unittest import json class LisaClientTestCase_Plugin(unittest.TestCase): def setUp(self): self.pluginManager = PluginManagerSingleton.get() def test_a_install_plugin(self): answer = self.pluginManager.installPlugin(plugin_name="UnitTest", test_mode=True) self.assertEqual(answer['status'], "success") def test_b_disable_plugin(self): answer = self.pluginManager.disablePlugin(plugin_name="UnitTest") self.assertEqual(answer['status'], "success") def test_c_enable_plugin(self): answer = self.pluginManager.enablePlugin(plugin_name="UnitTest") self.assertEqual(answer['status'], "success") def test_d_uninstall_plugin(self): answer = self.pluginManager.uninstallPlugin(plugin_name="UnitTest") self.assertEqual(answer['status'], "success")
from lisa.server.plugins.PluginManager import PluginManagerSingleton from twisted.trial import unittest import json class LisaClientTestCase_Plugin(unittest.TestCase): def setUp(self): self.pluginManager = PluginManagerSingleton.get() def test_a_install_plugin(self): answer = self.pluginManager.installPlugin(plugin_name="ChatterBot", test_mode=True) self.assertEqual(answer['status'], "success") def test_b_disable_plugin(self): answer = self.pluginManager.disablePlugin(plugin_name="ChatterBot") self.assertEqual(answer['status'], "success") def test_c_enable_plugin(self): answer = self.pluginManager.enablePlugin(plugin_name="ChatterBot") self.assertEqual(answer['status'], "success") def test_d_uninstall_plugin(self): answer = self.pluginManager.uninstallPlugin(plugin_name="ChatterBot") self.assertEqual(answer['status'], "success") Test with a dedicated plugin nowfrom lisa.server.plugins.PluginManager import PluginManagerSingleton from twisted.trial import unittest import json class LisaClientTestCase_Plugin(unittest.TestCase): def setUp(self): self.pluginManager = PluginManagerSingleton.get() def test_a_install_plugin(self): answer = self.pluginManager.installPlugin(plugin_name="UnitTest", test_mode=True) self.assertEqual(answer['status'], "success") def test_b_disable_plugin(self): answer = self.pluginManager.disablePlugin(plugin_name="UnitTest") self.assertEqual(answer['status'], "success") def test_c_enable_plugin(self): answer = self.pluginManager.enablePlugin(plugin_name="UnitTest") self.assertEqual(answer['status'], "success") def test_d_uninstall_plugin(self): answer = self.pluginManager.uninstallPlugin(plugin_name="UnitTest") self.assertEqual(answer['status'], "success")
<commit_before>from lisa.server.plugins.PluginManager import PluginManagerSingleton from twisted.trial import unittest import json class LisaClientTestCase_Plugin(unittest.TestCase): def setUp(self): self.pluginManager = PluginManagerSingleton.get() def test_a_install_plugin(self): answer = self.pluginManager.installPlugin(plugin_name="ChatterBot", test_mode=True) self.assertEqual(answer['status'], "success") def test_b_disable_plugin(self): answer = self.pluginManager.disablePlugin(plugin_name="ChatterBot") self.assertEqual(answer['status'], "success") def test_c_enable_plugin(self): answer = self.pluginManager.enablePlugin(plugin_name="ChatterBot") self.assertEqual(answer['status'], "success") def test_d_uninstall_plugin(self): answer = self.pluginManager.uninstallPlugin(plugin_name="ChatterBot") self.assertEqual(answer['status'], "success") <commit_msg>Test with a dedicated plugin now<commit_after>from lisa.server.plugins.PluginManager import PluginManagerSingleton from twisted.trial import unittest import json class LisaClientTestCase_Plugin(unittest.TestCase): def setUp(self): self.pluginManager = PluginManagerSingleton.get() def test_a_install_plugin(self): answer = self.pluginManager.installPlugin(plugin_name="UnitTest", test_mode=True) self.assertEqual(answer['status'], "success") def test_b_disable_plugin(self): answer = self.pluginManager.disablePlugin(plugin_name="UnitTest") self.assertEqual(answer['status'], "success") def test_c_enable_plugin(self): answer = self.pluginManager.enablePlugin(plugin_name="UnitTest") self.assertEqual(answer['status'], "success") def test_d_uninstall_plugin(self): answer = self.pluginManager.uninstallPlugin(plugin_name="UnitTest") self.assertEqual(answer['status'], "success")
d4fc34ea4635ee4ec294e1eb52fcd83174dd52c5
steve/_version.py
steve/_version.py
####################################################################### # This file is part of steve. # # Copyright (C) 2012 Will Kahn-Greene # # steve is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # steve is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with steve. If not, see <http://www.gnu.org/licenses/>. ####################################################################### __version__ = '0.1' __releasedate__ = ''
####################################################################### # This file is part of steve. # # Copyright (C) 2012 Will Kahn-Greene # # steve is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # steve is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with steve. If not, see <http://www.gnu.org/licenses/>. ####################################################################### # See http://www.python.org/dev/peps/pep-0386/ # Examples: # * 1.0.dev # * 1.0a2 # * 1.0b2 # * 1.0 __version__ = '0.1.dev' __releasedate__ = ''
Add version number notes; set to dev
Add version number notes; set to dev
Python
bsd-2-clause
willkg/steve,pyvideo/steve,CarlFK/steve,willkg/steve,pyvideo/steve,CarlFK/steve,willkg/steve,CarlFK/steve,pyvideo/steve
####################################################################### # This file is part of steve. # # Copyright (C) 2012 Will Kahn-Greene # # steve is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # steve is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with steve. If not, see <http://www.gnu.org/licenses/>. ####################################################################### __version__ = '0.1' __releasedate__ = '' Add version number notes; set to dev
####################################################################### # This file is part of steve. # # Copyright (C) 2012 Will Kahn-Greene # # steve is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # steve is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with steve. If not, see <http://www.gnu.org/licenses/>. ####################################################################### # See http://www.python.org/dev/peps/pep-0386/ # Examples: # * 1.0.dev # * 1.0a2 # * 1.0b2 # * 1.0 __version__ = '0.1.dev' __releasedate__ = ''
<commit_before>####################################################################### # This file is part of steve. # # Copyright (C) 2012 Will Kahn-Greene # # steve is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # steve is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with steve. If not, see <http://www.gnu.org/licenses/>. ####################################################################### __version__ = '0.1' __releasedate__ = '' <commit_msg>Add version number notes; set to dev<commit_after>
####################################################################### # This file is part of steve. # # Copyright (C) 2012 Will Kahn-Greene # # steve is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # steve is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with steve. If not, see <http://www.gnu.org/licenses/>. ####################################################################### # See http://www.python.org/dev/peps/pep-0386/ # Examples: # * 1.0.dev # * 1.0a2 # * 1.0b2 # * 1.0 __version__ = '0.1.dev' __releasedate__ = ''
####################################################################### # This file is part of steve. # # Copyright (C) 2012 Will Kahn-Greene # # steve is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # steve is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with steve. If not, see <http://www.gnu.org/licenses/>. ####################################################################### __version__ = '0.1' __releasedate__ = '' Add version number notes; set to dev####################################################################### # This file is part of steve. # # Copyright (C) 2012 Will Kahn-Greene # # steve is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # steve is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with steve. If not, see <http://www.gnu.org/licenses/>. ####################################################################### # See http://www.python.org/dev/peps/pep-0386/ # Examples: # * 1.0.dev # * 1.0a2 # * 1.0b2 # * 1.0 __version__ = '0.1.dev' __releasedate__ = ''
<commit_before>####################################################################### # This file is part of steve. # # Copyright (C) 2012 Will Kahn-Greene # # steve is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # steve is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with steve. If not, see <http://www.gnu.org/licenses/>. ####################################################################### __version__ = '0.1' __releasedate__ = '' <commit_msg>Add version number notes; set to dev<commit_after>####################################################################### # This file is part of steve. # # Copyright (C) 2012 Will Kahn-Greene # # steve is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # steve is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with steve. If not, see <http://www.gnu.org/licenses/>. ####################################################################### # See http://www.python.org/dev/peps/pep-0386/ # Examples: # * 1.0.dev # * 1.0a2 # * 1.0b2 # * 1.0 __version__ = '0.1.dev' __releasedate__ = ''
ae5dab44cd1dc921398ec242d0718bbcebc09f37
test/test_pudl.py
test/test_pudl.py
"""Tests excercising the pudl module for use with PyTest.""" import pytest import pudl.pudl import pudl.ferc1 import pudl.constants as pc def test_init_db(): """Create a fresh PUDL DB and pull in some FERC1 & EIA923 data.""" pudl.ferc1.init_db(refyear=2015, years=range(2007, 2016), def_db=True, verbose=True, testing=True) pudl.pudl.init_db(ferc1_tables=pc.ferc1_pudl_tables, ferc1_years=range(2007, 2016), eia923_tables=pc.eia923_pudl_tables, eia923_years=range(2014, 2016), verbose=True, debug=False, testing=True) ferc1_engine = pudl.ferc1.db_connect_ferc1(testing=True) pudl.ferc1.drop_tables_ferc1(ferc1_engine) pudl_engine = pudl.pudl.db_connect_pudl(testing=True) pudl.pudl.drop_tables_pudl(pudl_engine)
"""Tests excercising the pudl module for use with PyTest.""" import pytest import pudl.pudl import pudl.ferc1 import pudl.constants as pc def test_init_db(): """Create a fresh PUDL DB and pull in some FERC1 & EIA923 data.""" pudl.ferc1.init_db(refyear=2015, years=range(2007, 2016), def_db=True, verbose=True, testing=True) pudl.pudl.init_db(ferc1_tables=pc.ferc1_pudl_tables, ferc1_years=range(2007, 2016), eia923_tables=pc.eia923_pudl_tables, eia923_years=range(2011, 2016), verbose=True, debug=False, testing=True) ferc1_engine = pudl.ferc1.db_connect_ferc1(testing=True) pudl.ferc1.drop_tables_ferc1(ferc1_engine) pudl_engine = pudl.pudl.db_connect_pudl(testing=True) pudl.pudl.drop_tables_pudl(pudl_engine)
Expand default test years for PUDL to 2011-2015
Expand default test years for PUDL to 2011-2015 Tests successfully passed for all EIA923 tables ingesting from 2011 through 2015 (2016 still needs some id_mapping or exhaustie ID love).
Python
mit
catalyst-cooperative/pudl,catalyst-cooperative/pudl
"""Tests excercising the pudl module for use with PyTest.""" import pytest import pudl.pudl import pudl.ferc1 import pudl.constants as pc def test_init_db(): """Create a fresh PUDL DB and pull in some FERC1 & EIA923 data.""" pudl.ferc1.init_db(refyear=2015, years=range(2007, 2016), def_db=True, verbose=True, testing=True) pudl.pudl.init_db(ferc1_tables=pc.ferc1_pudl_tables, ferc1_years=range(2007, 2016), eia923_tables=pc.eia923_pudl_tables, eia923_years=range(2014, 2016), verbose=True, debug=False, testing=True) ferc1_engine = pudl.ferc1.db_connect_ferc1(testing=True) pudl.ferc1.drop_tables_ferc1(ferc1_engine) pudl_engine = pudl.pudl.db_connect_pudl(testing=True) pudl.pudl.drop_tables_pudl(pudl_engine) Expand default test years for PUDL to 2011-2015 Tests successfully passed for all EIA923 tables ingesting from 2011 through 2015 (2016 still needs some id_mapping or exhaustie ID love).
"""Tests excercising the pudl module for use with PyTest.""" import pytest import pudl.pudl import pudl.ferc1 import pudl.constants as pc def test_init_db(): """Create a fresh PUDL DB and pull in some FERC1 & EIA923 data.""" pudl.ferc1.init_db(refyear=2015, years=range(2007, 2016), def_db=True, verbose=True, testing=True) pudl.pudl.init_db(ferc1_tables=pc.ferc1_pudl_tables, ferc1_years=range(2007, 2016), eia923_tables=pc.eia923_pudl_tables, eia923_years=range(2011, 2016), verbose=True, debug=False, testing=True) ferc1_engine = pudl.ferc1.db_connect_ferc1(testing=True) pudl.ferc1.drop_tables_ferc1(ferc1_engine) pudl_engine = pudl.pudl.db_connect_pudl(testing=True) pudl.pudl.drop_tables_pudl(pudl_engine)
<commit_before>"""Tests excercising the pudl module for use with PyTest.""" import pytest import pudl.pudl import pudl.ferc1 import pudl.constants as pc def test_init_db(): """Create a fresh PUDL DB and pull in some FERC1 & EIA923 data.""" pudl.ferc1.init_db(refyear=2015, years=range(2007, 2016), def_db=True, verbose=True, testing=True) pudl.pudl.init_db(ferc1_tables=pc.ferc1_pudl_tables, ferc1_years=range(2007, 2016), eia923_tables=pc.eia923_pudl_tables, eia923_years=range(2014, 2016), verbose=True, debug=False, testing=True) ferc1_engine = pudl.ferc1.db_connect_ferc1(testing=True) pudl.ferc1.drop_tables_ferc1(ferc1_engine) pudl_engine = pudl.pudl.db_connect_pudl(testing=True) pudl.pudl.drop_tables_pudl(pudl_engine) <commit_msg>Expand default test years for PUDL to 2011-2015 Tests successfully passed for all EIA923 tables ingesting from 2011 through 2015 (2016 still needs some id_mapping or exhaustie ID love).<commit_after>
"""Tests excercising the pudl module for use with PyTest.""" import pytest import pudl.pudl import pudl.ferc1 import pudl.constants as pc def test_init_db(): """Create a fresh PUDL DB and pull in some FERC1 & EIA923 data.""" pudl.ferc1.init_db(refyear=2015, years=range(2007, 2016), def_db=True, verbose=True, testing=True) pudl.pudl.init_db(ferc1_tables=pc.ferc1_pudl_tables, ferc1_years=range(2007, 2016), eia923_tables=pc.eia923_pudl_tables, eia923_years=range(2011, 2016), verbose=True, debug=False, testing=True) ferc1_engine = pudl.ferc1.db_connect_ferc1(testing=True) pudl.ferc1.drop_tables_ferc1(ferc1_engine) pudl_engine = pudl.pudl.db_connect_pudl(testing=True) pudl.pudl.drop_tables_pudl(pudl_engine)
"""Tests excercising the pudl module for use with PyTest.""" import pytest import pudl.pudl import pudl.ferc1 import pudl.constants as pc def test_init_db(): """Create a fresh PUDL DB and pull in some FERC1 & EIA923 data.""" pudl.ferc1.init_db(refyear=2015, years=range(2007, 2016), def_db=True, verbose=True, testing=True) pudl.pudl.init_db(ferc1_tables=pc.ferc1_pudl_tables, ferc1_years=range(2007, 2016), eia923_tables=pc.eia923_pudl_tables, eia923_years=range(2014, 2016), verbose=True, debug=False, testing=True) ferc1_engine = pudl.ferc1.db_connect_ferc1(testing=True) pudl.ferc1.drop_tables_ferc1(ferc1_engine) pudl_engine = pudl.pudl.db_connect_pudl(testing=True) pudl.pudl.drop_tables_pudl(pudl_engine) Expand default test years for PUDL to 2011-2015 Tests successfully passed for all EIA923 tables ingesting from 2011 through 2015 (2016 still needs some id_mapping or exhaustie ID love)."""Tests excercising the pudl module for use with PyTest.""" import pytest import pudl.pudl import pudl.ferc1 import pudl.constants as pc def test_init_db(): """Create a fresh PUDL DB and pull in some FERC1 & EIA923 data.""" pudl.ferc1.init_db(refyear=2015, years=range(2007, 2016), def_db=True, verbose=True, testing=True) pudl.pudl.init_db(ferc1_tables=pc.ferc1_pudl_tables, ferc1_years=range(2007, 2016), eia923_tables=pc.eia923_pudl_tables, eia923_years=range(2011, 2016), verbose=True, debug=False, testing=True) ferc1_engine = pudl.ferc1.db_connect_ferc1(testing=True) pudl.ferc1.drop_tables_ferc1(ferc1_engine) pudl_engine = pudl.pudl.db_connect_pudl(testing=True) pudl.pudl.drop_tables_pudl(pudl_engine)
<commit_before>"""Tests excercising the pudl module for use with PyTest.""" import pytest import pudl.pudl import pudl.ferc1 import pudl.constants as pc def test_init_db(): """Create a fresh PUDL DB and pull in some FERC1 & EIA923 data.""" pudl.ferc1.init_db(refyear=2015, years=range(2007, 2016), def_db=True, verbose=True, testing=True) pudl.pudl.init_db(ferc1_tables=pc.ferc1_pudl_tables, ferc1_years=range(2007, 2016), eia923_tables=pc.eia923_pudl_tables, eia923_years=range(2014, 2016), verbose=True, debug=False, testing=True) ferc1_engine = pudl.ferc1.db_connect_ferc1(testing=True) pudl.ferc1.drop_tables_ferc1(ferc1_engine) pudl_engine = pudl.pudl.db_connect_pudl(testing=True) pudl.pudl.drop_tables_pudl(pudl_engine) <commit_msg>Expand default test years for PUDL to 2011-2015 Tests successfully passed for all EIA923 tables ingesting from 2011 through 2015 (2016 still needs some id_mapping or exhaustie ID love).<commit_after>"""Tests excercising the pudl module for use with PyTest.""" import pytest import pudl.pudl import pudl.ferc1 import pudl.constants as pc def test_init_db(): """Create a fresh PUDL DB and pull in some FERC1 & EIA923 data.""" pudl.ferc1.init_db(refyear=2015, years=range(2007, 2016), def_db=True, verbose=True, testing=True) pudl.pudl.init_db(ferc1_tables=pc.ferc1_pudl_tables, ferc1_years=range(2007, 2016), eia923_tables=pc.eia923_pudl_tables, eia923_years=range(2011, 2016), verbose=True, debug=False, testing=True) ferc1_engine = pudl.ferc1.db_connect_ferc1(testing=True) pudl.ferc1.drop_tables_ferc1(ferc1_engine) pudl_engine = pudl.pudl.db_connect_pudl(testing=True) pudl.pudl.drop_tables_pudl(pudl_engine)
5487126bfc3c4fd16243b9c7e00b204f2f8d7374
tests/test_znc.py
tests/test_znc.py
def test_service_running(Service): service = Service('znc') assert service.is_running def test_socket_listening(Socket): socket = Socket('tcp://127.0.0.1:6666') assert socket.is_listening
from testinfra.utils.ansible_runner import AnsibleRunner testinfra_hosts = AnsibleRunner('.molecule/ansible_inventory').get_hosts('all') def test_service_enabled(Service): service = Service('znc') assert service.is_enabled def test_service_running(Service): service = Service('znc') assert service.is_running def test_socket_listening_ipv4(Socket): socket = Socket('tcp://0.0.0.0:6666') assert socket.is_listening def test_socket_listening_ipv6(Socket): socket = Socket('tcp://:::6666') assert not socket.is_listening
Tweak the infratest a bit
Tweak the infratest a bit
Python
mit
triplepoint/ansible-znc
def test_service_running(Service): service = Service('znc') assert service.is_running def test_socket_listening(Socket): socket = Socket('tcp://127.0.0.1:6666') assert socket.is_listening Tweak the infratest a bit
from testinfra.utils.ansible_runner import AnsibleRunner testinfra_hosts = AnsibleRunner('.molecule/ansible_inventory').get_hosts('all') def test_service_enabled(Service): service = Service('znc') assert service.is_enabled def test_service_running(Service): service = Service('znc') assert service.is_running def test_socket_listening_ipv4(Socket): socket = Socket('tcp://0.0.0.0:6666') assert socket.is_listening def test_socket_listening_ipv6(Socket): socket = Socket('tcp://:::6666') assert not socket.is_listening
<commit_before>def test_service_running(Service): service = Service('znc') assert service.is_running def test_socket_listening(Socket): socket = Socket('tcp://127.0.0.1:6666') assert socket.is_listening <commit_msg>Tweak the infratest a bit<commit_after>
from testinfra.utils.ansible_runner import AnsibleRunner testinfra_hosts = AnsibleRunner('.molecule/ansible_inventory').get_hosts('all') def test_service_enabled(Service): service = Service('znc') assert service.is_enabled def test_service_running(Service): service = Service('znc') assert service.is_running def test_socket_listening_ipv4(Socket): socket = Socket('tcp://0.0.0.0:6666') assert socket.is_listening def test_socket_listening_ipv6(Socket): socket = Socket('tcp://:::6666') assert not socket.is_listening
def test_service_running(Service): service = Service('znc') assert service.is_running def test_socket_listening(Socket): socket = Socket('tcp://127.0.0.1:6666') assert socket.is_listening Tweak the infratest a bitfrom testinfra.utils.ansible_runner import AnsibleRunner testinfra_hosts = AnsibleRunner('.molecule/ansible_inventory').get_hosts('all') def test_service_enabled(Service): service = Service('znc') assert service.is_enabled def test_service_running(Service): service = Service('znc') assert service.is_running def test_socket_listening_ipv4(Socket): socket = Socket('tcp://0.0.0.0:6666') assert socket.is_listening def test_socket_listening_ipv6(Socket): socket = Socket('tcp://:::6666') assert not socket.is_listening
<commit_before>def test_service_running(Service): service = Service('znc') assert service.is_running def test_socket_listening(Socket): socket = Socket('tcp://127.0.0.1:6666') assert socket.is_listening <commit_msg>Tweak the infratest a bit<commit_after>from testinfra.utils.ansible_runner import AnsibleRunner testinfra_hosts = AnsibleRunner('.molecule/ansible_inventory').get_hosts('all') def test_service_enabled(Service): service = Service('znc') assert service.is_enabled def test_service_running(Service): service = Service('znc') assert service.is_running def test_socket_listening_ipv4(Socket): socket = Socket('tcp://0.0.0.0:6666') assert socket.is_listening def test_socket_listening_ipv6(Socket): socket = Socket('tcp://:::6666') assert not socket.is_listening
67b18247d48cc0a6e13526fdbe28756ea67e5166
shuup/guide/settings.py
shuup/guide/settings.py
# This file is part of Shuup. # # Copyright (c) 2012-2016, Shoop Ltd. All rights reserved. # # This source code is licensed under the AGPLv3 license found in the # LICENSE file in the root directory of this source tree. #: ReadtheDocs API URL #: #: URL for fetching search results via ReadtheDocs API. SHUUP_GUIDE_API_URL = "https://readthedocs.org/api/v2/search/?project=shoop-guide&version=latest&" #: ReadtheDocs link URL #: #: URL for manually linking search query link. Query parameters are #: added to end of URL when constructing link. SHUUP_GUIDE_LINK_URL = "http://shoop-guide.readthedocs.io/en/latest/search.html?check_keywords=yes&area=default&" #: Whether or not to fetch search results from ReadtheDocs #: #: If true, fetch results via the ReadtheDocs API, otherwise only #: display a link to RTD search page. SHUUP_GUIDE_FETCH_RESULTS = True #: Timeout limit for fetching search results #: #: Time limit in seconds before a search result request should #: timeout, so as not to block search results in case of slow response. SHUUP_GUIDE_TIMEOUT_LIMIT = 2
# This file is part of Shuup. # # Copyright (c) 2012-2016, Shoop Ltd. All rights reserved. # # This source code is licensed under the AGPLv3 license found in the # LICENSE file in the root directory of this source tree. #: ReadtheDocs API URL #: #: URL for fetching search results via ReadtheDocs API. SHUUP_GUIDE_API_URL = "https://readthedocs.org/api/v2/search/?project=shoop-guide&version=latest&" #: ReadtheDocs link URL #: #: URL for manually linking search query link. Query parameters are #: added to end of URL when constructing link. SHUUP_GUIDE_LINK_URL = "http://shuup-guide.readthedocs.io/en/latest/search.html?check_keywords=yes&area=default&" #: Whether or not to fetch search results from ReadtheDocs #: #: If true, fetch results via the ReadtheDocs API, otherwise only #: display a link to RTD search page. SHUUP_GUIDE_FETCH_RESULTS = True #: Timeout limit for fetching search results #: #: Time limit in seconds before a search result request should #: timeout, so as not to block search results in case of slow response. SHUUP_GUIDE_TIMEOUT_LIMIT = 2
Update guide url to shuup-guide.readthedocs.io
Update guide url to shuup-guide.readthedocs.io Refs SHUUP-3188
Python
agpl-3.0
shoopio/shoop,shoopio/shoop,suutari/shoop,suutari-ai/shoop,shoopio/shoop,suutari-ai/shoop,shawnadelic/shuup,suutari/shoop,shawnadelic/shuup,hrayr-artunyan/shuup,shawnadelic/shuup,hrayr-artunyan/shuup,suutari-ai/shoop,suutari/shoop,hrayr-artunyan/shuup
# This file is part of Shuup. # # Copyright (c) 2012-2016, Shoop Ltd. All rights reserved. # # This source code is licensed under the AGPLv3 license found in the # LICENSE file in the root directory of this source tree. #: ReadtheDocs API URL #: #: URL for fetching search results via ReadtheDocs API. SHUUP_GUIDE_API_URL = "https://readthedocs.org/api/v2/search/?project=shoop-guide&version=latest&" #: ReadtheDocs link URL #: #: URL for manually linking search query link. Query parameters are #: added to end of URL when constructing link. SHUUP_GUIDE_LINK_URL = "http://shoop-guide.readthedocs.io/en/latest/search.html?check_keywords=yes&area=default&" #: Whether or not to fetch search results from ReadtheDocs #: #: If true, fetch results via the ReadtheDocs API, otherwise only #: display a link to RTD search page. SHUUP_GUIDE_FETCH_RESULTS = True #: Timeout limit for fetching search results #: #: Time limit in seconds before a search result request should #: timeout, so as not to block search results in case of slow response. SHUUP_GUIDE_TIMEOUT_LIMIT = 2 Update guide url to shuup-guide.readthedocs.io Refs SHUUP-3188
# This file is part of Shuup. # # Copyright (c) 2012-2016, Shoop Ltd. All rights reserved. # # This source code is licensed under the AGPLv3 license found in the # LICENSE file in the root directory of this source tree. #: ReadtheDocs API URL #: #: URL for fetching search results via ReadtheDocs API. SHUUP_GUIDE_API_URL = "https://readthedocs.org/api/v2/search/?project=shoop-guide&version=latest&" #: ReadtheDocs link URL #: #: URL for manually linking search query link. Query parameters are #: added to end of URL when constructing link. SHUUP_GUIDE_LINK_URL = "http://shuup-guide.readthedocs.io/en/latest/search.html?check_keywords=yes&area=default&" #: Whether or not to fetch search results from ReadtheDocs #: #: If true, fetch results via the ReadtheDocs API, otherwise only #: display a link to RTD search page. SHUUP_GUIDE_FETCH_RESULTS = True #: Timeout limit for fetching search results #: #: Time limit in seconds before a search result request should #: timeout, so as not to block search results in case of slow response. SHUUP_GUIDE_TIMEOUT_LIMIT = 2
<commit_before># This file is part of Shuup. # # Copyright (c) 2012-2016, Shoop Ltd. All rights reserved. # # This source code is licensed under the AGPLv3 license found in the # LICENSE file in the root directory of this source tree. #: ReadtheDocs API URL #: #: URL for fetching search results via ReadtheDocs API. SHUUP_GUIDE_API_URL = "https://readthedocs.org/api/v2/search/?project=shoop-guide&version=latest&" #: ReadtheDocs link URL #: #: URL for manually linking search query link. Query parameters are #: added to end of URL when constructing link. SHUUP_GUIDE_LINK_URL = "http://shoop-guide.readthedocs.io/en/latest/search.html?check_keywords=yes&area=default&" #: Whether or not to fetch search results from ReadtheDocs #: #: If true, fetch results via the ReadtheDocs API, otherwise only #: display a link to RTD search page. SHUUP_GUIDE_FETCH_RESULTS = True #: Timeout limit for fetching search results #: #: Time limit in seconds before a search result request should #: timeout, so as not to block search results in case of slow response. SHUUP_GUIDE_TIMEOUT_LIMIT = 2 <commit_msg>Update guide url to shuup-guide.readthedocs.io Refs SHUUP-3188<commit_after>
# This file is part of Shuup. # # Copyright (c) 2012-2016, Shoop Ltd. All rights reserved. # # This source code is licensed under the AGPLv3 license found in the # LICENSE file in the root directory of this source tree. #: ReadtheDocs API URL #: #: URL for fetching search results via ReadtheDocs API. SHUUP_GUIDE_API_URL = "https://readthedocs.org/api/v2/search/?project=shoop-guide&version=latest&" #: ReadtheDocs link URL #: #: URL for manually linking search query link. Query parameters are #: added to end of URL when constructing link. SHUUP_GUIDE_LINK_URL = "http://shuup-guide.readthedocs.io/en/latest/search.html?check_keywords=yes&area=default&" #: Whether or not to fetch search results from ReadtheDocs #: #: If true, fetch results via the ReadtheDocs API, otherwise only #: display a link to RTD search page. SHUUP_GUIDE_FETCH_RESULTS = True #: Timeout limit for fetching search results #: #: Time limit in seconds before a search result request should #: timeout, so as not to block search results in case of slow response. SHUUP_GUIDE_TIMEOUT_LIMIT = 2
# This file is part of Shuup. # # Copyright (c) 2012-2016, Shoop Ltd. All rights reserved. # # This source code is licensed under the AGPLv3 license found in the # LICENSE file in the root directory of this source tree. #: ReadtheDocs API URL #: #: URL for fetching search results via ReadtheDocs API. SHUUP_GUIDE_API_URL = "https://readthedocs.org/api/v2/search/?project=shoop-guide&version=latest&" #: ReadtheDocs link URL #: #: URL for manually linking search query link. Query parameters are #: added to end of URL when constructing link. SHUUP_GUIDE_LINK_URL = "http://shoop-guide.readthedocs.io/en/latest/search.html?check_keywords=yes&area=default&" #: Whether or not to fetch search results from ReadtheDocs #: #: If true, fetch results via the ReadtheDocs API, otherwise only #: display a link to RTD search page. SHUUP_GUIDE_FETCH_RESULTS = True #: Timeout limit for fetching search results #: #: Time limit in seconds before a search result request should #: timeout, so as not to block search results in case of slow response. SHUUP_GUIDE_TIMEOUT_LIMIT = 2 Update guide url to shuup-guide.readthedocs.io Refs SHUUP-3188# This file is part of Shuup. # # Copyright (c) 2012-2016, Shoop Ltd. All rights reserved. # # This source code is licensed under the AGPLv3 license found in the # LICENSE file in the root directory of this source tree. #: ReadtheDocs API URL #: #: URL for fetching search results via ReadtheDocs API. SHUUP_GUIDE_API_URL = "https://readthedocs.org/api/v2/search/?project=shoop-guide&version=latest&" #: ReadtheDocs link URL #: #: URL for manually linking search query link. Query parameters are #: added to end of URL when constructing link. SHUUP_GUIDE_LINK_URL = "http://shuup-guide.readthedocs.io/en/latest/search.html?check_keywords=yes&area=default&" #: Whether or not to fetch search results from ReadtheDocs #: #: If true, fetch results via the ReadtheDocs API, otherwise only #: display a link to RTD search page. SHUUP_GUIDE_FETCH_RESULTS = True #: Timeout limit for fetching search results #: #: Time limit in seconds before a search result request should #: timeout, so as not to block search results in case of slow response. SHUUP_GUIDE_TIMEOUT_LIMIT = 2
<commit_before># This file is part of Shuup. # # Copyright (c) 2012-2016, Shoop Ltd. All rights reserved. # # This source code is licensed under the AGPLv3 license found in the # LICENSE file in the root directory of this source tree. #: ReadtheDocs API URL #: #: URL for fetching search results via ReadtheDocs API. SHUUP_GUIDE_API_URL = "https://readthedocs.org/api/v2/search/?project=shoop-guide&version=latest&" #: ReadtheDocs link URL #: #: URL for manually linking search query link. Query parameters are #: added to end of URL when constructing link. SHUUP_GUIDE_LINK_URL = "http://shoop-guide.readthedocs.io/en/latest/search.html?check_keywords=yes&area=default&" #: Whether or not to fetch search results from ReadtheDocs #: #: If true, fetch results via the ReadtheDocs API, otherwise only #: display a link to RTD search page. SHUUP_GUIDE_FETCH_RESULTS = True #: Timeout limit for fetching search results #: #: Time limit in seconds before a search result request should #: timeout, so as not to block search results in case of slow response. SHUUP_GUIDE_TIMEOUT_LIMIT = 2 <commit_msg>Update guide url to shuup-guide.readthedocs.io Refs SHUUP-3188<commit_after># This file is part of Shuup. # # Copyright (c) 2012-2016, Shoop Ltd. All rights reserved. # # This source code is licensed under the AGPLv3 license found in the # LICENSE file in the root directory of this source tree. #: ReadtheDocs API URL #: #: URL for fetching search results via ReadtheDocs API. SHUUP_GUIDE_API_URL = "https://readthedocs.org/api/v2/search/?project=shoop-guide&version=latest&" #: ReadtheDocs link URL #: #: URL for manually linking search query link. Query parameters are #: added to end of URL when constructing link. SHUUP_GUIDE_LINK_URL = "http://shuup-guide.readthedocs.io/en/latest/search.html?check_keywords=yes&area=default&" #: Whether or not to fetch search results from ReadtheDocs #: #: If true, fetch results via the ReadtheDocs API, otherwise only #: display a link to RTD search page. SHUUP_GUIDE_FETCH_RESULTS = True #: Timeout limit for fetching search results #: #: Time limit in seconds before a search result request should #: timeout, so as not to block search results in case of slow response. SHUUP_GUIDE_TIMEOUT_LIMIT = 2
0dd9fba16a73954a3bbb18c5b2de9995c07ef56f
pushbullet/filetype.py
pushbullet/filetype.py
def _magic_get_file_type(f, _):
    file_type = magic.from_buffer(f.read(1024), mime=True)
    f.seek(0)
    return file_type


def _guess_file_type(_, filename):
    return mimetypes.guess_type(filename)[0]


try:
    import magic
except ImportError:
    import mimetypes
    get_file_type = _guess_file_type
else:
    get_file_type = _magic_get_file_type
def _magic_get_file_type(f, _):
    file_type = magic.from_buffer(f.read(1024), mime=True)
    f.seek(0)
    return file_type.decode("ASCII")


def _guess_file_type(_, filename):
    return mimetypes.guess_type(filename)[0]


try:
    import magic
except ImportError:
    import mimetypes
    get_file_type = _guess_file_type
else:
    get_file_type = _magic_get_file_type
Fix libmagic issue with Python 3
Fix libmagic issue with Python 3
Python
mit
kovacsbalu/pushbullet.py,randomchars/pushbullet.py,Saturn/pushbullet.py
def _magic_get_file_type(f, _): file_type = magic.from_buffer(f.read(1024), mime=True) f.seek(0) return file_type def _guess_file_type(_, filename): return mimetypes.guess_type(filename)[0] try: import magic except ImportError: import mimetypes get_file_type = _guess_file_type else: get_file_type = _magic_get_file_type Fix libmagic issue with Python 3
def _magic_get_file_type(f, _): file_type = magic.from_buffer(f.read(1024), mime=True) f.seek(0) return file_type.decode("ASCII") def _guess_file_type(_, filename): return mimetypes.guess_type(filename)[0] try: import magic except ImportError: import mimetypes get_file_type = _guess_file_type else: get_file_type = _magic_get_file_type
<commit_before>def _magic_get_file_type(f, _): file_type = magic.from_buffer(f.read(1024), mime=True) f.seek(0) return file_type def _guess_file_type(_, filename): return mimetypes.guess_type(filename)[0] try: import magic except ImportError: import mimetypes get_file_type = _guess_file_type else: get_file_type = _magic_get_file_type <commit_msg>Fix libmagic issue with Python 3<commit_after>
def _magic_get_file_type(f, _): file_type = magic.from_buffer(f.read(1024), mime=True) f.seek(0) return file_type.decode("ASCII") def _guess_file_type(_, filename): return mimetypes.guess_type(filename)[0] try: import magic except ImportError: import mimetypes get_file_type = _guess_file_type else: get_file_type = _magic_get_file_type
def _magic_get_file_type(f, _): file_type = magic.from_buffer(f.read(1024), mime=True) f.seek(0) return file_type def _guess_file_type(_, filename): return mimetypes.guess_type(filename)[0] try: import magic except ImportError: import mimetypes get_file_type = _guess_file_type else: get_file_type = _magic_get_file_type Fix libmagic issue with Python 3def _magic_get_file_type(f, _): file_type = magic.from_buffer(f.read(1024), mime=True) f.seek(0) return file_type.decode("ASCII") def _guess_file_type(_, filename): return mimetypes.guess_type(filename)[0] try: import magic except ImportError: import mimetypes get_file_type = _guess_file_type else: get_file_type = _magic_get_file_type
<commit_before>def _magic_get_file_type(f, _): file_type = magic.from_buffer(f.read(1024), mime=True) f.seek(0) return file_type def _guess_file_type(_, filename): return mimetypes.guess_type(filename)[0] try: import magic except ImportError: import mimetypes get_file_type = _guess_file_type else: get_file_type = _magic_get_file_type <commit_msg>Fix libmagic issue with Python 3<commit_after>def _magic_get_file_type(f, _): file_type = magic.from_buffer(f.read(1024), mime=True) f.seek(0) return file_type.decode("ASCII") def _guess_file_type(_, filename): return mimetypes.guess_type(filename)[0] try: import magic except ImportError: import mimetypes get_file_type = _guess_file_type else: get_file_type = _magic_get_file_type
9a7a0c015d0a1d23ae62ad45bcb9db0b58f4ed3e
clintools/deployed_settings.py
clintools/deployed_settings.py
from settings import *

DEBUG = TEMPLATE_DEBUG = False

ALLOWED_HOSTS = ['pttrack.snhc.wustl.edu']

with open(os.path.join(BASE_DIR, 'secrets/secret_key.txt')) as f:
    SECRET_KEY = f.read().strip()

SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_BROWSER_XSS_FILTER = True
SECURE_SSL_REDIRECT = True
SESSION_COOKIE_SECURE = True
CSRF_COOKIE_SECURE = True
CSRF_COOKIE_HTTPONLY = True
X_FRAME_OPTIONS = 'DENY'

# TODO: change for deployment?
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
from settings import *

DEBUG = TEMPLATE_DEBUG = False

ALLOWED_HOSTS = ['pttrack.snhc.wustl.edu']

with open(os.path.join(BASE_DIR, 'secrets/secret_key.txt')) as f:
    SECRET_KEY = f.read().strip()

SECURE_CONTENT_TYPE_NOSNIFF = True
SECURE_BROWSER_XSS_FILTER = True
SECURE_SSL_REDIRECT = True
SESSION_COOKIE_SECURE = True
CSRF_COOKIE_SECURE = True
CSRF_COOKIE_HTTPONLY = True
X_FRAME_OPTIONS = 'DENY'

with open(os.path.join(BASE_DIR, 'secrets/database_password.txt')) as f:
    DB_PASSWORD = f.read().strip()

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': 'osler',
        'USER': 'django',
        'PASSWORD': DB_PASSWORD,
        'HOST': 'localhost',  # Or an IP Address that your DB is hosted on
        'PORT': '3306',
    }
}
Update deployed settings to access mysql.
Update deployed settings to access mysql.
Python
mit
SaturdayNeighborhoodHealthClinic/clintools,SaturdayNeighborhoodHealthClinic/clintools,SaturdayNeighborhoodHealthClinic/clintools
from settings import * DEBUG = TEMPLATE_DEBUG = False ALLOWED_HOSTS = ['pttrack.snhc.wustl.edu'] with open(os.path.join(BASE_DIR, 'secrets/secret_key.txt')) as f: SECRET_KEY = f.read().strip() SECURE_CONTENT_TYPE_NOSNIFF = True SECURE_BROWSER_XSS_FILTER = True SECURE_SSL_REDIRECT = True SESSION_COOKIE_SECURE = True CSRF_COOKIE_SECURE = True CSRF_COOKIE_HTTPONLY = True X_FRAME_OPTIONS = 'DENY' # TODO: change for deployment? # https://docs.djangoproject.com/en/1.8/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } }Update deployed settings to access mysql.
from settings import * DEBUG = TEMPLATE_DEBUG = False ALLOWED_HOSTS = ['pttrack.snhc.wustl.edu'] with open(os.path.join(BASE_DIR, 'secrets/secret_key.txt')) as f: SECRET_KEY = f.read().strip() SECURE_CONTENT_TYPE_NOSNIFF = True SECURE_BROWSER_XSS_FILTER = True SECURE_SSL_REDIRECT = True SESSION_COOKIE_SECURE = True CSRF_COOKIE_SECURE = True CSRF_COOKIE_HTTPONLY = True X_FRAME_OPTIONS = 'DENY' with open(os.path.join(BASE_DIR, 'secrets/database_password.txt')) as f: DB_PASSWORD = f.read().strip() DATABASES = { 'default': { 'ENGINE': 'django.db.backends.mysql', 'NAME': 'osler', 'USER': 'django', 'PASSWORD': DB_PASSWORD, 'HOST': 'localhost', # Or an IP Address that your DB is hosted on 'PORT': '3306', } }
<commit_before>from settings import * DEBUG = TEMPLATE_DEBUG = False ALLOWED_HOSTS = ['pttrack.snhc.wustl.edu'] with open(os.path.join(BASE_DIR, 'secrets/secret_key.txt')) as f: SECRET_KEY = f.read().strip() SECURE_CONTENT_TYPE_NOSNIFF = True SECURE_BROWSER_XSS_FILTER = True SECURE_SSL_REDIRECT = True SESSION_COOKIE_SECURE = True CSRF_COOKIE_SECURE = True CSRF_COOKIE_HTTPONLY = True X_FRAME_OPTIONS = 'DENY' # TODO: change for deployment? # https://docs.djangoproject.com/en/1.8/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } }<commit_msg>Update deployed settings to access mysql.<commit_after>
from settings import * DEBUG = TEMPLATE_DEBUG = False ALLOWED_HOSTS = ['pttrack.snhc.wustl.edu'] with open(os.path.join(BASE_DIR, 'secrets/secret_key.txt')) as f: SECRET_KEY = f.read().strip() SECURE_CONTENT_TYPE_NOSNIFF = True SECURE_BROWSER_XSS_FILTER = True SECURE_SSL_REDIRECT = True SESSION_COOKIE_SECURE = True CSRF_COOKIE_SECURE = True CSRF_COOKIE_HTTPONLY = True X_FRAME_OPTIONS = 'DENY' with open(os.path.join(BASE_DIR, 'secrets/database_password.txt')) as f: DB_PASSWORD = f.read().strip() DATABASES = { 'default': { 'ENGINE': 'django.db.backends.mysql', 'NAME': 'osler', 'USER': 'django', 'PASSWORD': DB_PASSWORD, 'HOST': 'localhost', # Or an IP Address that your DB is hosted on 'PORT': '3306', } }
from settings import * DEBUG = TEMPLATE_DEBUG = False ALLOWED_HOSTS = ['pttrack.snhc.wustl.edu'] with open(os.path.join(BASE_DIR, 'secrets/secret_key.txt')) as f: SECRET_KEY = f.read().strip() SECURE_CONTENT_TYPE_NOSNIFF = True SECURE_BROWSER_XSS_FILTER = True SECURE_SSL_REDIRECT = True SESSION_COOKIE_SECURE = True CSRF_COOKIE_SECURE = True CSRF_COOKIE_HTTPONLY = True X_FRAME_OPTIONS = 'DENY' # TODO: change for deployment? # https://docs.djangoproject.com/en/1.8/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } }Update deployed settings to access mysql.from settings import * DEBUG = TEMPLATE_DEBUG = False ALLOWED_HOSTS = ['pttrack.snhc.wustl.edu'] with open(os.path.join(BASE_DIR, 'secrets/secret_key.txt')) as f: SECRET_KEY = f.read().strip() SECURE_CONTENT_TYPE_NOSNIFF = True SECURE_BROWSER_XSS_FILTER = True SECURE_SSL_REDIRECT = True SESSION_COOKIE_SECURE = True CSRF_COOKIE_SECURE = True CSRF_COOKIE_HTTPONLY = True X_FRAME_OPTIONS = 'DENY' with open(os.path.join(BASE_DIR, 'secrets/database_password.txt')) as f: DB_PASSWORD = f.read().strip() DATABASES = { 'default': { 'ENGINE': 'django.db.backends.mysql', 'NAME': 'osler', 'USER': 'django', 'PASSWORD': DB_PASSWORD, 'HOST': 'localhost', # Or an IP Address that your DB is hosted on 'PORT': '3306', } }
<commit_before>from settings import * DEBUG = TEMPLATE_DEBUG = False ALLOWED_HOSTS = ['pttrack.snhc.wustl.edu'] with open(os.path.join(BASE_DIR, 'secrets/secret_key.txt')) as f: SECRET_KEY = f.read().strip() SECURE_CONTENT_TYPE_NOSNIFF = True SECURE_BROWSER_XSS_FILTER = True SECURE_SSL_REDIRECT = True SESSION_COOKIE_SECURE = True CSRF_COOKIE_SECURE = True CSRF_COOKIE_HTTPONLY = True X_FRAME_OPTIONS = 'DENY' # TODO: change for deployment? # https://docs.djangoproject.com/en/1.8/ref/settings/#databases DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'), } }<commit_msg>Update deployed settings to access mysql.<commit_after>from settings import * DEBUG = TEMPLATE_DEBUG = False ALLOWED_HOSTS = ['pttrack.snhc.wustl.edu'] with open(os.path.join(BASE_DIR, 'secrets/secret_key.txt')) as f: SECRET_KEY = f.read().strip() SECURE_CONTENT_TYPE_NOSNIFF = True SECURE_BROWSER_XSS_FILTER = True SECURE_SSL_REDIRECT = True SESSION_COOKIE_SECURE = True CSRF_COOKIE_SECURE = True CSRF_COOKIE_HTTPONLY = True X_FRAME_OPTIONS = 'DENY' with open(os.path.join(BASE_DIR, 'secrets/database_password.txt')) as f: DB_PASSWORD = f.read().strip() DATABASES = { 'default': { 'ENGINE': 'django.db.backends.mysql', 'NAME': 'osler', 'USER': 'django', 'PASSWORD': DB_PASSWORD, 'HOST': 'localhost', # Or an IP Address that your DB is hosted on 'PORT': '3306', } }
5f6e8c9317f7c52198d6cd77fa819785072b5d6a
numba2/compiler/interpreter.py
numba2/compiler/interpreter.py
# -*- coding: utf-8 -*-

"""
IR interpreter. Run a series of translation phases on a numba function,
and interpreter the IR with the arguments.
"""

from __future__ import print_function, division, absolute_import

from numba2 import typeof, jit
from numba2.compiler.frontend import translate, interpret
from numba2.pipeline import environment, phase
from pykit.ir import interp

#===------------------------------------------------------------------===
# Helpers
#===------------------------------------------------------------------===

def expect(nb_func, phase, args, expected, handlers=None, debug=False):
    result = interpret(nb_func, phase, args, handlers, debug)
    assert result == expected, "Got %s, expected %s" % (result, expected)

def interpret(nb_func, phase, args, handlers=None, debug=False):
    # Translate numba function
    argtypes = [typeof(arg) for arg in args]
    env = environment.fresh_env(nb_func, argtypes)
    f, env = phase(nb_func, env)

    if debug:
        print("---------------- Interpreting function %s ----------------" % (
            f.name,))
        print(f)
        print("----------------------- End of %s ------------------------" % (
            f.name,))

    # Interpreter function
    env.setdefault('interp.handlers', {}).update(handlers or {})
    return interp.run(f, env, args=args)
# -*- coding: utf-8 -*-

"""
IR interpreter. Run a series of translation phases on a numba function,
and interpreter the IR with the arguments.
"""

from __future__ import print_function, division, absolute_import

from numba2 import typeof, jit
from numba2.compiler.frontend import translate, interpret
from numba2.pipeline import environment, phase
from pykit.ir import interp, tracing

#===------------------------------------------------------------------===
# Helpers
#===------------------------------------------------------------------===

def expect(nb_func, phase, args, expected, handlers=None, debug=False):
    result = interpret(nb_func, phase, args, handlers, debug)
    assert result == expected, "Got %s, expected %s" % (result, expected)

def interpret(nb_func, phase, args, handlers=None, debug=False):
    # Translate numba function
    argtypes = [typeof(arg) for arg in args]
    env = environment.fresh_env(nb_func, argtypes)
    f, env = phase(nb_func, env)

    if debug:
        print("---------------- Interpreting function %s ----------------" % (
            f.name,))
        print(f)
        print("----------------------- End of %s ------------------------" % (
            f.name,))
        tracer = tracing.Tracer()
    else:
        tracer = tracing.DummyTracer()

    # Interpreter function
    env.setdefault('interp.handlers', {}).update(handlers or {})
    return interp.run(f, env, args=args, tracer=tracer)
Use tracer in debug IR interpretation mode
Use tracer in debug IR interpretation mode
Python
bsd-2-clause
flypy/flypy,flypy/flypy
# -*- coding: utf-8 -*- """ IR interpreter. Run a series of translation phases on a numba function, and interpreter the IR with the arguments. """ from __future__ import print_function, division, absolute_import from numba2 import typeof, jit from numba2.compiler.frontend import translate, interpret from numba2.pipeline import environment, phase from pykit.ir import interp #===------------------------------------------------------------------=== # Helpers #===------------------------------------------------------------------=== def expect(nb_func, phase, args, expected, handlers=None, debug=False): result = interpret(nb_func, phase, args, handlers, debug) assert result == expected, "Got %s, expected %s" % (result, expected) def interpret(nb_func, phase, args, handlers=None, debug=False): # Translate numba function argtypes = [typeof(arg) for arg in args] env = environment.fresh_env(nb_func, argtypes) f, env = phase(nb_func, env) if debug: print("---------------- Interpreting function %s ----------------" % ( f.name,)) print(f) print("----------------------- End of %s ------------------------" % ( f.name,)) # Interpreter function env.setdefault('interp.handlers', {}).update(handlers or {}) return interp.run(f, env, args=args)Use tracer in debug IR interpretation mode
# -*- coding: utf-8 -*- """ IR interpreter. Run a series of translation phases on a numba function, and interpreter the IR with the arguments. """ from __future__ import print_function, division, absolute_import from numba2 import typeof, jit from numba2.compiler.frontend import translate, interpret from numba2.pipeline import environment, phase from pykit.ir import interp, tracing #===------------------------------------------------------------------=== # Helpers #===------------------------------------------------------------------=== def expect(nb_func, phase, args, expected, handlers=None, debug=False): result = interpret(nb_func, phase, args, handlers, debug) assert result == expected, "Got %s, expected %s" % (result, expected) def interpret(nb_func, phase, args, handlers=None, debug=False): # Translate numba function argtypes = [typeof(arg) for arg in args] env = environment.fresh_env(nb_func, argtypes) f, env = phase(nb_func, env) if debug: print("---------------- Interpreting function %s ----------------" % ( f.name,)) print(f) print("----------------------- End of %s ------------------------" % ( f.name,)) tracer = tracing.Tracer() else: tracer = tracing.DummyTracer() # Interpreter function env.setdefault('interp.handlers', {}).update(handlers or {}) return interp.run(f, env, args=args, tracer=tracer)
<commit_before># -*- coding: utf-8 -*- """ IR interpreter. Run a series of translation phases on a numba function, and interpreter the IR with the arguments. """ from __future__ import print_function, division, absolute_import from numba2 import typeof, jit from numba2.compiler.frontend import translate, interpret from numba2.pipeline import environment, phase from pykit.ir import interp #===------------------------------------------------------------------=== # Helpers #===------------------------------------------------------------------=== def expect(nb_func, phase, args, expected, handlers=None, debug=False): result = interpret(nb_func, phase, args, handlers, debug) assert result == expected, "Got %s, expected %s" % (result, expected) def interpret(nb_func, phase, args, handlers=None, debug=False): # Translate numba function argtypes = [typeof(arg) for arg in args] env = environment.fresh_env(nb_func, argtypes) f, env = phase(nb_func, env) if debug: print("---------------- Interpreting function %s ----------------" % ( f.name,)) print(f) print("----------------------- End of %s ------------------------" % ( f.name,)) # Interpreter function env.setdefault('interp.handlers', {}).update(handlers or {}) return interp.run(f, env, args=args)<commit_msg>Use tracer in debug IR interpretation mode<commit_after>
# -*- coding: utf-8 -*- """ IR interpreter. Run a series of translation phases on a numba function, and interpreter the IR with the arguments. """ from __future__ import print_function, division, absolute_import from numba2 import typeof, jit from numba2.compiler.frontend import translate, interpret from numba2.pipeline import environment, phase from pykit.ir import interp, tracing #===------------------------------------------------------------------=== # Helpers #===------------------------------------------------------------------=== def expect(nb_func, phase, args, expected, handlers=None, debug=False): result = interpret(nb_func, phase, args, handlers, debug) assert result == expected, "Got %s, expected %s" % (result, expected) def interpret(nb_func, phase, args, handlers=None, debug=False): # Translate numba function argtypes = [typeof(arg) for arg in args] env = environment.fresh_env(nb_func, argtypes) f, env = phase(nb_func, env) if debug: print("---------------- Interpreting function %s ----------------" % ( f.name,)) print(f) print("----------------------- End of %s ------------------------" % ( f.name,)) tracer = tracing.Tracer() else: tracer = tracing.DummyTracer() # Interpreter function env.setdefault('interp.handlers', {}).update(handlers or {}) return interp.run(f, env, args=args, tracer=tracer)
# -*- coding: utf-8 -*- """ IR interpreter. Run a series of translation phases on a numba function, and interpreter the IR with the arguments. """ from __future__ import print_function, division, absolute_import from numba2 import typeof, jit from numba2.compiler.frontend import translate, interpret from numba2.pipeline import environment, phase from pykit.ir import interp #===------------------------------------------------------------------=== # Helpers #===------------------------------------------------------------------=== def expect(nb_func, phase, args, expected, handlers=None, debug=False): result = interpret(nb_func, phase, args, handlers, debug) assert result == expected, "Got %s, expected %s" % (result, expected) def interpret(nb_func, phase, args, handlers=None, debug=False): # Translate numba function argtypes = [typeof(arg) for arg in args] env = environment.fresh_env(nb_func, argtypes) f, env = phase(nb_func, env) if debug: print("---------------- Interpreting function %s ----------------" % ( f.name,)) print(f) print("----------------------- End of %s ------------------------" % ( f.name,)) # Interpreter function env.setdefault('interp.handlers', {}).update(handlers or {}) return interp.run(f, env, args=args)Use tracer in debug IR interpretation mode# -*- coding: utf-8 -*- """ IR interpreter. Run a series of translation phases on a numba function, and interpreter the IR with the arguments. """ from __future__ import print_function, division, absolute_import from numba2 import typeof, jit from numba2.compiler.frontend import translate, interpret from numba2.pipeline import environment, phase from pykit.ir import interp, tracing #===------------------------------------------------------------------=== # Helpers #===------------------------------------------------------------------=== def expect(nb_func, phase, args, expected, handlers=None, debug=False): result = interpret(nb_func, phase, args, handlers, debug) assert result == expected, "Got %s, expected %s" % (result, expected) def interpret(nb_func, phase, args, handlers=None, debug=False): # Translate numba function argtypes = [typeof(arg) for arg in args] env = environment.fresh_env(nb_func, argtypes) f, env = phase(nb_func, env) if debug: print("---------------- Interpreting function %s ----------------" % ( f.name,)) print(f) print("----------------------- End of %s ------------------------" % ( f.name,)) tracer = tracing.Tracer() else: tracer = tracing.DummyTracer() # Interpreter function env.setdefault('interp.handlers', {}).update(handlers or {}) return interp.run(f, env, args=args, tracer=tracer)
<commit_before># -*- coding: utf-8 -*- """ IR interpreter. Run a series of translation phases on a numba function, and interpreter the IR with the arguments. """ from __future__ import print_function, division, absolute_import from numba2 import typeof, jit from numba2.compiler.frontend import translate, interpret from numba2.pipeline import environment, phase from pykit.ir import interp #===------------------------------------------------------------------=== # Helpers #===------------------------------------------------------------------=== def expect(nb_func, phase, args, expected, handlers=None, debug=False): result = interpret(nb_func, phase, args, handlers, debug) assert result == expected, "Got %s, expected %s" % (result, expected) def interpret(nb_func, phase, args, handlers=None, debug=False): # Translate numba function argtypes = [typeof(arg) for arg in args] env = environment.fresh_env(nb_func, argtypes) f, env = phase(nb_func, env) if debug: print("---------------- Interpreting function %s ----------------" % ( f.name,)) print(f) print("----------------------- End of %s ------------------------" % ( f.name,)) # Interpreter function env.setdefault('interp.handlers', {}).update(handlers or {}) return interp.run(f, env, args=args)<commit_msg>Use tracer in debug IR interpretation mode<commit_after># -*- coding: utf-8 -*- """ IR interpreter. Run a series of translation phases on a numba function, and interpreter the IR with the arguments. """ from __future__ import print_function, division, absolute_import from numba2 import typeof, jit from numba2.compiler.frontend import translate, interpret from numba2.pipeline import environment, phase from pykit.ir import interp, tracing #===------------------------------------------------------------------=== # Helpers #===------------------------------------------------------------------=== def expect(nb_func, phase, args, expected, handlers=None, debug=False): result = interpret(nb_func, phase, args, handlers, debug) assert result == expected, "Got %s, expected %s" % (result, expected) def interpret(nb_func, phase, args, handlers=None, debug=False): # Translate numba function argtypes = [typeof(arg) for arg in args] env = environment.fresh_env(nb_func, argtypes) f, env = phase(nb_func, env) if debug: print("---------------- Interpreting function %s ----------------" % ( f.name,)) print(f) print("----------------------- End of %s ------------------------" % ( f.name,)) tracer = tracing.Tracer() else: tracer = tracing.DummyTracer() # Interpreter function env.setdefault('interp.handlers', {}).update(handlers or {}) return interp.run(f, env, args=args, tracer=tracer)
bb897662f7f3fc17b32ffd06962fa5cb582fb6d7
easytz/middleware.py
easytz/middleware.py
from django.conf import settings
from django.utils import timezone

from pytz import UnknownTimeZoneError

from .models import TimezoneStore


class TimezonesMiddleware(object):

    def process_request(self, request):
        """
        Attempts to activate a timezone from a cookie or session
        """
        if getattr(settings, 'USE_TZ'):

            # check the cookie and the session
            tz = request.COOKIES.get('timezone')
            session_tz = request.session.get('timezone')
            tz = tz or session_tz

            if tz:
                try:
                    # attempt to activate the timezone. This might be an invalid
                    # timezone or none, so the rest of the logic following is coniditional
                    # on getting a valid timezone
                    timezone.activate(tz)

                    # check to see if the session needs to be updated
                    if request.user.is_authenticated() and session_tz != tz:
                        request.session['timezone'] = tz
                        request.session.save()

                        # the session had to change, lets update the users database entry
                        # to be safe
                        tz_store, created = TimezoneStore.objects.get_or_create(user = request.user)
                        tz_store.timezone = tz
                        tz_store.save()

                        request.user._timezone = tz_store

                except UnknownTimeZoneError:
                    pass
            else:
                timezone.deactivate()
from django.conf import settings
from django.utils import timezone

from pytz import UnknownTimeZoneError

from .models import TimezoneStore


class TimezonesMiddleware(object):

    def process_request(self, request):
        """
        Attempts to activate a timezone from a cookie or session
        """
        if getattr(settings, 'USE_TZ'):

            # check the cookie and the session
            tz = request.COOKIES.get('timezone')
            session_tz = request.session.get('timezone')
            tz = tz or session_tz

            if tz:
                try:
                    # attempt to activate the timezone. This might be an invalid
                    # timezone or none, so the rest of the logic following is coniditional
                    # on getting a valid timezone
                    timezone.activate(tz)

                    # caching the timezone inside the user instance
                    request.user._timezone = tz_store

                    # check to see if the session needs to be updated
                    if request.user.is_authenticated() and session_tz != tz:
                        request.session['timezone'] = tz
                        request.session.save()

                        # the session had to change, lets update the users database entry
                        # to be safe
                        tz_store, created = TimezoneStore.objects.get_or_create(user = request.user)
                        tz_store.timezone = tz
                        tz_store.save()

                except UnknownTimeZoneError:
                    pass
            else:
                timezone.deactivate()
Set the timezone right after it gets activated.
Set the timezone right after it gets activated.
Python
apache-2.0
jamesmfriedman/django-easytz
from django.conf import settings from django.utils import timezone from pytz import UnknownTimeZoneError from .models import TimezoneStore class TimezonesMiddleware(object): def process_request(self, request): """ Attempts to activate a timezone from a cookie or session """ if getattr(settings, 'USE_TZ'): # check the cookie and the session tz = request.COOKIES.get('timezone') session_tz = request.session.get('timezone') tz = tz or session_tz if tz: try: # attempt to activate the timezone. This might be an invalid # timezone or none, so the rest of the logic following is coniditional # on getting a valid timezone timezone.activate(tz) # check to see if the session needs to be updated if request.user.is_authenticated() and session_tz != tz: request.session['timezone'] = tz request.session.save() # the session had to change, lets update the users database entry # to be safe tz_store, created = TimezoneStore.objects.get_or_create(user = request.user) tz_store.timezone = tz tz_store.save() request.user._timezone = tz_store except UnknownTimeZoneError: pass else: timezone.deactivate() Set the timezone right after it gets activated.
from django.conf import settings from django.utils import timezone from pytz import UnknownTimeZoneError from .models import TimezoneStore class TimezonesMiddleware(object): def process_request(self, request): """ Attempts to activate a timezone from a cookie or session """ if getattr(settings, 'USE_TZ'): # check the cookie and the session tz = request.COOKIES.get('timezone') session_tz = request.session.get('timezone') tz = tz or session_tz if tz: try: # attempt to activate the timezone. This might be an invalid # timezone or none, so the rest of the logic following is coniditional # on getting a valid timezone timezone.activate(tz) # caching the timezone inside the user instance request.user._timezone = tz_store # check to see if the session needs to be updated if request.user.is_authenticated() and session_tz != tz: request.session['timezone'] = tz request.session.save() # the session had to change, lets update the users database entry # to be safe tz_store, created = TimezoneStore.objects.get_or_create(user = request.user) tz_store.timezone = tz tz_store.save() except UnknownTimeZoneError: pass else: timezone.deactivate()
<commit_before>from django.conf import settings from django.utils import timezone from pytz import UnknownTimeZoneError from .models import TimezoneStore class TimezonesMiddleware(object): def process_request(self, request): """ Attempts to activate a timezone from a cookie or session """ if getattr(settings, 'USE_TZ'): # check the cookie and the session tz = request.COOKIES.get('timezone') session_tz = request.session.get('timezone') tz = tz or session_tz if tz: try: # attempt to activate the timezone. This might be an invalid # timezone or none, so the rest of the logic following is coniditional # on getting a valid timezone timezone.activate(tz) # check to see if the session needs to be updated if request.user.is_authenticated() and session_tz != tz: request.session['timezone'] = tz request.session.save() # the session had to change, lets update the users database entry # to be safe tz_store, created = TimezoneStore.objects.get_or_create(user = request.user) tz_store.timezone = tz tz_store.save() request.user._timezone = tz_store except UnknownTimeZoneError: pass else: timezone.deactivate() <commit_msg>Set the timezone right after it gets activated.<commit_after>
from django.conf import settings from django.utils import timezone from pytz import UnknownTimeZoneError from .models import TimezoneStore class TimezonesMiddleware(object): def process_request(self, request): """ Attempts to activate a timezone from a cookie or session """ if getattr(settings, 'USE_TZ'): # check the cookie and the session tz = request.COOKIES.get('timezone') session_tz = request.session.get('timezone') tz = tz or session_tz if tz: try: # attempt to activate the timezone. This might be an invalid # timezone or none, so the rest of the logic following is coniditional # on getting a valid timezone timezone.activate(tz) # caching the timezone inside the user instance request.user._timezone = tz_store # check to see if the session needs to be updated if request.user.is_authenticated() and session_tz != tz: request.session['timezone'] = tz request.session.save() # the session had to change, lets update the users database entry # to be safe tz_store, created = TimezoneStore.objects.get_or_create(user = request.user) tz_store.timezone = tz tz_store.save() except UnknownTimeZoneError: pass else: timezone.deactivate()
from django.conf import settings from django.utils import timezone from pytz import UnknownTimeZoneError from .models import TimezoneStore class TimezonesMiddleware(object): def process_request(self, request): """ Attempts to activate a timezone from a cookie or session """ if getattr(settings, 'USE_TZ'): # check the cookie and the session tz = request.COOKIES.get('timezone') session_tz = request.session.get('timezone') tz = tz or session_tz if tz: try: # attempt to activate the timezone. This might be an invalid # timezone or none, so the rest of the logic following is coniditional # on getting a valid timezone timezone.activate(tz) # check to see if the session needs to be updated if request.user.is_authenticated() and session_tz != tz: request.session['timezone'] = tz request.session.save() # the session had to change, lets update the users database entry # to be safe tz_store, created = TimezoneStore.objects.get_or_create(user = request.user) tz_store.timezone = tz tz_store.save() request.user._timezone = tz_store except UnknownTimeZoneError: pass else: timezone.deactivate() Set the timezone right after it gets activated.from django.conf import settings from django.utils import timezone from pytz import UnknownTimeZoneError from .models import TimezoneStore class TimezonesMiddleware(object): def process_request(self, request): """ Attempts to activate a timezone from a cookie or session """ if getattr(settings, 'USE_TZ'): # check the cookie and the session tz = request.COOKIES.get('timezone') session_tz = request.session.get('timezone') tz = tz or session_tz if tz: try: # attempt to activate the timezone. This might be an invalid # timezone or none, so the rest of the logic following is coniditional # on getting a valid timezone timezone.activate(tz) # caching the timezone inside the user instance request.user._timezone = tz_store # check to see if the session needs to be updated if request.user.is_authenticated() and session_tz != tz: request.session['timezone'] = tz request.session.save() # the session had to change, lets update the users database entry # to be safe tz_store, created = TimezoneStore.objects.get_or_create(user = request.user) tz_store.timezone = tz tz_store.save() except UnknownTimeZoneError: pass else: timezone.deactivate()
<commit_before>from django.conf import settings from django.utils import timezone from pytz import UnknownTimeZoneError from .models import TimezoneStore class TimezonesMiddleware(object): def process_request(self, request): """ Attempts to activate a timezone from a cookie or session """ if getattr(settings, 'USE_TZ'): # check the cookie and the session tz = request.COOKIES.get('timezone') session_tz = request.session.get('timezone') tz = tz or session_tz if tz: try: # attempt to activate the timezone. This might be an invalid # timezone or none, so the rest of the logic following is coniditional # on getting a valid timezone timezone.activate(tz) # check to see if the session needs to be updated if request.user.is_authenticated() and session_tz != tz: request.session['timezone'] = tz request.session.save() # the session had to change, lets update the users database entry # to be safe tz_store, created = TimezoneStore.objects.get_or_create(user = request.user) tz_store.timezone = tz tz_store.save() request.user._timezone = tz_store except UnknownTimeZoneError: pass else: timezone.deactivate() <commit_msg>Set the timezone right after it gets activated.<commit_after>from django.conf import settings from django.utils import timezone from pytz import UnknownTimeZoneError from .models import TimezoneStore class TimezonesMiddleware(object): def process_request(self, request): """ Attempts to activate a timezone from a cookie or session """ if getattr(settings, 'USE_TZ'): # check the cookie and the session tz = request.COOKIES.get('timezone') session_tz = request.session.get('timezone') tz = tz or session_tz if tz: try: # attempt to activate the timezone. This might be an invalid # timezone or none, so the rest of the logic following is coniditional # on getting a valid timezone timezone.activate(tz) # caching the timezone inside the user instance request.user._timezone = tz_store # check to see if the session needs to be updated if request.user.is_authenticated() and session_tz != tz: request.session['timezone'] = tz request.session.save() # the session had to change, lets update the users database entry # to be safe tz_store, created = TimezoneStore.objects.get_or_create(user = request.user) tz_store.timezone = tz tz_store.save() except UnknownTimeZoneError: pass else: timezone.deactivate()
c035b951d85ffc60598968ca5a277afc416446a3
pylinks/main/models.py
pylinks/main/models.py
from django.db import models
from django.contrib.sites.models import Site


class DatedModel(models.Model):
    created_time = models.DateTimeField(auto_now_add=True, null=True)
    updated_time = models.DateTimeField(auto_now=True, null=True)

    class Meta:
        abstract = True
        get_latest_by = 'updated_time'


class SiteInfo(models.Model):
    site = models.OneToOneField(Site, primary_key=True)
    description = models.TextField(null=True)

    def __unicode__(self):
        return unicode(self.site)

    class Meta:
        verbose_name_plural = 'site info'
from django.db import models
from django.contrib.sites.models import Site


class DatedModel(models.Model):
    created_time = models.DateTimeField(auto_now_add=True, null=True)
    updated_time = models.DateTimeField(auto_now=True, null=True)

    class Meta:
        abstract = True
        get_latest_by = 'updated_time'


class SiteInfo(models.Model):
    site = models.OneToOneField(Site, primary_key=True)
    description = models.TextField(null=True)

    def __unicode__(self):
        return unicode(self.site)

    class Meta:
        verbose_name_plural = 'site info'

    def save(self, *args, **kwargs):
        # Ensure the site cache is cleared so the new
        # info will be loaded next time
        rc = super(SiteInfo, self).save(*args, **kwargs)
        Site.objects.clear_cache()
        return rc
Clear sites cache on SiteInfo save
Clear sites cache on SiteInfo save
Python
mit
michaelmior/pylinks,michaelmior/pylinks,michaelmior/pylinks
from django.db import models from django.contrib.sites.models import Site class DatedModel(models.Model): created_time = models.DateTimeField(auto_now_add=True, null=True) updated_time = models.DateTimeField(auto_now=True, null=True) class Meta: abstract = True get_latest_by = 'updated_time' class SiteInfo(models.Model): site = models.OneToOneField(Site, primary_key=True) description = models.TextField(null=True) def __unicode__(self): return unicode(self.site) class Meta: verbose_name_plural = 'site info' Clear sites cache on SiteInfo save
from django.db import models from django.contrib.sites.models import Site class DatedModel(models.Model): created_time = models.DateTimeField(auto_now_add=True, null=True) updated_time = models.DateTimeField(auto_now=True, null=True) class Meta: abstract = True get_latest_by = 'updated_time' class SiteInfo(models.Model): site = models.OneToOneField(Site, primary_key=True) description = models.TextField(null=True) def __unicode__(self): return unicode(self.site) class Meta: verbose_name_plural = 'site info' def save(self, *args, **kwargs): # Ensure the site cache is cleared so the new # info will be loaded next time rc = super(SiteInfo, self).save(*args, **kwargs) Site.objects.clear_cache() return rc
<commit_before>from django.db import models from django.contrib.sites.models import Site class DatedModel(models.Model): created_time = models.DateTimeField(auto_now_add=True, null=True) updated_time = models.DateTimeField(auto_now=True, null=True) class Meta: abstract = True get_latest_by = 'updated_time' class SiteInfo(models.Model): site = models.OneToOneField(Site, primary_key=True) description = models.TextField(null=True) def __unicode__(self): return unicode(self.site) class Meta: verbose_name_plural = 'site info' <commit_msg>Clear sites cache on SiteInfo save<commit_after>
from django.db import models from django.contrib.sites.models import Site class DatedModel(models.Model): created_time = models.DateTimeField(auto_now_add=True, null=True) updated_time = models.DateTimeField(auto_now=True, null=True) class Meta: abstract = True get_latest_by = 'updated_time' class SiteInfo(models.Model): site = models.OneToOneField(Site, primary_key=True) description = models.TextField(null=True) def __unicode__(self): return unicode(self.site) class Meta: verbose_name_plural = 'site info' def save(self, *args, **kwargs): # Ensure the site cache is cleared so the new # info will be loaded next time rc = super(SiteInfo, self).save(*args, **kwargs) Site.objects.clear_cache() return rc
from django.db import models from django.contrib.sites.models import Site class DatedModel(models.Model): created_time = models.DateTimeField(auto_now_add=True, null=True) updated_time = models.DateTimeField(auto_now=True, null=True) class Meta: abstract = True get_latest_by = 'updated_time' class SiteInfo(models.Model): site = models.OneToOneField(Site, primary_key=True) description = models.TextField(null=True) def __unicode__(self): return unicode(self.site) class Meta: verbose_name_plural = 'site info' Clear sites cache on SiteInfo savefrom django.db import models from django.contrib.sites.models import Site class DatedModel(models.Model): created_time = models.DateTimeField(auto_now_add=True, null=True) updated_time = models.DateTimeField(auto_now=True, null=True) class Meta: abstract = True get_latest_by = 'updated_time' class SiteInfo(models.Model): site = models.OneToOneField(Site, primary_key=True) description = models.TextField(null=True) def __unicode__(self): return unicode(self.site) class Meta: verbose_name_plural = 'site info' def save(self, *args, **kwargs): # Ensure the site cache is cleared so the new # info will be loaded next time rc = super(SiteInfo, self).save(*args, **kwargs) Site.objects.clear_cache() return rc
<commit_before>from django.db import models from django.contrib.sites.models import Site class DatedModel(models.Model): created_time = models.DateTimeField(auto_now_add=True, null=True) updated_time = models.DateTimeField(auto_now=True, null=True) class Meta: abstract = True get_latest_by = 'updated_time' class SiteInfo(models.Model): site = models.OneToOneField(Site, primary_key=True) description = models.TextField(null=True) def __unicode__(self): return unicode(self.site) class Meta: verbose_name_plural = 'site info' <commit_msg>Clear sites cache on SiteInfo save<commit_after>from django.db import models from django.contrib.sites.models import Site class DatedModel(models.Model): created_time = models.DateTimeField(auto_now_add=True, null=True) updated_time = models.DateTimeField(auto_now=True, null=True) class Meta: abstract = True get_latest_by = 'updated_time' class SiteInfo(models.Model): site = models.OneToOneField(Site, primary_key=True) description = models.TextField(null=True) def __unicode__(self): return unicode(self.site) class Meta: verbose_name_plural = 'site info' def save(self, *args, **kwargs): # Ensure the site cache is cleared so the new # info will be loaded next time rc = super(SiteInfo, self).save(*args, **kwargs) Site.objects.clear_cache() return rc
2d2ced090f8ad8bfd12bfd6543af73918b16345b
nailgun/nailgun/extrasettings.py
nailgun/nailgun/extrasettings.py
import os
import os.path

LOGPATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..")
LOGFILE = os.path.join(LOGPATH, "nailgun.log")
LOGLEVEL = "DEBUG"

CELERYLOGFILE = os.path.join(LOGPATH, "celery.log")
CELERYLOGLEVEL = "DEBUG"

CHEF_CONF_FOLDER = LOGPATH

# For testing purposes
PATH_TO_SSH_KEY = os.path.join(os.getenv("HOME"), ".ssh", "id_rsa")
PATH_TO_BOOTSTRAP_SSH_KEY = os.path.join(os.getenv("HOME"), ".ssh", "bootstrap.rsa")

COBBLER_URL = "http://localhost/cobbler_api"
COBBLER_USER = "cobbler"
COBBLER_PASSWORD = "cobbler"
COBBLER_PROFILE = "centos-6.2-x86_64"
import os
import os.path

LOGPATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..")
LOGFILE = os.path.join(LOGPATH, "nailgun.log")
LOGLEVEL = "DEBUG"

CELERYLOGFILE = os.path.join(LOGPATH, "celery.log")
CELERYLOGLEVEL = "DEBUG"

CHEF_CONF_FOLDER = LOGPATH

# For testing purposes
home = os.getenv("HOME")
PATH_TO_SSH_KEY = home and os.path.join(home, ".ssh", "id_rsa") or None
PATH_TO_BOOTSTRAP_SSH_KEY = home and \
    os.path.join(home, ".ssh", "bootstrap.rsa") or None

COBBLER_URL = "http://localhost/cobbler_api"
COBBLER_USER = "cobbler"
COBBLER_PASSWORD = "cobbler"
COBBLER_PROFILE = "centos-6.2-x86_64"
Fix running nailgun server under Windows
Fix running nailgun server under Windows
Python
apache-2.0
zhaochao/fuel-web,zhaochao/fuel-main,SmartInfrastructures/fuel-web-dev,huntxu/fuel-web,eayunstack/fuel-main,huntxu/fuel-web,huntxu/fuel-web,nebril/fuel-web,zhaochao/fuel-web,SmartInfrastructures/fuel-main-dev,ddepaoli3/fuel-main-dev,AnselZhangGit/fuel-main,koder-ua/nailgun-fcert,eayunstack/fuel-web,prmtl/fuel-web,zhaochao/fuel-main,AnselZhangGit/fuel-main,SmartInfrastructures/fuel-web-dev,dancn/fuel-main-dev,stackforge/fuel-web,stackforge/fuel-web,eayunstack/fuel-main,huntxu/fuel-web,SergK/fuel-main,zhaochao/fuel-main,prmtl/fuel-web,ddepaoli3/fuel-main-dev,dancn/fuel-main-dev,teselkin/fuel-main,koder-ua/nailgun-fcert,zhaochao/fuel-web,dancn/fuel-main-dev,koder-ua/nailgun-fcert,eayunstack/fuel-web,teselkin/fuel-main,Fiware/ops.Fuel-main-dev,ddepaoli3/fuel-main-dev,koder-ua/nailgun-fcert,eayunstack/fuel-main,SergK/fuel-main,Fiware/ops.Fuel-main-dev,SergK/fuel-main,zhaochao/fuel-web,huntxu/fuel-main,Fiware/ops.Fuel-main-dev,stackforge/fuel-main,SmartInfrastructures/fuel-main-dev,Fiware/ops.Fuel-main-dev,prmtl/fuel-web,eayunstack/fuel-web,nebril/fuel-web,dancn/fuel-main-dev,zhaochao/fuel-main,zhaochao/fuel-main,SmartInfrastructures/fuel-main-dev,teselkin/fuel-main,AnselZhangGit/fuel-main,zhaochao/fuel-web,stackforge/fuel-main,nebril/fuel-web,eayunstack/fuel-web,SmartInfrastructures/fuel-web-dev,ddepaoli3/fuel-main-dev,SmartInfrastructures/fuel-web-dev,nebril/fuel-web,huntxu/fuel-main,teselkin/fuel-main,huntxu/fuel-main,AnselZhangGit/fuel-main,eayunstack/fuel-web,SmartInfrastructures/fuel-web-dev,stackforge/fuel-web,SmartInfrastructures/fuel-main-dev,prmtl/fuel-web,nebril/fuel-web,stackforge/fuel-main,huntxu/fuel-web,prmtl/fuel-web
import os import os.path LOGPATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..") LOGFILE = os.path.join(LOGPATH, "nailgun.log") LOGLEVEL = "DEBUG" CELERYLOGFILE = os.path.join(LOGPATH, "celery.log") CELERYLOGLEVEL = "DEBUG" CHEF_CONF_FOLDER = LOGPATH # For testing purposes PATH_TO_SSH_KEY = os.path.join(os.getenv("HOME"), ".ssh", "id_rsa") PATH_TO_BOOTSTRAP_SSH_KEY = os.path.join(os.getenv("HOME"), ".ssh", "bootstrap.rsa") COBBLER_URL = "http://localhost/cobbler_api" COBBLER_USER = "cobbler" COBBLER_PASSWORD = "cobbler" COBBLER_PROFILE = "centos-6.2-x86_64" Fix running nailgun server under Windows
import os import os.path LOGPATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..") LOGFILE = os.path.join(LOGPATH, "nailgun.log") LOGLEVEL = "DEBUG" CELERYLOGFILE = os.path.join(LOGPATH, "celery.log") CELERYLOGLEVEL = "DEBUG" CHEF_CONF_FOLDER = LOGPATH # For testing purposes home = os.getenv("HOME") PATH_TO_SSH_KEY = home and os.path.join(home, ".ssh", "id_rsa") or None PATH_TO_BOOTSTRAP_SSH_KEY = home and \ os.path.join(home, ".ssh", "bootstrap.rsa") or None COBBLER_URL = "http://localhost/cobbler_api" COBBLER_USER = "cobbler" COBBLER_PASSWORD = "cobbler" COBBLER_PROFILE = "centos-6.2-x86_64"
<commit_before>import os import os.path LOGPATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..") LOGFILE = os.path.join(LOGPATH, "nailgun.log") LOGLEVEL = "DEBUG" CELERYLOGFILE = os.path.join(LOGPATH, "celery.log") CELERYLOGLEVEL = "DEBUG" CHEF_CONF_FOLDER = LOGPATH # For testing purposes PATH_TO_SSH_KEY = os.path.join(os.getenv("HOME"), ".ssh", "id_rsa") PATH_TO_BOOTSTRAP_SSH_KEY = os.path.join(os.getenv("HOME"), ".ssh", "bootstrap.rsa") COBBLER_URL = "http://localhost/cobbler_api" COBBLER_USER = "cobbler" COBBLER_PASSWORD = "cobbler" COBBLER_PROFILE = "centos-6.2-x86_64" <commit_msg>Fix running nailgun server under Windows<commit_after>
import os import os.path LOGPATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..") LOGFILE = os.path.join(LOGPATH, "nailgun.log") LOGLEVEL = "DEBUG" CELERYLOGFILE = os.path.join(LOGPATH, "celery.log") CELERYLOGLEVEL = "DEBUG" CHEF_CONF_FOLDER = LOGPATH # For testing purposes home = os.getenv("HOME") PATH_TO_SSH_KEY = home and os.path.join(home, ".ssh", "id_rsa") or None PATH_TO_BOOTSTRAP_SSH_KEY = home and \ os.path.join(home, ".ssh", "bootstrap.rsa") or None COBBLER_URL = "http://localhost/cobbler_api" COBBLER_USER = "cobbler" COBBLER_PASSWORD = "cobbler" COBBLER_PROFILE = "centos-6.2-x86_64"
import os import os.path LOGPATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..") LOGFILE = os.path.join(LOGPATH, "nailgun.log") LOGLEVEL = "DEBUG" CELERYLOGFILE = os.path.join(LOGPATH, "celery.log") CELERYLOGLEVEL = "DEBUG" CHEF_CONF_FOLDER = LOGPATH # For testing purposes PATH_TO_SSH_KEY = os.path.join(os.getenv("HOME"), ".ssh", "id_rsa") PATH_TO_BOOTSTRAP_SSH_KEY = os.path.join(os.getenv("HOME"), ".ssh", "bootstrap.rsa") COBBLER_URL = "http://localhost/cobbler_api" COBBLER_USER = "cobbler" COBBLER_PASSWORD = "cobbler" COBBLER_PROFILE = "centos-6.2-x86_64" Fix running nailgun server under Windowsimport os import os.path LOGPATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..") LOGFILE = os.path.join(LOGPATH, "nailgun.log") LOGLEVEL = "DEBUG" CELERYLOGFILE = os.path.join(LOGPATH, "celery.log") CELERYLOGLEVEL = "DEBUG" CHEF_CONF_FOLDER = LOGPATH # For testing purposes home = os.getenv("HOME") PATH_TO_SSH_KEY = home and os.path.join(home, ".ssh", "id_rsa") or None PATH_TO_BOOTSTRAP_SSH_KEY = home and \ os.path.join(home, ".ssh", "bootstrap.rsa") or None COBBLER_URL = "http://localhost/cobbler_api" COBBLER_USER = "cobbler" COBBLER_PASSWORD = "cobbler" COBBLER_PROFILE = "centos-6.2-x86_64"
<commit_before>import os import os.path LOGPATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..") LOGFILE = os.path.join(LOGPATH, "nailgun.log") LOGLEVEL = "DEBUG" CELERYLOGFILE = os.path.join(LOGPATH, "celery.log") CELERYLOGLEVEL = "DEBUG" CHEF_CONF_FOLDER = LOGPATH # For testing purposes PATH_TO_SSH_KEY = os.path.join(os.getenv("HOME"), ".ssh", "id_rsa") PATH_TO_BOOTSTRAP_SSH_KEY = os.path.join(os.getenv("HOME"), ".ssh", "bootstrap.rsa") COBBLER_URL = "http://localhost/cobbler_api" COBBLER_USER = "cobbler" COBBLER_PASSWORD = "cobbler" COBBLER_PROFILE = "centos-6.2-x86_64" <commit_msg>Fix running nailgun server under Windows<commit_after>import os import os.path LOGPATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..") LOGFILE = os.path.join(LOGPATH, "nailgun.log") LOGLEVEL = "DEBUG" CELERYLOGFILE = os.path.join(LOGPATH, "celery.log") CELERYLOGLEVEL = "DEBUG" CHEF_CONF_FOLDER = LOGPATH # For testing purposes home = os.getenv("HOME") PATH_TO_SSH_KEY = home and os.path.join(home, ".ssh", "id_rsa") or None PATH_TO_BOOTSTRAP_SSH_KEY = home and \ os.path.join(home, ".ssh", "bootstrap.rsa") or None COBBLER_URL = "http://localhost/cobbler_api" COBBLER_USER = "cobbler" COBBLER_PASSWORD = "cobbler" COBBLER_PROFILE = "centos-6.2-x86_64"
e250f7bd61206b53cdad6522ddef231b96ef373b
ncbi_genome_download/__main__.py
ncbi_genome_download/__main__.py
"""Command line handling for ncbi-genome-download.""" import logging from ncbi_genome_download import args_download from ncbi_genome_download import argument_parser from ncbi_genome_download import __version__ def main(): """Build and parse command line.""" parser = argument_parser(version=__version__) args = parser.parse_args() if args.debug: log_level = logging.DEBUG elif args.verbose: log_level = logging.INFO else: log_level = logging.WARNING logger = logging.getLogger("ncbi-genome-download") logging.basicConfig(format='%(levelname)s: %(message)s', level=log_level) max_retries = args.retries attempts = 0 ret = args_download(args) while ret == 75 and attempts < max_retries: attempts += 1 logger.error( 'Downloading from NCBI failed due to a connection error, retrying. Retries so far: %s', attempts) ret = args_download(args) return ret if __name__ == '__main__': main()
"""Command line handling for ncbi-genome-download.""" import logging from ncbi_genome_download import args_download from ncbi_genome_download import argument_parser from ncbi_genome_download import __version__ def main(): """Build and parse command line.""" parser = argument_parser(version=__version__) args = parser.parse_args() if args.debug: log_level = logging.DEBUG elif args.verbose: log_level = logging.INFO else: log_level = logging.WARNING logger = logging.getLogger("ncbi-genome-download") logging.basicConfig(format='%(levelname)s: %(message)s', level=log_level) max_retries = args.retries attempts = 0 try: ret = args_download(args) except ValueError as err: print(err) return -2 while ret == 75 and attempts < max_retries: attempts += 1 logger.error( 'Downloading from NCBI failed due to a connection error, retrying. Retries so far: %s', attempts) ret = args_download(args) return ret if __name__ == '__main__': main()
Print nicer error messages on invalid arguments
main: Print nicer error messages on invalid arguments Signed-off-by: Kai Blin <ad3597797f6179d503c382b2627cc19939309418@biosustain.dtu.dk>
Python
apache-2.0
kblin/ncbi-genome-download,kblin/ncbi-genome-download
"""Command line handling for ncbi-genome-download.""" import logging from ncbi_genome_download import args_download from ncbi_genome_download import argument_parser from ncbi_genome_download import __version__ def main(): """Build and parse command line.""" parser = argument_parser(version=__version__) args = parser.parse_args() if args.debug: log_level = logging.DEBUG elif args.verbose: log_level = logging.INFO else: log_level = logging.WARNING logger = logging.getLogger("ncbi-genome-download") logging.basicConfig(format='%(levelname)s: %(message)s', level=log_level) max_retries = args.retries attempts = 0 ret = args_download(args) while ret == 75 and attempts < max_retries: attempts += 1 logger.error( 'Downloading from NCBI failed due to a connection error, retrying. Retries so far: %s', attempts) ret = args_download(args) return ret if __name__ == '__main__': main() main: Print nicer error messages on invalid arguments Signed-off-by: Kai Blin <ad3597797f6179d503c382b2627cc19939309418@biosustain.dtu.dk>
"""Command line handling for ncbi-genome-download.""" import logging from ncbi_genome_download import args_download from ncbi_genome_download import argument_parser from ncbi_genome_download import __version__ def main(): """Build and parse command line.""" parser = argument_parser(version=__version__) args = parser.parse_args() if args.debug: log_level = logging.DEBUG elif args.verbose: log_level = logging.INFO else: log_level = logging.WARNING logger = logging.getLogger("ncbi-genome-download") logging.basicConfig(format='%(levelname)s: %(message)s', level=log_level) max_retries = args.retries attempts = 0 try: ret = args_download(args) except ValueError as err: print(err) return -2 while ret == 75 and attempts < max_retries: attempts += 1 logger.error( 'Downloading from NCBI failed due to a connection error, retrying. Retries so far: %s', attempts) ret = args_download(args) return ret if __name__ == '__main__': main()
<commit_before>"""Command line handling for ncbi-genome-download.""" import logging from ncbi_genome_download import args_download from ncbi_genome_download import argument_parser from ncbi_genome_download import __version__ def main(): """Build and parse command line.""" parser = argument_parser(version=__version__) args = parser.parse_args() if args.debug: log_level = logging.DEBUG elif args.verbose: log_level = logging.INFO else: log_level = logging.WARNING logger = logging.getLogger("ncbi-genome-download") logging.basicConfig(format='%(levelname)s: %(message)s', level=log_level) max_retries = args.retries attempts = 0 ret = args_download(args) while ret == 75 and attempts < max_retries: attempts += 1 logger.error( 'Downloading from NCBI failed due to a connection error, retrying. Retries so far: %s', attempts) ret = args_download(args) return ret if __name__ == '__main__': main() <commit_msg>main: Print nicer error messages on invalid arguments Signed-off-by: Kai Blin <ad3597797f6179d503c382b2627cc19939309418@biosustain.dtu.dk><commit_after>
"""Command line handling for ncbi-genome-download.""" import logging from ncbi_genome_download import args_download from ncbi_genome_download import argument_parser from ncbi_genome_download import __version__ def main(): """Build and parse command line.""" parser = argument_parser(version=__version__) args = parser.parse_args() if args.debug: log_level = logging.DEBUG elif args.verbose: log_level = logging.INFO else: log_level = logging.WARNING logger = logging.getLogger("ncbi-genome-download") logging.basicConfig(format='%(levelname)s: %(message)s', level=log_level) max_retries = args.retries attempts = 0 try: ret = args_download(args) except ValueError as err: print(err) return -2 while ret == 75 and attempts < max_retries: attempts += 1 logger.error( 'Downloading from NCBI failed due to a connection error, retrying. Retries so far: %s', attempts) ret = args_download(args) return ret if __name__ == '__main__': main()
"""Command line handling for ncbi-genome-download.""" import logging from ncbi_genome_download import args_download from ncbi_genome_download import argument_parser from ncbi_genome_download import __version__ def main(): """Build and parse command line.""" parser = argument_parser(version=__version__) args = parser.parse_args() if args.debug: log_level = logging.DEBUG elif args.verbose: log_level = logging.INFO else: log_level = logging.WARNING logger = logging.getLogger("ncbi-genome-download") logging.basicConfig(format='%(levelname)s: %(message)s', level=log_level) max_retries = args.retries attempts = 0 ret = args_download(args) while ret == 75 and attempts < max_retries: attempts += 1 logger.error( 'Downloading from NCBI failed due to a connection error, retrying. Retries so far: %s', attempts) ret = args_download(args) return ret if __name__ == '__main__': main() main: Print nicer error messages on invalid arguments Signed-off-by: Kai Blin <ad3597797f6179d503c382b2627cc19939309418@biosustain.dtu.dk>"""Command line handling for ncbi-genome-download.""" import logging from ncbi_genome_download import args_download from ncbi_genome_download import argument_parser from ncbi_genome_download import __version__ def main(): """Build and parse command line.""" parser = argument_parser(version=__version__) args = parser.parse_args() if args.debug: log_level = logging.DEBUG elif args.verbose: log_level = logging.INFO else: log_level = logging.WARNING logger = logging.getLogger("ncbi-genome-download") logging.basicConfig(format='%(levelname)s: %(message)s', level=log_level) max_retries = args.retries attempts = 0 try: ret = args_download(args) except ValueError as err: print(err) return -2 while ret == 75 and attempts < max_retries: attempts += 1 logger.error( 'Downloading from NCBI failed due to a connection error, retrying. Retries so far: %s', attempts) ret = args_download(args) return ret if __name__ == '__main__': main()
<commit_before>"""Command line handling for ncbi-genome-download.""" import logging from ncbi_genome_download import args_download from ncbi_genome_download import argument_parser from ncbi_genome_download import __version__ def main(): """Build and parse command line.""" parser = argument_parser(version=__version__) args = parser.parse_args() if args.debug: log_level = logging.DEBUG elif args.verbose: log_level = logging.INFO else: log_level = logging.WARNING logger = logging.getLogger("ncbi-genome-download") logging.basicConfig(format='%(levelname)s: %(message)s', level=log_level) max_retries = args.retries attempts = 0 ret = args_download(args) while ret == 75 and attempts < max_retries: attempts += 1 logger.error( 'Downloading from NCBI failed due to a connection error, retrying. Retries so far: %s', attempts) ret = args_download(args) return ret if __name__ == '__main__': main() <commit_msg>main: Print nicer error messages on invalid arguments Signed-off-by: Kai Blin <ad3597797f6179d503c382b2627cc19939309418@biosustain.dtu.dk><commit_after>"""Command line handling for ncbi-genome-download.""" import logging from ncbi_genome_download import args_download from ncbi_genome_download import argument_parser from ncbi_genome_download import __version__ def main(): """Build and parse command line.""" parser = argument_parser(version=__version__) args = parser.parse_args() if args.debug: log_level = logging.DEBUG elif args.verbose: log_level = logging.INFO else: log_level = logging.WARNING logger = logging.getLogger("ncbi-genome-download") logging.basicConfig(format='%(levelname)s: %(message)s', level=log_level) max_retries = args.retries attempts = 0 try: ret = args_download(args) except ValueError as err: print(err) return -2 while ret == 75 and attempts < max_retries: attempts += 1 logger.error( 'Downloading from NCBI failed due to a connection error, retrying. Retries so far: %s', attempts) ret = args_download(args) return ret if __name__ == '__main__': main()
e3aa2ca9d9fb74de6512acd04c509a41c176040a
pdc/apps/repository/filters.py
pdc/apps/repository/filters.py
#
# Copyright (c) 2015 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
import django_filters as filters

from pdc.apps.common.filters import MultiValueFilter
from . import models


class RepoFilter(filters.FilterSet):
    arch = MultiValueFilter(name='variant_arch__arch__name')
    content_category = MultiValueFilter(name='content_category__name')
    content_format = MultiValueFilter(name='content_format__name')
    release_id = MultiValueFilter(name='variant_arch__variant__release__release_id')
    variant_uid = MultiValueFilter(name='variant_arch__variant__variant_uid')
    repo_family = MultiValueFilter(name='repo_family__name')
    service = MultiValueFilter(name='service__name')
    shadow = filters.BooleanFilter()
    product_id = MultiValueFilter()

    class Meta:
        model = models.Repo
        fields = ('arch', 'content_category', 'content_format', 'name', 'release_id',
                  'repo_family', 'service', 'shadow', 'variant_uid', 'product_id')


class RepoFamilyFilter(filters.FilterSet):
    name = filters.CharFilter(lookup_type="icontains")

    class Meta:
        model = models.RepoFamily
        fields = ('name',)
#
# Copyright (c) 2015 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
import django_filters as filters

from pdc.apps.common.filters import MultiValueFilter, MultiIntFilter
from . import models


class RepoFilter(filters.FilterSet):
    arch = MultiValueFilter(name='variant_arch__arch__name')
    content_category = MultiValueFilter(name='content_category__name')
    content_format = MultiValueFilter(name='content_format__name')
    release_id = MultiValueFilter(name='variant_arch__variant__release__release_id')
    variant_uid = MultiValueFilter(name='variant_arch__variant__variant_uid')
    repo_family = MultiValueFilter(name='repo_family__name')
    service = MultiValueFilter(name='service__name')
    shadow = filters.BooleanFilter()
    product_id = MultiIntFilter()

    class Meta:
        model = models.Repo
        fields = ('arch', 'content_category', 'content_format', 'name', 'release_id',
                  'repo_family', 'service', 'shadow', 'variant_uid', 'product_id')


class RepoFamilyFilter(filters.FilterSet):
    name = filters.CharFilter(lookup_type="icontains")

    class Meta:
        model = models.RepoFamily
        fields = ('name',)
Fix product_id filter on content delivery repos
Fix product_id filter on content delivery repos The value should be an integer. JIRA: PDC-1104
Python
mit
release-engineering/product-definition-center,release-engineering/product-definition-center,lao605/product-definition-center,lao605/product-definition-center,product-definition-center/product-definition-center,pombredanne/product-definition-center,xychu/product-definition-center,xychu/product-definition-center,pombredanne/product-definition-center,product-definition-center/product-definition-center,product-definition-center/product-definition-center,lao605/product-definition-center,xychu/product-definition-center,pombredanne/product-definition-center,tzhaoredhat/automation,release-engineering/product-definition-center,release-engineering/product-definition-center,xychu/product-definition-center,tzhaoredhat/automation,lao605/product-definition-center,tzhaoredhat/automation,tzhaoredhat/automation,product-definition-center/product-definition-center,pombredanne/product-definition-center
# # Copyright (c) 2015 Red Hat # Licensed under The MIT License (MIT) # http://opensource.org/licenses/MIT # import django_filters as filters from pdc.apps.common.filters import MultiValueFilter from . import models class RepoFilter(filters.FilterSet): arch = MultiValueFilter(name='variant_arch__arch__name') content_category = MultiValueFilter(name='content_category__name') content_format = MultiValueFilter(name='content_format__name') release_id = MultiValueFilter(name='variant_arch__variant__release__release_id') variant_uid = MultiValueFilter(name='variant_arch__variant__variant_uid') repo_family = MultiValueFilter(name='repo_family__name') service = MultiValueFilter(name='service__name') shadow = filters.BooleanFilter() product_id = MultiValueFilter() class Meta: model = models.Repo fields = ('arch', 'content_category', 'content_format', 'name', 'release_id', 'repo_family', 'service', 'shadow', 'variant_uid', 'product_id') class RepoFamilyFilter(filters.FilterSet): name = filters.CharFilter(lookup_type="icontains") class Meta: model = models.RepoFamily fields = ('name',) Fix product_id filter on content delivery repos The value should be an integer. JIRA: PDC-1104
# # Copyright (c) 2015 Red Hat # Licensed under The MIT License (MIT) # http://opensource.org/licenses/MIT # import django_filters as filters from pdc.apps.common.filters import MultiValueFilter, MultiIntFilter from . import models class RepoFilter(filters.FilterSet): arch = MultiValueFilter(name='variant_arch__arch__name') content_category = MultiValueFilter(name='content_category__name') content_format = MultiValueFilter(name='content_format__name') release_id = MultiValueFilter(name='variant_arch__variant__release__release_id') variant_uid = MultiValueFilter(name='variant_arch__variant__variant_uid') repo_family = MultiValueFilter(name='repo_family__name') service = MultiValueFilter(name='service__name') shadow = filters.BooleanFilter() product_id = MultiIntFilter() class Meta: model = models.Repo fields = ('arch', 'content_category', 'content_format', 'name', 'release_id', 'repo_family', 'service', 'shadow', 'variant_uid', 'product_id') class RepoFamilyFilter(filters.FilterSet): name = filters.CharFilter(lookup_type="icontains") class Meta: model = models.RepoFamily fields = ('name',)
<commit_before># # Copyright (c) 2015 Red Hat # Licensed under The MIT License (MIT) # http://opensource.org/licenses/MIT # import django_filters as filters from pdc.apps.common.filters import MultiValueFilter from . import models class RepoFilter(filters.FilterSet): arch = MultiValueFilter(name='variant_arch__arch__name') content_category = MultiValueFilter(name='content_category__name') content_format = MultiValueFilter(name='content_format__name') release_id = MultiValueFilter(name='variant_arch__variant__release__release_id') variant_uid = MultiValueFilter(name='variant_arch__variant__variant_uid') repo_family = MultiValueFilter(name='repo_family__name') service = MultiValueFilter(name='service__name') shadow = filters.BooleanFilter() product_id = MultiValueFilter() class Meta: model = models.Repo fields = ('arch', 'content_category', 'content_format', 'name', 'release_id', 'repo_family', 'service', 'shadow', 'variant_uid', 'product_id') class RepoFamilyFilter(filters.FilterSet): name = filters.CharFilter(lookup_type="icontains") class Meta: model = models.RepoFamily fields = ('name',) <commit_msg>Fix product_id filter on content delivery repos The value should be an integer. JIRA: PDC-1104<commit_after>
# # Copyright (c) 2015 Red Hat # Licensed under The MIT License (MIT) # http://opensource.org/licenses/MIT # import django_filters as filters from pdc.apps.common.filters import MultiValueFilter, MultiIntFilter from . import models class RepoFilter(filters.FilterSet): arch = MultiValueFilter(name='variant_arch__arch__name') content_category = MultiValueFilter(name='content_category__name') content_format = MultiValueFilter(name='content_format__name') release_id = MultiValueFilter(name='variant_arch__variant__release__release_id') variant_uid = MultiValueFilter(name='variant_arch__variant__variant_uid') repo_family = MultiValueFilter(name='repo_family__name') service = MultiValueFilter(name='service__name') shadow = filters.BooleanFilter() product_id = MultiIntFilter() class Meta: model = models.Repo fields = ('arch', 'content_category', 'content_format', 'name', 'release_id', 'repo_family', 'service', 'shadow', 'variant_uid', 'product_id') class RepoFamilyFilter(filters.FilterSet): name = filters.CharFilter(lookup_type="icontains") class Meta: model = models.RepoFamily fields = ('name',)
# # Copyright (c) 2015 Red Hat # Licensed under The MIT License (MIT) # http://opensource.org/licenses/MIT # import django_filters as filters from pdc.apps.common.filters import MultiValueFilter from . import models class RepoFilter(filters.FilterSet): arch = MultiValueFilter(name='variant_arch__arch__name') content_category = MultiValueFilter(name='content_category__name') content_format = MultiValueFilter(name='content_format__name') release_id = MultiValueFilter(name='variant_arch__variant__release__release_id') variant_uid = MultiValueFilter(name='variant_arch__variant__variant_uid') repo_family = MultiValueFilter(name='repo_family__name') service = MultiValueFilter(name='service__name') shadow = filters.BooleanFilter() product_id = MultiValueFilter() class Meta: model = models.Repo fields = ('arch', 'content_category', 'content_format', 'name', 'release_id', 'repo_family', 'service', 'shadow', 'variant_uid', 'product_id') class RepoFamilyFilter(filters.FilterSet): name = filters.CharFilter(lookup_type="icontains") class Meta: model = models.RepoFamily fields = ('name',) Fix product_id filter on content delivery repos The value should be an integer. JIRA: PDC-1104# # Copyright (c) 2015 Red Hat # Licensed under The MIT License (MIT) # http://opensource.org/licenses/MIT # import django_filters as filters from pdc.apps.common.filters import MultiValueFilter, MultiIntFilter from . import models class RepoFilter(filters.FilterSet): arch = MultiValueFilter(name='variant_arch__arch__name') content_category = MultiValueFilter(name='content_category__name') content_format = MultiValueFilter(name='content_format__name') release_id = MultiValueFilter(name='variant_arch__variant__release__release_id') variant_uid = MultiValueFilter(name='variant_arch__variant__variant_uid') repo_family = MultiValueFilter(name='repo_family__name') service = MultiValueFilter(name='service__name') shadow = filters.BooleanFilter() product_id = MultiIntFilter() class Meta: model = models.Repo fields = ('arch', 'content_category', 'content_format', 'name', 'release_id', 'repo_family', 'service', 'shadow', 'variant_uid', 'product_id') class RepoFamilyFilter(filters.FilterSet): name = filters.CharFilter(lookup_type="icontains") class Meta: model = models.RepoFamily fields = ('name',)
<commit_before># # Copyright (c) 2015 Red Hat # Licensed under The MIT License (MIT) # http://opensource.org/licenses/MIT # import django_filters as filters from pdc.apps.common.filters import MultiValueFilter from . import models class RepoFilter(filters.FilterSet): arch = MultiValueFilter(name='variant_arch__arch__name') content_category = MultiValueFilter(name='content_category__name') content_format = MultiValueFilter(name='content_format__name') release_id = MultiValueFilter(name='variant_arch__variant__release__release_id') variant_uid = MultiValueFilter(name='variant_arch__variant__variant_uid') repo_family = MultiValueFilter(name='repo_family__name') service = MultiValueFilter(name='service__name') shadow = filters.BooleanFilter() product_id = MultiValueFilter() class Meta: model = models.Repo fields = ('arch', 'content_category', 'content_format', 'name', 'release_id', 'repo_family', 'service', 'shadow', 'variant_uid', 'product_id') class RepoFamilyFilter(filters.FilterSet): name = filters.CharFilter(lookup_type="icontains") class Meta: model = models.RepoFamily fields = ('name',) <commit_msg>Fix product_id filter on content delivery repos The value should be an integer. JIRA: PDC-1104<commit_after># # Copyright (c) 2015 Red Hat # Licensed under The MIT License (MIT) # http://opensource.org/licenses/MIT # import django_filters as filters from pdc.apps.common.filters import MultiValueFilter, MultiIntFilter from . import models class RepoFilter(filters.FilterSet): arch = MultiValueFilter(name='variant_arch__arch__name') content_category = MultiValueFilter(name='content_category__name') content_format = MultiValueFilter(name='content_format__name') release_id = MultiValueFilter(name='variant_arch__variant__release__release_id') variant_uid = MultiValueFilter(name='variant_arch__variant__variant_uid') repo_family = MultiValueFilter(name='repo_family__name') service = MultiValueFilter(name='service__name') shadow = filters.BooleanFilter() product_id = MultiIntFilter() class Meta: model = models.Repo fields = ('arch', 'content_category', 'content_format', 'name', 'release_id', 'repo_family', 'service', 'shadow', 'variant_uid', 'product_id') class RepoFamilyFilter(filters.FilterSet): name = filters.CharFilter(lookup_type="icontains") class Meta: model = models.RepoFamily fields = ('name',)
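The subject and message explain the intent of this record: product_id values are integers, so the generic MultiValueFilter is swapped for MultiIntFilter from pdc.apps.common.filters. That helper's implementation is not part of this record; purely as an illustration of the kind of coercion it implies, a plain-Python cleaner for a multi-valued integer query parameter could look like the sketch below (the function name is an assumption, not PDC code).

def clean_int_values(raw_values):
    """Coerce ['1', '42'] style query values to [1, 42], rejecting non-numeric input."""
    cleaned = []
    for value in raw_values:
        try:
            cleaned.append(int(value))
        except (TypeError, ValueError):
            raise ValueError("product_id must be an integer, got %r" % (value,))
    return cleaned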
ba8231787a6f464ee946feaea9d853ee24894478
eJRF/snap-ci/snap-settings.py
eJRF/snap-ci/snap-settings.py
DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.postgresql_psycopg2",
        "NAME": "app_test",
        "USER": "go",
        "PASSWORD": "go",
        "HOST": "localhost",
    }
}

LETTUCE_AVOID_APPS = (
    'south',
    'django_nose',
    'lettuce.django',
    'django_extensions',
    'bootstrap_pagination',
)
import os

DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.postgresql_psycopg2",
        "NAME": "app_test",
        "USER": "go",
        "PASSWORD": "go",
        "HOST": "localhost",
    }
}

LETTUCE_AVOID_APPS = (
    'south',
    'django_nose',
    'lettuce.django',
    'django_extensions',
    'bootstrap_pagination',
)
Revert "also removing the un-needed import"
Revert "also removing the un-needed import" This reverts commit 483a88a2c707a3e5d44b9db83c2a7d2184f9acff.
Python
bsd-3-clause
eJRF/ejrf,eJRF/ejrf,eJRF/ejrf,eJRF/ejrf
DATABASES = { "default": { "ENGINE": "django.db.backends.postgresql_psycopg2", "NAME": "app_test", "USER": "go", "PASSWORD": "go", "HOST": "localhost", } } LETTUCE_AVOID_APPS = ( 'south', 'django_nose', 'lettuce.django', 'django_extensions', 'bootstrap_pagination', ) Revert "also removing the un-needed import" This reverts commit 483a88a2c707a3e5d44b9db83c2a7d2184f9acff.
import os DATABASES = { "default": { "ENGINE": "django.db.backends.postgresql_psycopg2", "NAME": "app_test", "USER": "go", "PASSWORD": "go", "HOST": "localhost", } } LETTUCE_AVOID_APPS = ( 'south', 'django_nose', 'lettuce.django', 'django_extensions', 'bootstrap_pagination', )
<commit_before>DATABASES = { "default": { "ENGINE": "django.db.backends.postgresql_psycopg2", "NAME": "app_test", "USER": "go", "PASSWORD": "go", "HOST": "localhost", } } LETTUCE_AVOID_APPS = ( 'south', 'django_nose', 'lettuce.django', 'django_extensions', 'bootstrap_pagination', ) <commit_msg>Revert "also removing the un-needed import" This reverts commit 483a88a2c707a3e5d44b9db83c2a7d2184f9acff.<commit_after>
import os DATABASES = { "default": { "ENGINE": "django.db.backends.postgresql_psycopg2", "NAME": "app_test", "USER": "go", "PASSWORD": "go", "HOST": "localhost", } } LETTUCE_AVOID_APPS = ( 'south', 'django_nose', 'lettuce.django', 'django_extensions', 'bootstrap_pagination', )
DATABASES = { "default": { "ENGINE": "django.db.backends.postgresql_psycopg2", "NAME": "app_test", "USER": "go", "PASSWORD": "go", "HOST": "localhost", } } LETTUCE_AVOID_APPS = ( 'south', 'django_nose', 'lettuce.django', 'django_extensions', 'bootstrap_pagination', ) Revert "also removing the un-needed import" This reverts commit 483a88a2c707a3e5d44b9db83c2a7d2184f9acff.import os DATABASES = { "default": { "ENGINE": "django.db.backends.postgresql_psycopg2", "NAME": "app_test", "USER": "go", "PASSWORD": "go", "HOST": "localhost", } } LETTUCE_AVOID_APPS = ( 'south', 'django_nose', 'lettuce.django', 'django_extensions', 'bootstrap_pagination', )
<commit_before>DATABASES = { "default": { "ENGINE": "django.db.backends.postgresql_psycopg2", "NAME": "app_test", "USER": "go", "PASSWORD": "go", "HOST": "localhost", } } LETTUCE_AVOID_APPS = ( 'south', 'django_nose', 'lettuce.django', 'django_extensions', 'bootstrap_pagination', ) <commit_msg>Revert "also removing the un-needed import" This reverts commit 483a88a2c707a3e5d44b9db83c2a7d2184f9acff.<commit_after>import os DATABASES = { "default": { "ENGINE": "django.db.backends.postgresql_psycopg2", "NAME": "app_test", "USER": "go", "PASSWORD": "go", "HOST": "localhost", } } LETTUCE_AVOID_APPS = ( 'south', 'django_nose', 'lettuce.django', 'django_extensions', 'bootstrap_pagination', )
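The related nailgun record earlier in this set guards against HOME being unset (as it typically is on Windows) with the old `value and x or y` idiom, which is safe there because the joined path is always a non-empty string. An equivalent, slightly more explicit spelling with conditional expressions, shown only as a sketch rather than what the project shipped, would be:

import os

home = os.getenv("HOME")
# None when HOME is missing, otherwise the usual key locations
PATH_TO_SSH_KEY = os.path.join(home, ".ssh", "id_rsa") if home else None
PATH_TO_BOOTSTRAP_SSH_KEY = (
    os.path.join(home, ".ssh", "bootstrap.rsa") if home else None
)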
5c41286666290c2a067c51b7ab9ea171e4657d69
fb/models.py
fb/models.py
from django.db import models

# Create your models here.
from django.db import models


class UserPost(models.Model):
    text = models.TextField(max_length=200)
    date_added = models.DateTimeField(auto_now_add=True)
    author = models.CharField(default='Eau De Web', max_length=20)

    def __unicode__(self):
        return '{} @ {}'.format(self.author, self.date_added)
Write a model class for user posts.
Write a model class for user posts.
Python
apache-2.0
pure-python/brainmate
from django.db import models # Create your models here. Write a model class for user posts.
from django.db import models class UserPost(models.Model): text = models.TextField(max_length=200) date_added = models.DateTimeField(auto_now_add=True) author = models.CharField(default='Eau De Web', max_length=20) def __unicode__(self): return '{} @ {}'.format(self.author, self.date_added)
<commit_before>from django.db import models # Create your models here. <commit_msg>Write a model class for user posts.<commit_after>
from django.db import models class UserPost(models.Model): text = models.TextField(max_length=200) date_added = models.DateTimeField(auto_now_add=True) author = models.CharField(default='Eau De Web', max_length=20) def __unicode__(self): return '{} @ {}'.format(self.author, self.date_added)
from django.db import models # Create your models here. Write a model class for user posts.from django.db import models class UserPost(models.Model): text = models.TextField(max_length=200) date_added = models.DateTimeField(auto_now_add=True) author = models.CharField(default='Eau De Web', max_length=20) def __unicode__(self): return '{} @ {}'.format(self.author, self.date_added)
<commit_before>from django.db import models # Create your models here. <commit_msg>Write a model class for user posts.<commit_after>from django.db import models class UserPost(models.Model): text = models.TextField(max_length=200) date_added = models.DateTimeField(auto_now_add=True) author = models.CharField(default='Eau De Web', max_length=20) def __unicode__(self): return '{} @ {}'.format(self.author, self.date_added)
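The new model defines __unicode__, the Python 2 hook Django uses for a human-readable representation; on Python 3 the equivalent hook is __str__ (or the django.utils.encoding.python_2_unicode_compatible decorator for code that must run on both). A Django-free sketch of the same idea, with a stand-in class rather than the real UserPost model:

class PostLike(object):
    """Plain-Python stand-in showing the __str__ spelling of the same representation."""

    def __init__(self, author, date_added):
        self.author = author
        self.date_added = date_added

    def __str__(self):
        return '{} @ {}'.format(self.author, self.date_added)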
3c0ebc1a8e5a3d626a53eb94fb80c9466b46ee59
rplugin/python/vimp.py
rplugin/python/vimp.py
import neovim


@neovim.plugin
class VimpBuffers(object):

    def __init__(self, vim):
        self.vim = vim

    @neovim.command("ToggleTerminalBuffer")
    def toggle_terminal_buffer(self):
        current_buffer = self.vim.current.buffer

        # find the currently open terminal and close it
        for idx, window in enumerate(self.vim.current.tabpage.windows, 1):
            if window.buffer.name.startswith("term://"):
                self.vim.command("{0}wincmd w".format(idx))
                self.vim.command("wincmd c".format(idx))
                # go back to the window where the command was executed from
                window_number = self.vim.eval(
                    "bufwinnr({0})".format(current_buffer.number)
                )
                self.vim.command("{0}wincmd w".format(window_number))
                return

        # find the terminal buffer in the list of buffers, open it in a split
        for buf in self.vim.buffers:
            if buf.name.startswith("term://"):
                self.vim.command("botright sb {0}".format(buf.number))
                return

        # create a new terminal when there is no terminal buffer
        self.vim.command("botright new")
        self.vim.command(":terminal")
import neovim
import os
from datetime import datetime


@neovim.plugin
class VimpBuffers(object):

    def __init__(self, vim):
        self.vim = vim

    @neovim.command("ToggleTerminalBuffer")
    def toggle_terminal_buffer(self):
        current_buffer = self.vim.current.buffer

        # find the currently open terminal and close it
        for idx, window in enumerate(self.vim.current.tabpage.windows, 1):
            if window.buffer.name.startswith("term://"):
                self.vim.command("{0}wincmd w".format(idx))
                self.vim.command("wincmd c".format(idx))
                # go back to the window where the command was executed from
                window_number = self.vim.eval(
                    "bufwinnr({0})".format(current_buffer.number)
                )
                self.vim.command("{0}wincmd w".format(window_number))
                return

        # find the terminal buffer in the list of buffers, open it in a split
        for buf in self.vim.buffers:
            if buf.name.startswith("term://"):
                self.vim.command("botright sb {0}".format(buf.number))
                return

        # create a new terminal when there is no terminal buffer
        self.vim.command("botright new")
        self.vim.command(":terminal")

    @neovim.command("ShowTodaysNotes")
    def open_log_file(self):
        filepath = os.path.join(
            os.environ.get("NOTES_DIR"),
            "%s.md" % datetime.now().strftime("%Y-%m-%d")
        )
        self.vim.command("vs %s" % filepath)
Add command to quickly open a notes file
Add command to quickly open a notes file
Python
mit
vpetro/vimp
import neovim @neovim.plugin class VimpBuffers(object): def __init__(self, vim): self.vim = vim @neovim.command("ToggleTerminalBuffer") def toggle_terminal_buffer(self): current_buffer = self.vim.current.buffer # find the currently open terminal and close it for idx, window in enumerate(self.vim.current.tabpage.windows, 1): if window.buffer.name.startswith("term://"): self.vim.command("{0}wincmd w".format(idx)) self.vim.command("wincmd c".format(idx)) # go back to the window where the command was executed from window_number = self.vim.eval( "bufwinnr({0})".format(current_buffer.number) ) self.vim.command("{0}wincmd w".format(window_number)) return # find the terminal buffer in the list of buffers, open it in a split for buf in self.vim.buffers: if buf.name.startswith("term://"): self.vim.command("botright sb {0}".format(buf.number)) return # create a new terminal when there is no terminal buffer self.vim.command("botright new") self.vim.command(":terminal") Add command to quickly open a notes file
import neovim import os from datetime import datetime @neovim.plugin class VimpBuffers(object): def __init__(self, vim): self.vim = vim @neovim.command("ToggleTerminalBuffer") def toggle_terminal_buffer(self): current_buffer = self.vim.current.buffer # find the currently open terminal and close it for idx, window in enumerate(self.vim.current.tabpage.windows, 1): if window.buffer.name.startswith("term://"): self.vim.command("{0}wincmd w".format(idx)) self.vim.command("wincmd c".format(idx)) # go back to the window where the command was executed from window_number = self.vim.eval( "bufwinnr({0})".format(current_buffer.number) ) self.vim.command("{0}wincmd w".format(window_number)) return # find the terminal buffer in the list of buffers, open it in a split for buf in self.vim.buffers: if buf.name.startswith("term://"): self.vim.command("botright sb {0}".format(buf.number)) return # create a new terminal when there is no terminal buffer self.vim.command("botright new") self.vim.command(":terminal") @neovim.command("ShowTodaysNotes") def open_log_file(self): filepath = os.path.join( os.environ.get("NOTES_DIR"), "%s.md" % datetime.now().strftime("%Y-%m-%d") ) self.vim.command("vs %s" % filepath)
<commit_before>import neovim @neovim.plugin class VimpBuffers(object): def __init__(self, vim): self.vim = vim @neovim.command("ToggleTerminalBuffer") def toggle_terminal_buffer(self): current_buffer = self.vim.current.buffer # find the currently open terminal and close it for idx, window in enumerate(self.vim.current.tabpage.windows, 1): if window.buffer.name.startswith("term://"): self.vim.command("{0}wincmd w".format(idx)) self.vim.command("wincmd c".format(idx)) # go back to the window where the command was executed from window_number = self.vim.eval( "bufwinnr({0})".format(current_buffer.number) ) self.vim.command("{0}wincmd w".format(window_number)) return # find the terminal buffer in the list of buffers, open it in a split for buf in self.vim.buffers: if buf.name.startswith("term://"): self.vim.command("botright sb {0}".format(buf.number)) return # create a new terminal when there is no terminal buffer self.vim.command("botright new") self.vim.command(":terminal") <commit_msg>Add command to quickly open a notes file<commit_after>
import neovim import os from datetime import datetime @neovim.plugin class VimpBuffers(object): def __init__(self, vim): self.vim = vim @neovim.command("ToggleTerminalBuffer") def toggle_terminal_buffer(self): current_buffer = self.vim.current.buffer # find the currently open terminal and close it for idx, window in enumerate(self.vim.current.tabpage.windows, 1): if window.buffer.name.startswith("term://"): self.vim.command("{0}wincmd w".format(idx)) self.vim.command("wincmd c".format(idx)) # go back to the window where the command was executed from window_number = self.vim.eval( "bufwinnr({0})".format(current_buffer.number) ) self.vim.command("{0}wincmd w".format(window_number)) return # find the terminal buffer in the list of buffers, open it in a split for buf in self.vim.buffers: if buf.name.startswith("term://"): self.vim.command("botright sb {0}".format(buf.number)) return # create a new terminal when there is no terminal buffer self.vim.command("botright new") self.vim.command(":terminal") @neovim.command("ShowTodaysNotes") def open_log_file(self): filepath = os.path.join( os.environ.get("NOTES_DIR"), "%s.md" % datetime.now().strftime("%Y-%m-%d") ) self.vim.command("vs %s" % filepath)
import neovim @neovim.plugin class VimpBuffers(object): def __init__(self, vim): self.vim = vim @neovim.command("ToggleTerminalBuffer") def toggle_terminal_buffer(self): current_buffer = self.vim.current.buffer # find the currently open terminal and close it for idx, window in enumerate(self.vim.current.tabpage.windows, 1): if window.buffer.name.startswith("term://"): self.vim.command("{0}wincmd w".format(idx)) self.vim.command("wincmd c".format(idx)) # go back to the window where the command was executed from window_number = self.vim.eval( "bufwinnr({0})".format(current_buffer.number) ) self.vim.command("{0}wincmd w".format(window_number)) return # find the terminal buffer in the list of buffers, open it in a split for buf in self.vim.buffers: if buf.name.startswith("term://"): self.vim.command("botright sb {0}".format(buf.number)) return # create a new terminal when there is no terminal buffer self.vim.command("botright new") self.vim.command(":terminal") Add command to quickly open a notes fileimport neovim import os from datetime import datetime @neovim.plugin class VimpBuffers(object): def __init__(self, vim): self.vim = vim @neovim.command("ToggleTerminalBuffer") def toggle_terminal_buffer(self): current_buffer = self.vim.current.buffer # find the currently open terminal and close it for idx, window in enumerate(self.vim.current.tabpage.windows, 1): if window.buffer.name.startswith("term://"): self.vim.command("{0}wincmd w".format(idx)) self.vim.command("wincmd c".format(idx)) # go back to the window where the command was executed from window_number = self.vim.eval( "bufwinnr({0})".format(current_buffer.number) ) self.vim.command("{0}wincmd w".format(window_number)) return # find the terminal buffer in the list of buffers, open it in a split for buf in self.vim.buffers: if buf.name.startswith("term://"): self.vim.command("botright sb {0}".format(buf.number)) return # create a new terminal when there is no terminal buffer self.vim.command("botright new") self.vim.command(":terminal") @neovim.command("ShowTodaysNotes") def open_log_file(self): filepath = os.path.join( os.environ.get("NOTES_DIR"), "%s.md" % datetime.now().strftime("%Y-%m-%d") ) self.vim.command("vs %s" % filepath)
<commit_before>import neovim @neovim.plugin class VimpBuffers(object): def __init__(self, vim): self.vim = vim @neovim.command("ToggleTerminalBuffer") def toggle_terminal_buffer(self): current_buffer = self.vim.current.buffer # find the currently open terminal and close it for idx, window in enumerate(self.vim.current.tabpage.windows, 1): if window.buffer.name.startswith("term://"): self.vim.command("{0}wincmd w".format(idx)) self.vim.command("wincmd c".format(idx)) # go back to the window where the command was executed from window_number = self.vim.eval( "bufwinnr({0})".format(current_buffer.number) ) self.vim.command("{0}wincmd w".format(window_number)) return # find the terminal buffer in the list of buffers, open it in a split for buf in self.vim.buffers: if buf.name.startswith("term://"): self.vim.command("botright sb {0}".format(buf.number)) return # create a new terminal when there is no terminal buffer self.vim.command("botright new") self.vim.command(":terminal") <commit_msg>Add command to quickly open a notes file<commit_after>import neovim import os from datetime import datetime @neovim.plugin class VimpBuffers(object): def __init__(self, vim): self.vim = vim @neovim.command("ToggleTerminalBuffer") def toggle_terminal_buffer(self): current_buffer = self.vim.current.buffer # find the currently open terminal and close it for idx, window in enumerate(self.vim.current.tabpage.windows, 1): if window.buffer.name.startswith("term://"): self.vim.command("{0}wincmd w".format(idx)) self.vim.command("wincmd c".format(idx)) # go back to the window where the command was executed from window_number = self.vim.eval( "bufwinnr({0})".format(current_buffer.number) ) self.vim.command("{0}wincmd w".format(window_number)) return # find the terminal buffer in the list of buffers, open it in a split for buf in self.vim.buffers: if buf.name.startswith("term://"): self.vim.command("botright sb {0}".format(buf.number)) return # create a new terminal when there is no terminal buffer self.vim.command("botright new") self.vim.command(":terminal") @neovim.command("ShowTodaysNotes") def open_log_file(self): filepath = os.path.join( os.environ.get("NOTES_DIR"), "%s.md" % datetime.now().strftime("%Y-%m-%d") ) self.vim.command("vs %s" % filepath)
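One thing worth noting about the command added in this record: os.environ.get("NOTES_DIR") returns None when the variable is unset, and os.path.join(None, ...) then raises TypeError. A defensive variant of the path construction, with an assumed fallback directory that is not part of the plugin, could look like this:

import os
from datetime import datetime


def todays_notes_path(default_dir="~/notes"):
    """Build today's notes path, falling back to a default when NOTES_DIR is unset."""
    notes_dir = os.environ.get("NOTES_DIR") or os.path.expanduser(default_dir)
    return os.path.join(notes_dir, "%s.md" % datetime.now().strftime("%Y-%m-%d"))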
68f25d536945e06fae814bdf19218bc148f6cc93
backend/scripts/updatedf.py
backend/scripts/updatedf.py
#!/usr/bin/env python

#import hashlib
import os


def main():
    for root, dirs, files in os.walk("/mcfs/data/materialscommons"):
        for f in files:
            print f


if __name__ == "__main__":
    main()
#!/usr/bin/env python

import hashlib
import os
import rethinkdb as r


def main():
    conn = r.connect('localhost', 28015, db='materialscommons')
    for root, dirs, files in os.walk("/mcfs/data/materialscommons"):
        for f in files:
            path = os.path.join(root, f)
            with open(path) as fd:
                data = fd.read()
            hash = hashlib.md5(data).hexdigest()
            s = os.stat(path).st_size
            r.table('datafiles').get(f).update({'size':s, 'checksum':hash}).run(conn)
            print "%s:%s:%d" %(path, hash, s)


if __name__ == "__main__":
    main()
Update script to write results to the database.
Update script to write results to the database.
Python
mit
materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org
#!/usr/bin/env python #import hashlib import os def main(): for root, dirs, files in os.walk("/mcfs/data/materialscommons"): for f in files: print f if __name__ == "__main__": main() Update script to write results to the database.
#!/usr/bin/env python import hashlib import os import rethinkdb as r def main(): conn = r.connect('localhost', 28015, db='materialscommons') for root, dirs, files in os.walk("/mcfs/data/materialscommons"): for f in files: path = os.path.join(root, f) with open(path) as fd: data = fd.read() hash = hashlib.md5(data).hexdigest() s = os.stat(path).st_size r.table('datafiles').get(f).update({'size':s, 'checksum':hash}).run(conn) print "%s:%s:%d" %(path, hash, s) if __name__ == "__main__": main()
<commit_before>#!/usr/bin/env python #import hashlib import os def main(): for root, dirs, files in os.walk("/mcfs/data/materialscommons"): for f in files: print f if __name__ == "__main__": main() <commit_msg>Update script to write results to the database.<commit_after>
#!/usr/bin/env python import hashlib import os import rethinkdb as r def main(): conn = r.connect('localhost', 28015, db='materialscommons') for root, dirs, files in os.walk("/mcfs/data/materialscommons"): for f in files: path = os.path.join(root, f) with open(path) as fd: data = fd.read() hash = hashlib.md5(data).hexdigest() s = os.stat(path).st_size r.table('datafiles').get(f).update({'size':s, 'checksum':hash}).run(conn) print "%s:%s:%d" %(path, hash, s) if __name__ == "__main__": main()
#!/usr/bin/env python #import hashlib import os def main(): for root, dirs, files in os.walk("/mcfs/data/materialscommons"): for f in files: print f if __name__ == "__main__": main() Update script to write results to the database.#!/usr/bin/env python import hashlib import os import rethinkdb as r def main(): conn = r.connect('localhost', 28015, db='materialscommons') for root, dirs, files in os.walk("/mcfs/data/materialscommons"): for f in files: path = os.path.join(root, f) with open(path) as fd: data = fd.read() hash = hashlib.md5(data).hexdigest() s = os.stat(path).st_size r.table('datafiles').get(f).update({'size':s, 'checksum':hash}).run(conn) print "%s:%s:%d" %(path, hash, s) if __name__ == "__main__": main()
<commit_before>#!/usr/bin/env python #import hashlib import os def main(): for root, dirs, files in os.walk("/mcfs/data/materialscommons"): for f in files: print f if __name__ == "__main__": main() <commit_msg>Update script to write results to the database.<commit_after>#!/usr/bin/env python import hashlib import os import rethinkdb as r def main(): conn = r.connect('localhost', 28015, db='materialscommons') for root, dirs, files in os.walk("/mcfs/data/materialscommons"): for f in files: path = os.path.join(root, f) with open(path) as fd: data = fd.read() hash = hashlib.md5(data).hexdigest() s = os.stat(path).st_size r.table('datafiles').get(f).update({'size':s, 'checksum':hash}).run(conn) print "%s:%s:%d" %(path, hash, s) if __name__ == "__main__": main()
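The updated script in this record reads each file fully into memory before hashing (data = fd.read()), which is fine for small files but costly for large ones. A chunked digest is the usual alternative; the helper below is only a sketch, and its name and chunk size are assumptions rather than anything from the repository.

import hashlib


def md5_of_file(path, chunk_size=1 << 20):
    """Compute an MD5 hex digest without loading the whole file into memory."""
    digest = hashlib.md5()
    with open(path, "rb") as fd:
        for chunk in iter(lambda: fd.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()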
2844589a64ae8998b03cc1e3be7fee232618d9e9
test/txn_tests.py
test/txn_tests.py
from __future__ import absolute_import

import captable
import unittest
import datetime

from ._helpers import StubTransaction


class TransactionTests(unittest.TestCase):
    """Test adding transactions and processing them"""

    def setUp(self):
        """Initialize a blank captable and authorize multiple classes of
        securities"""
        self.table = captable.CapTable()
        self.txn_1 = StubTransaction(txn_datetime=datetime.datetime(2015,5,1))
        self.txn_2 = StubTransaction(txn_datetime=datetime.datetime(2015,5,2))
        self.txn_3 = StubTransaction(txn_datetime=datetime.datetime(2015,5,3))

        # NB: Order added shouldn't matter -- should sort based on time
        self.table.record_txn(self.txn_1)
        self.table.record_txn(self.txn_3)
        self.table.record_txn(self.txn_2)

    def test_process_all(self):
        """Calling process without an argument processes all transactions"""
        state = self.table.process()
        StubTransaction.check(state, self.txn_1, self.txn_2, self.txn_3)

    def test_process_to_time(self):
        """Calling process with a datetime should process transactions up to
        (and including) that exact time"""
        state = self.table.process(datetime.datetime(2015, 5, 2))
        StubTransaction.check(state, self.txn_1, self.txn_2)
from __future__ import absolute_import

import captable
import unittest
import datetime

from ._helpers import StubTransaction


class TransactionTests(unittest.TestCase):
    """Test adding transactions and processing them"""

    def setUp(self):
        """Initialize a blank captable and authorize multiple classes of
        securities"""
        self.table = captable.CapTable()
        self.txn_1 = StubTransaction(txn_datetime=datetime.datetime(2015,5,1))
        self.txn_2 = StubTransaction(txn_datetime=datetime.datetime(2015,5,2))
        self.txn_3 = StubTransaction(txn_datetime=datetime.datetime(2015,5,3))

        # NB: Order added shouldn't matter -- should sort based on time
        self.table.record_txn(self.txn_1)
        self.table.record_txn(self.txn_3)
        self.table.record_txn(self.txn_2)

    def test_transactions_list(self):
        """All transactions should be stored in table list"""
        self.assertEqual(self.table.transactions,
                         [self.txn_1, self.txn_2, self.txn_3])

    def test_process_all(self):
        """Calling process without an argument processes all transactions"""
        state = self.table.process()
        StubTransaction.check(state, self.txn_1, self.txn_2, self.txn_3)

    def test_process_to_time(self):
        """Calling process with a datetime should process transactions up to
        (and including) that exact time"""
        state = self.table.process(datetime.datetime(2015, 5, 2))
        StubTransaction.check(state, self.txn_1, self.txn_2)
Test transaction list in table is ordered
Test transaction list in table is ordered
Python
mit
fongandrew/captable.py
from __future__ import absolute_import import captable import unittest import datetime from ._helpers import StubTransaction class TransactionTests(unittest.TestCase): """Test adding transactions and processing them""" def setUp(self): """Initialize a blank captable and authorize multiple classes of securities""" self.table = captable.CapTable() self.txn_1 = StubTransaction(txn_datetime=datetime.datetime(2015,5,1)) self.txn_2 = StubTransaction(txn_datetime=datetime.datetime(2015,5,2)) self.txn_3 = StubTransaction(txn_datetime=datetime.datetime(2015,5,3)) # NB: Order added shouldn't matter -- should sort based on time self.table.record_txn(self.txn_1) self.table.record_txn(self.txn_3) self.table.record_txn(self.txn_2) def test_process_all(self): """Calling process without an argument processes all transactions""" state = self.table.process() StubTransaction.check(state, self.txn_1, self.txn_2, self.txn_3) def test_process_to_time(self): """Calling process with a datetime should process transactions up to (and including) that exact time""" state = self.table.process(datetime.datetime(2015, 5, 2)) StubTransaction.check(state, self.txn_1, self.txn_2) Test transaction list in table is ordered
from __future__ import absolute_import import captable import unittest import datetime from ._helpers import StubTransaction class TransactionTests(unittest.TestCase): """Test adding transactions and processing them""" def setUp(self): """Initialize a blank captable and authorize multiple classes of securities""" self.table = captable.CapTable() self.txn_1 = StubTransaction(txn_datetime=datetime.datetime(2015,5,1)) self.txn_2 = StubTransaction(txn_datetime=datetime.datetime(2015,5,2)) self.txn_3 = StubTransaction(txn_datetime=datetime.datetime(2015,5,3)) # NB: Order added shouldn't matter -- should sort based on time self.table.record_txn(self.txn_1) self.table.record_txn(self.txn_3) self.table.record_txn(self.txn_2) def test_transactions_list(self): """All transactions should be stored in table list""" self.assertEqual(self.table.transactions, [self.txn_1, self.txn_2, self.txn_3]) def test_process_all(self): """Calling process without an argument processes all transactions""" state = self.table.process() StubTransaction.check(state, self.txn_1, self.txn_2, self.txn_3) def test_process_to_time(self): """Calling process with a datetime should process transactions up to (and including) that exact time""" state = self.table.process(datetime.datetime(2015, 5, 2)) StubTransaction.check(state, self.txn_1, self.txn_2)
<commit_before>from __future__ import absolute_import import captable import unittest import datetime from ._helpers import StubTransaction class TransactionTests(unittest.TestCase): """Test adding transactions and processing them""" def setUp(self): """Initialize a blank captable and authorize multiple classes of securities""" self.table = captable.CapTable() self.txn_1 = StubTransaction(txn_datetime=datetime.datetime(2015,5,1)) self.txn_2 = StubTransaction(txn_datetime=datetime.datetime(2015,5,2)) self.txn_3 = StubTransaction(txn_datetime=datetime.datetime(2015,5,3)) # NB: Order added shouldn't matter -- should sort based on time self.table.record_txn(self.txn_1) self.table.record_txn(self.txn_3) self.table.record_txn(self.txn_2) def test_process_all(self): """Calling process without an argument processes all transactions""" state = self.table.process() StubTransaction.check(state, self.txn_1, self.txn_2, self.txn_3) def test_process_to_time(self): """Calling process with a datetime should process transactions up to (and including) that exact time""" state = self.table.process(datetime.datetime(2015, 5, 2)) StubTransaction.check(state, self.txn_1, self.txn_2) <commit_msg>Test transaction list in table is ordered<commit_after>
from __future__ import absolute_import import captable import unittest import datetime from ._helpers import StubTransaction class TransactionTests(unittest.TestCase): """Test adding transactions and processing them""" def setUp(self): """Initialize a blank captable and authorize multiple classes of securities""" self.table = captable.CapTable() self.txn_1 = StubTransaction(txn_datetime=datetime.datetime(2015,5,1)) self.txn_2 = StubTransaction(txn_datetime=datetime.datetime(2015,5,2)) self.txn_3 = StubTransaction(txn_datetime=datetime.datetime(2015,5,3)) # NB: Order added shouldn't matter -- should sort based on time self.table.record_txn(self.txn_1) self.table.record_txn(self.txn_3) self.table.record_txn(self.txn_2) def test_transactions_list(self): """All transactions should be stored in table list""" self.assertEqual(self.table.transactions, [self.txn_1, self.txn_2, self.txn_3]) def test_process_all(self): """Calling process without an argument processes all transactions""" state = self.table.process() StubTransaction.check(state, self.txn_1, self.txn_2, self.txn_3) def test_process_to_time(self): """Calling process with a datetime should process transactions up to (and including) that exact time""" state = self.table.process(datetime.datetime(2015, 5, 2)) StubTransaction.check(state, self.txn_1, self.txn_2)
from __future__ import absolute_import import captable import unittest import datetime from ._helpers import StubTransaction class TransactionTests(unittest.TestCase): """Test adding transactions and processing them""" def setUp(self): """Initialize a blank captable and authorize multiple classes of securities""" self.table = captable.CapTable() self.txn_1 = StubTransaction(txn_datetime=datetime.datetime(2015,5,1)) self.txn_2 = StubTransaction(txn_datetime=datetime.datetime(2015,5,2)) self.txn_3 = StubTransaction(txn_datetime=datetime.datetime(2015,5,3)) # NB: Order added shouldn't matter -- should sort based on time self.table.record_txn(self.txn_1) self.table.record_txn(self.txn_3) self.table.record_txn(self.txn_2) def test_process_all(self): """Calling process without an argument processes all transactions""" state = self.table.process() StubTransaction.check(state, self.txn_1, self.txn_2, self.txn_3) def test_process_to_time(self): """Calling process with a datetime should process transactions up to (and including) that exact time""" state = self.table.process(datetime.datetime(2015, 5, 2)) StubTransaction.check(state, self.txn_1, self.txn_2) Test transaction list in table is orderedfrom __future__ import absolute_import import captable import unittest import datetime from ._helpers import StubTransaction class TransactionTests(unittest.TestCase): """Test adding transactions and processing them""" def setUp(self): """Initialize a blank captable and authorize multiple classes of securities""" self.table = captable.CapTable() self.txn_1 = StubTransaction(txn_datetime=datetime.datetime(2015,5,1)) self.txn_2 = StubTransaction(txn_datetime=datetime.datetime(2015,5,2)) self.txn_3 = StubTransaction(txn_datetime=datetime.datetime(2015,5,3)) # NB: Order added shouldn't matter -- should sort based on time self.table.record_txn(self.txn_1) self.table.record_txn(self.txn_3) self.table.record_txn(self.txn_2) def test_transactions_list(self): """All transactions should be stored in table list""" self.assertEqual(self.table.transactions, [self.txn_1, self.txn_2, self.txn_3]) def test_process_all(self): """Calling process without an argument processes all transactions""" state = self.table.process() StubTransaction.check(state, self.txn_1, self.txn_2, self.txn_3) def test_process_to_time(self): """Calling process with a datetime should process transactions up to (and including) that exact time""" state = self.table.process(datetime.datetime(2015, 5, 2)) StubTransaction.check(state, self.txn_1, self.txn_2)
<commit_before>from __future__ import absolute_import import captable import unittest import datetime from ._helpers import StubTransaction class TransactionTests(unittest.TestCase): """Test adding transactions and processing them""" def setUp(self): """Initialize a blank captable and authorize multiple classes of securities""" self.table = captable.CapTable() self.txn_1 = StubTransaction(txn_datetime=datetime.datetime(2015,5,1)) self.txn_2 = StubTransaction(txn_datetime=datetime.datetime(2015,5,2)) self.txn_3 = StubTransaction(txn_datetime=datetime.datetime(2015,5,3)) # NB: Order added shouldn't matter -- should sort based on time self.table.record_txn(self.txn_1) self.table.record_txn(self.txn_3) self.table.record_txn(self.txn_2) def test_process_all(self): """Calling process without an argument processes all transactions""" state = self.table.process() StubTransaction.check(state, self.txn_1, self.txn_2, self.txn_3) def test_process_to_time(self): """Calling process with a datetime should process transactions up to (and including) that exact time""" state = self.table.process(datetime.datetime(2015, 5, 2)) StubTransaction.check(state, self.txn_1, self.txn_2) <commit_msg>Test transaction list in table is ordered<commit_after>from __future__ import absolute_import import captable import unittest import datetime from ._helpers import StubTransaction class TransactionTests(unittest.TestCase): """Test adding transactions and processing them""" def setUp(self): """Initialize a blank captable and authorize multiple classes of securities""" self.table = captable.CapTable() self.txn_1 = StubTransaction(txn_datetime=datetime.datetime(2015,5,1)) self.txn_2 = StubTransaction(txn_datetime=datetime.datetime(2015,5,2)) self.txn_3 = StubTransaction(txn_datetime=datetime.datetime(2015,5,3)) # NB: Order added shouldn't matter -- should sort based on time self.table.record_txn(self.txn_1) self.table.record_txn(self.txn_3) self.table.record_txn(self.txn_2) def test_transactions_list(self): """All transactions should be stored in table list""" self.assertEqual(self.table.transactions, [self.txn_1, self.txn_2, self.txn_3]) def test_process_all(self): """Calling process without an argument processes all transactions""" state = self.table.process() StubTransaction.check(state, self.txn_1, self.txn_2, self.txn_3) def test_process_to_time(self): """Calling process with a datetime should process transactions up to (and including) that exact time""" state = self.table.process(datetime.datetime(2015, 5, 2)) StubTransaction.check(state, self.txn_1, self.txn_2)
04fa3a9fd61cc83c23ddd59ea474bd45cd2a1e8c
tests/__init__.py
tests/__init__.py
# coding: utf-8
from __future__ import unicode_literals
from __future__ import absolute_import

from os.path import join, realpath

import fs

# Add the local code directory to the `fs` module path
fs.__path__.insert(0, realpath(join(__file__, "..", "..", "fs")))
# coding: utf-8
from __future__ import unicode_literals
from __future__ import absolute_import

from os.path import join, realpath

import fs

# Add the local code directory to the `fs` module path
# Can only rely on fs.__path__ being an iterable - on windows it's not a list
newPath = list(fs.__path__)
newPath.insert(0, realpath(join(__file__, "..", "..", "fs")))
fs.__path__ = newPath
Make namespace packages work for tests in windows
Make namespace packages work for tests in windows
Python
mit
rkhwaja/fs.onedrivefs
# coding: utf-8 from __future__ import unicode_literals from __future__ import absolute_import from os.path import join, realpath import fs # Add the local code directory to the `fs` module path fs.__path__.insert(0, realpath(join(__file__, "..", "..", "fs"))) Make namespace packages work for tests in windows
# coding: utf-8 from __future__ import unicode_literals from __future__ import absolute_import from os.path import join, realpath import fs # Add the local code directory to the `fs` module path # Can only rely on fs.__path__ being an iterable - on windows it's not a list newPath = list(fs.__path__) newPath.insert(0, realpath(join(__file__, "..", "..", "fs"))) fs.__path__ = newPath
<commit_before># coding: utf-8 from __future__ import unicode_literals from __future__ import absolute_import from os.path import join, realpath import fs # Add the local code directory to the `fs` module path fs.__path__.insert(0, realpath(join(__file__, "..", "..", "fs"))) <commit_msg>Make namespace packages work for tests in windows<commit_after>
# coding: utf-8 from __future__ import unicode_literals from __future__ import absolute_import from os.path import join, realpath import fs # Add the local code directory to the `fs` module path # Can only rely on fs.__path__ being an iterable - on windows it's not a list newPath = list(fs.__path__) newPath.insert(0, realpath(join(__file__, "..", "..", "fs"))) fs.__path__ = newPath
# coding: utf-8 from __future__ import unicode_literals from __future__ import absolute_import from os.path import join, realpath import fs # Add the local code directory to the `fs` module path fs.__path__.insert(0, realpath(join(__file__, "..", "..", "fs"))) Make namespace packages work for tests in windows# coding: utf-8 from __future__ import unicode_literals from __future__ import absolute_import from os.path import join, realpath import fs # Add the local code directory to the `fs` module path # Can only rely on fs.__path__ being an iterable - on windows it's not a list newPath = list(fs.__path__) newPath.insert(0, realpath(join(__file__, "..", "..", "fs"))) fs.__path__ = newPath
<commit_before># coding: utf-8 from __future__ import unicode_literals from __future__ import absolute_import from os.path import join, realpath import fs # Add the local code directory to the `fs` module path fs.__path__.insert(0, realpath(join(__file__, "..", "..", "fs"))) <commit_msg>Make namespace packages work for tests in windows<commit_after># coding: utf-8 from __future__ import unicode_literals from __future__ import absolute_import from os.path import join, realpath import fs # Add the local code directory to the `fs` module path # Can only rely on fs.__path__ being an iterable - on windows it's not a list newPath = list(fs.__path__) newPath.insert(0, realpath(join(__file__, "..", "..", "fs"))) fs.__path__ = newPath
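The fix in the record above works because a namespace package's __path__ is only guaranteed to be iterable, so it is copied into a list before the local checkout is prepended and then reassigned. A generalised sketch of the same pattern follows; prepend_local_package and its arguments are illustrative names, not part of fs.onedrivefs.

import importlib
from os.path import abspath

def prepend_local_package(package_name, directory):
    """Make a local checkout of *package_name* shadow the installed copy."""
    pkg = importlib.import_module(package_name)
    new_path = list(pkg.__path__)        # copy: __path__ may not support insert()
    new_path.insert(0, abspath(directory))
    pkg.__path__ = new_path              # reassign instead of mutating in place
    return pkg.__path__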
3bcc646e1120e69a9aab412e22a4f85cce4da7bf
hashtable.py
hashtable.py
# Write hashtable class that stores strings in a hash table where keys are calculated using the first two letters of the string class HashTable(object): def __init__(self): self.table = [None]*10000 def store(self, string): hash_value = self.calculate_hash_value(string) if hash_value != -1: if self.table[hash_value] != None: self.table[hash_value].append(string) else: self.table[hash_value] = [string] def search(self, string): hash_value = self.calculate_hash_value(string) if hash_value != -1: if self.table[hash_value] != None: if string in self.table[hash_value]: return hash_value return -1
# Write hashtable class that stores strings in a hash table where keys are calculated using the first two letters of the string class HashTable(object): def __init__(self): self.table = [None]*10000 def store(self, string): hash_value = self.calculate_hash_value(string) if hash_value != -1: if self.table[hash_value] != None: self.table[hash_value].append(string) else: self.table[hash_value] = [string] def search(self, string): hash_value = self.calculate_hash_value(string) if hash_value != -1: if self.table[hash_value] != None: if string in self.table[hash_value]: return hash_value return -1 def calculate_hash_value(self, string): value = ord(string[0])*100 + ord(string[1]) return value
Add calculate hash value method
Add calculate hash value method
Python
mit
derekmpham/interview-prep,derekmpham/interview-prep
# Write hashtable class that stores strings in a hash table where keys are calculated using the first two letters of the string class HashTable(object): def __init__(self): self.table = [None]*10000 def store(self, string): hash_value = self.calculate_hash_value(string) if hash_value != -1: if self.table[hash_value] != None: self.table[hash_value].append(string) else: self.table[hash_value] = [string] def search(self, string): hash_value = self.calculate_hash_value(string) if hash_value != -1: if self.table[hash_value] != None: if string in self.table[hash_value]: return hash_value return -1 Add calculate hash value method
# Write hashtable class that stores strings in a hash table where keys are calculated using the first two letters of the string class HashTable(object): def __init__(self): self.table = [None]*10000 def store(self, string): hash_value = self.calculate_hash_value(string) if hash_value != -1: if self.table[hash_value] != None: self.table[hash_value].append(string) else: self.table[hash_value] = [string] def search(self, string): hash_value = self.calculate_hash_value(string) if hash_value != -1: if self.table[hash_value] != None: if string in self.table[hash_value]: return hash_value return -1 def calculate_hash_value(self, string): value = ord(string[0])*100 + ord(string[1]) return value
<commit_before># Write hashtable class that stores strings in a hash table where keys are calculated using the first two letters of the string class HashTable(object): def __init__(self): self.table = [None]*10000 def store(self, string): hash_value = self.calculate_hash_value(string) if hash_value != -1: if self.table[hash_value] != None: self.table[hash_value].append(string) else: self.table[hash_value] = [string] def search(self, string): hash_value = self.calculate_hash_value(string) if hash_value != -1: if self.table[hash_value] != None: if string in self.table[hash_value]: return hash_value return -1 <commit_msg>Add calculate hash value method<commit_after>
# Write hashtable class that stores strings in a hash table where keys are calculated using the first two letters of the string class HashTable(object): def __init__(self): self.table = [None]*10000 def store(self, string): hash_value = self.calculate_hash_value(string) if hash_value != -1: if self.table[hash_value] != None: self.table[hash_value].append(string) else: self.table[hash_value] = [string] def search(self, string): hash_value = self.calculate_hash_value(string) if hash_value != -1: if self.table[hash_value] != None: if string in self.table[hash_value]: return hash_value return -1 def calculate_hash_value(self, string): value = ord(string[0])*100 + ord(string[1]) return value
# Write hashtable class that stores strings in a hash table where keys are calculated using the first two letters of the string class HashTable(object): def __init__(self): self.table = [None]*10000 def store(self, string): hash_value = self.calculate_hash_value(string) if hash_value != -1: if self.table[hash_value] != None: self.table[hash_value].append(string) else: self.table[hash_value] = [string] def search(self, string): hash_value = self.calculate_hash_value(string) if hash_value != -1: if self.table[hash_value] != None: if string in self.table[hash_value]: return hash_value return -1 Add calculate hash value method# Write hashtable class that stores strings in a hash table where keys are calculated using the first two letters of the string class HashTable(object): def __init__(self): self.table = [None]*10000 def store(self, string): hash_value = self.calculate_hash_value(string) if hash_value != -1: if self.table[hash_value] != None: self.table[hash_value].append(string) else: self.table[hash_value] = [string] def search(self, string): hash_value = self.calculate_hash_value(string) if hash_value != -1: if self.table[hash_value] != None: if string in self.table[hash_value]: return hash_value return -1 def calculate_hash_value(self, string): value = ord(string[0])*100 + ord(string[1]) return value
<commit_before># Write hashtable class that stores strings in a hash table where keys are calculated using the first two letters of the string class HashTable(object): def __init__(self): self.table = [None]*10000 def store(self, string): hash_value = self.calculate_hash_value(string) if hash_value != -1: if self.table[hash_value] != None: self.table[hash_value].append(string) else: self.table[hash_value] = [string] def search(self, string): hash_value = self.calculate_hash_value(string) if hash_value != -1: if self.table[hash_value] != None: if string in self.table[hash_value]: return hash_value return -1 <commit_msg>Add calculate hash value method<commit_after># Write hashtable class that stores strings in a hash table where keys are calculated using the first two letters of the string class HashTable(object): def __init__(self): self.table = [None]*10000 def store(self, string): hash_value = self.calculate_hash_value(string) if hash_value != -1: if self.table[hash_value] != None: self.table[hash_value].append(string) else: self.table[hash_value] = [string] def search(self, string): hash_value = self.calculate_hash_value(string) if hash_value != -1: if self.table[hash_value] != None: if string in self.table[hash_value]: return hash_value return -1 def calculate_hash_value(self, string): value = ord(string[0])*100 + ord(string[1]) return value
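A quick usage sketch for the finished class, assuming the HashTable class from the record above is defined in the same module: keys are derived only from the first two characters, so strings sharing that prefix share a bucket.

table = HashTable()
table.store("Udacity")
table.store("Udacious")                       # same first two letters -> same bucket

print(table.calculate_hash_value("Udacity"))  # 85 * 100 + 100 = 8600
print(table.search("Udacious"))               # 8600, found in the shared bucket
print(table.search("Missing"))                # -1, never stored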
fba8f3a2595ebb032e86c09710ef4757ae87c428
heppy/modules/rgp.py
heppy/modules/rgp.py
from ..Module import Module from ..TagData import TagData class rgp(Module): opmap = { 'infData': 'descend', } def parse_rgpStatus(self, response, tag): status = tag.attrib['s'] response.set(status, tag.text) def render_default(self, request, data): ext = self.render_extension(request, 'update') request.add_subtag(ext, 'rgp:restore', { 'op': 'request'})
from ..Module import Module from ..TagData import TagData class rgp(Module): opmap = { 'infData': 'descend', } def parse_rgpStatus(self, response, tag): status = tag.attrib['s'] response.set(status, tag.text) def render_default(self, request, data): ext = self.render_extension(request, 'update') request.add_subtag(ext, 'rgp:restore', { 'op': 'request'}) def render_request(self, request, data): return self.render_default(request, data) def render_report(self, request, data): ext = self.render_extension(request, 'update') restore = request.add_subtag(ext, 'rgp:restore', { 'op': 'report'}) report = request.add_subtag(restore, 'rgp:report') request.add_subtag(report, 'rgp:preData', {}, data.get('preData')) request.add_subtag(report, 'rgp:postData', {}, data.get('postData')) request.add_subtag(report, 'rgp:delTime', {}, data.get('delTime')) request.add_subtag(report, 'rgp:resTime', {}, data.get('resTime')) request.add_subtag(report, 'rgp:resReason', {}, data.get('resReason', 'Registrant error')) request.add_subtag(report, 'rgp:statement', {}, data.get('statement', 'This registrar has not restored the Registered Name in order to assume the rights to use or sell the Registered Name for itself or for any third party')) request.add_subtag(report, 'rgp:statement', {}, data.get('statement', 'The information in this report is true to best of this registrar\'s knowledge, and this registrar acknowledges that intentionally supplying false information in this report shall constitute an incurable material breach of the Registry-Registrar Agreement'))
Add RGP request and report
Add RGP request and report
Python
bsd-3-clause
hiqdev/reppy
from ..Module import Module from ..TagData import TagData class rgp(Module): opmap = { 'infData': 'descend', } def parse_rgpStatus(self, response, tag): status = tag.attrib['s'] response.set(status, tag.text) def render_default(self, request, data): ext = self.render_extension(request, 'update') request.add_subtag(ext, 'rgp:restore', { 'op': 'request'}) Add RGP request and report
from ..Module import Module from ..TagData import TagData class rgp(Module): opmap = { 'infData': 'descend', } def parse_rgpStatus(self, response, tag): status = tag.attrib['s'] response.set(status, tag.text) def render_default(self, request, data): ext = self.render_extension(request, 'update') request.add_subtag(ext, 'rgp:restore', { 'op': 'request'}) def render_request(self, request, data): return self.render_default(request, data) def render_report(self, request, data): ext = self.render_extension(request, 'update') restore = request.add_subtag(ext, 'rgp:restore', { 'op': 'report'}) report = request.add_subtag(restore, 'rgp:report') request.add_subtag(report, 'rgp:preData', {}, data.get('preData')) request.add_subtag(report, 'rgp:postData', {}, data.get('postData')) request.add_subtag(report, 'rgp:delTime', {}, data.get('delTime')) request.add_subtag(report, 'rgp:resTime', {}, data.get('resTime')) request.add_subtag(report, 'rgp:resReason', {}, data.get('resReason', 'Registrant error')) request.add_subtag(report, 'rgp:statement', {}, data.get('statement', 'This registrar has not restored the Registered Name in order to assume the rights to use or sell the Registered Name for itself or for any third party')) request.add_subtag(report, 'rgp:statement', {}, data.get('statement', 'The information in this report is true to best of this registrar\'s knowledge, and this registrar acknowledges that intentionally supplying false information in this report shall constitute an incurable material breach of the Registry-Registrar Agreement'))
<commit_before>from ..Module import Module from ..TagData import TagData class rgp(Module): opmap = { 'infData': 'descend', } def parse_rgpStatus(self, response, tag): status = tag.attrib['s'] response.set(status, tag.text) def render_default(self, request, data): ext = self.render_extension(request, 'update') request.add_subtag(ext, 'rgp:restore', { 'op': 'request'}) <commit_msg>Add RGP request and report<commit_after>
from ..Module import Module from ..TagData import TagData class rgp(Module): opmap = { 'infData': 'descend', } def parse_rgpStatus(self, response, tag): status = tag.attrib['s'] response.set(status, tag.text) def render_default(self, request, data): ext = self.render_extension(request, 'update') request.add_subtag(ext, 'rgp:restore', { 'op': 'request'}) def render_request(self, request, data): return self.render_default(request, data) def render_report(self, request, data): ext = self.render_extension(request, 'update') restore = request.add_subtag(ext, 'rgp:restore', { 'op': 'report'}) report = request.add_subtag(restore, 'rgp:report') request.add_subtag(report, 'rgp:preData', {}, data.get('preData')) request.add_subtag(report, 'rgp:postData', {}, data.get('postData')) request.add_subtag(report, 'rgp:delTime', {}, data.get('delTime')) request.add_subtag(report, 'rgp:resTime', {}, data.get('resTime')) request.add_subtag(report, 'rgp:resReason', {}, data.get('resReason', 'Registrant error')) request.add_subtag(report, 'rgp:statement', {}, data.get('statement', 'This registrar has not restored the Registered Name in order to assume the rights to use or sell the Registered Name for itself or for any third party')) request.add_subtag(report, 'rgp:statement', {}, data.get('statement', 'The information in this report is true to best of this registrar\'s knowledge, and this registrar acknowledges that intentionally supplying false information in this report shall constitute an incurable material breach of the Registry-Registrar Agreement'))
from ..Module import Module from ..TagData import TagData class rgp(Module): opmap = { 'infData': 'descend', } def parse_rgpStatus(self, response, tag): status = tag.attrib['s'] response.set(status, tag.text) def render_default(self, request, data): ext = self.render_extension(request, 'update') request.add_subtag(ext, 'rgp:restore', { 'op': 'request'}) Add RGP request and reportfrom ..Module import Module from ..TagData import TagData class rgp(Module): opmap = { 'infData': 'descend', } def parse_rgpStatus(self, response, tag): status = tag.attrib['s'] response.set(status, tag.text) def render_default(self, request, data): ext = self.render_extension(request, 'update') request.add_subtag(ext, 'rgp:restore', { 'op': 'request'}) def render_request(self, request, data): return self.render_default(request, data) def render_report(self, request, data): ext = self.render_extension(request, 'update') restore = request.add_subtag(ext, 'rgp:restore', { 'op': 'report'}) report = request.add_subtag(restore, 'rgp:report') request.add_subtag(report, 'rgp:preData', {}, data.get('preData')) request.add_subtag(report, 'rgp:postData', {}, data.get('postData')) request.add_subtag(report, 'rgp:delTime', {}, data.get('delTime')) request.add_subtag(report, 'rgp:resTime', {}, data.get('resTime')) request.add_subtag(report, 'rgp:resReason', {}, data.get('resReason', 'Registrant error')) request.add_subtag(report, 'rgp:statement', {}, data.get('statement', 'This registrar has not restored the Registered Name in order to assume the rights to use or sell the Registered Name for itself or for any third party')) request.add_subtag(report, 'rgp:statement', {}, data.get('statement', 'The information in this report is true to best of this registrar\'s knowledge, and this registrar acknowledges that intentionally supplying false information in this report shall constitute an incurable material breach of the Registry-Registrar Agreement'))
<commit_before>from ..Module import Module from ..TagData import TagData class rgp(Module): opmap = { 'infData': 'descend', } def parse_rgpStatus(self, response, tag): status = tag.attrib['s'] response.set(status, tag.text) def render_default(self, request, data): ext = self.render_extension(request, 'update') request.add_subtag(ext, 'rgp:restore', { 'op': 'request'}) <commit_msg>Add RGP request and report<commit_after>from ..Module import Module from ..TagData import TagData class rgp(Module): opmap = { 'infData': 'descend', } def parse_rgpStatus(self, response, tag): status = tag.attrib['s'] response.set(status, tag.text) def render_default(self, request, data): ext = self.render_extension(request, 'update') request.add_subtag(ext, 'rgp:restore', { 'op': 'request'}) def render_request(self, request, data): return self.render_default(request, data) def render_report(self, request, data): ext = self.render_extension(request, 'update') restore = request.add_subtag(ext, 'rgp:restore', { 'op': 'report'}) report = request.add_subtag(restore, 'rgp:report') request.add_subtag(report, 'rgp:preData', {}, data.get('preData')) request.add_subtag(report, 'rgp:postData', {}, data.get('postData')) request.add_subtag(report, 'rgp:delTime', {}, data.get('delTime')) request.add_subtag(report, 'rgp:resTime', {}, data.get('resTime')) request.add_subtag(report, 'rgp:resReason', {}, data.get('resReason', 'Registrant error')) request.add_subtag(report, 'rgp:statement', {}, data.get('statement', 'This registrar has not restored the Registered Name in order to assume the rights to use or sell the Registered Name for itself or for any third party')) request.add_subtag(report, 'rgp:statement', {}, data.get('statement', 'The information in this report is true to best of this registrar\'s knowledge, and this registrar acknowledges that intentionally supplying false information in this report shall constitute an incurable material breach of the Registry-Registrar Agreement'))
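The report branch in the record above pulls its fields out of a plain dict, so a caller might assemble a payload like the sketch below before invoking it. The values are made-up placeholders and the surrounding request/Module plumbing is assumed to come from the rest of reppy; note that both rgp:statement subtags read the same 'statement' key, so supplying it replaces both default statements with one text.

report_data = {
    'preData':  'Registration data as it stood before deletion',
    'postData': 'Registration data as restored',
    'delTime':  '2017-01-05T10:00:00.0Z',   # placeholder timestamps
    'resTime':  '2017-01-20T10:00:00.0Z',
    'resReason': 'Registrant error',
    # omit 'statement' to fall back to the two default statements in render_report
}

This dict would then be passed as the data argument of render_report alongside a request object built elsewhere in the framework.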
83e4e9c98739b6159649888ba561a6e28dfd3ca6
src/dicomweb_client/__init__.py
src/dicomweb_client/__init__.py
__version__ = '0.20.0rc' from dicomweb_client.api import DICOMwebClient
__version__ = '0.20.0' from dicomweb_client.api import DICOMwebClient
Update package version for release
Update package version for release
Python
mit
MGHComputationalPathology/dicomweb-client
__version__ = '0.20.0rc' from dicomweb_client.api import DICOMwebClient Update package version for release
__version__ = '0.20.0' from dicomweb_client.api import DICOMwebClient
<commit_before>__version__ = '0.20.0rc' from dicomweb_client.api import DICOMwebClient <commit_msg>Update package version for release<commit_after>
__version__ = '0.20.0' from dicomweb_client.api import DICOMwebClient
__version__ = '0.20.0rc' from dicomweb_client.api import DICOMwebClient Update package version for release__version__ = '0.20.0' from dicomweb_client.api import DICOMwebClient
<commit_before>__version__ = '0.20.0rc' from dicomweb_client.api import DICOMwebClient <commit_msg>Update package version for release<commit_after>__version__ = '0.20.0' from dicomweb_client.api import DICOMwebClient
33d8e9ce8be2901dab5998192559b0e1c3408807
kikola/core/context_processors.py
kikola/core/context_processors.py
def path(request): """ kikola.core.context_processors.path =================================== Adds current path and full path variables to templates. To enable, adds ``kikola.core.context_processors.path`` to your project's ``settings`` ``TEMPLATE_CONTEXT_PROCESSORS`` var. **Note:** Django has ``django.core.context_processors.request`` context processor that adding current ``HttpRequest`` object to templates. """ return {'REQUEST_FULL_PATH': request.get_full_path(), 'REQUEST_PATH': request.path}
def path(request): """ Adds current absolute URI, path and full path variables to templates. To enable, adds ``kikola.core.context_processors.path`` to your project's ``settings`` ``TEMPLATE_CONTEXT_PROCESSORS`` var. **Note:** Django has ``django.core.context_processors.request`` context processor that adding whole ``HttpRequest`` object to templates. """ return {'REQUEST_ABSOLUTE_URI': request.build_absolute_uri(), 'REQUEST_FULL_PATH': request.get_full_path(), 'REQUEST_PATH': request.path}
Make ``path`` context processor return request absolute URI too.
Make ``path`` context processor return request absolute URI too.
Python
bsd-3-clause
playpauseandstop/kikola
def path(request): """ kikola.core.context_processors.path =================================== Adds current path and full path variables to templates. To enable, adds ``kikola.core.context_processors.path`` to your project's ``settings`` ``TEMPLATE_CONTEXT_PROCESSORS`` var. **Note:** Django has ``django.core.context_processors.request`` context processor that adding current ``HttpRequest`` object to templates. """ return {'REQUEST_FULL_PATH': request.get_full_path(), 'REQUEST_PATH': request.path} Make ``path`` context processor return request absolute URI too.
def path(request): """ Adds current absolute URI, path and full path variables to templates. To enable, adds ``kikola.core.context_processors.path`` to your project's ``settings`` ``TEMPLATE_CONTEXT_PROCESSORS`` var. **Note:** Django has ``django.core.context_processors.request`` context processor that adding whole ``HttpRequest`` object to templates. """ return {'REQUEST_ABSOLUTE_URI': request.build_absolute_uri(), 'REQUEST_FULL_PATH': request.get_full_path(), 'REQUEST_PATH': request.path}
<commit_before>def path(request): """ kikola.core.context_processors.path =================================== Adds current path and full path variables to templates. To enable, adds ``kikola.core.context_processors.path`` to your project's ``settings`` ``TEMPLATE_CONTEXT_PROCESSORS`` var. **Note:** Django has ``django.core.context_processors.request`` context processor that adding current ``HttpRequest`` object to templates. """ return {'REQUEST_FULL_PATH': request.get_full_path(), 'REQUEST_PATH': request.path} <commit_msg>Make ``path`` context processor return request absolute URI too.<commit_after>
def path(request): """ Adds current absolute URI, path and full path variables to templates. To enable, adds ``kikola.core.context_processors.path`` to your project's ``settings`` ``TEMPLATE_CONTEXT_PROCESSORS`` var. **Note:** Django has ``django.core.context_processors.request`` context processor that adding whole ``HttpRequest`` object to templates. """ return {'REQUEST_ABSOLUTE_URI': request.build_absolute_uri(), 'REQUEST_FULL_PATH': request.get_full_path(), 'REQUEST_PATH': request.path}
def path(request): """ kikola.core.context_processors.path =================================== Adds current path and full path variables to templates. To enable, adds ``kikola.core.context_processors.path`` to your project's ``settings`` ``TEMPLATE_CONTEXT_PROCESSORS`` var. **Note:** Django has ``django.core.context_processors.request`` context processor that adding current ``HttpRequest`` object to templates. """ return {'REQUEST_FULL_PATH': request.get_full_path(), 'REQUEST_PATH': request.path} Make ``path`` context processor return request absolute URI too.def path(request): """ Adds current absolute URI, path and full path variables to templates. To enable, adds ``kikola.core.context_processors.path`` to your project's ``settings`` ``TEMPLATE_CONTEXT_PROCESSORS`` var. **Note:** Django has ``django.core.context_processors.request`` context processor that adding whole ``HttpRequest`` object to templates. """ return {'REQUEST_ABSOLUTE_URI': request.build_absolute_uri(), 'REQUEST_FULL_PATH': request.get_full_path(), 'REQUEST_PATH': request.path}
<commit_before>def path(request): """ kikola.core.context_processors.path =================================== Adds current path and full path variables to templates. To enable, adds ``kikola.core.context_processors.path`` to your project's ``settings`` ``TEMPLATE_CONTEXT_PROCESSORS`` var. **Note:** Django has ``django.core.context_processors.request`` context processor that adding current ``HttpRequest`` object to templates. """ return {'REQUEST_FULL_PATH': request.get_full_path(), 'REQUEST_PATH': request.path} <commit_msg>Make ``path`` context processor return request absolute URI too.<commit_after>def path(request): """ Adds current absolute URI, path and full path variables to templates. To enable, adds ``kikola.core.context_processors.path`` to your project's ``settings`` ``TEMPLATE_CONTEXT_PROCESSORS`` var. **Note:** Django has ``django.core.context_processors.request`` context processor that adding whole ``HttpRequest`` object to templates. """ return {'REQUEST_ABSOLUTE_URI': request.build_absolute_uri(), 'REQUEST_FULL_PATH': request.get_full_path(), 'REQUEST_PATH': request.path}
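Wiring the processor up follows its docstring: add it to the template context processors and the three variables become available in every template rendered with a RequestContext. The settings snippet below is a minimal sketch for the Django versions this code targets (TEMPLATE_CONTEXT_PROCESSORS; newer Django puts the list inside the TEMPLATES option), and the template lines are illustrative.

# settings.py (sketch)
TEMPLATE_CONTEXT_PROCESSORS = (
    # ... whatever processors the project already uses ...
    'kikola.core.context_processors.path',
)

# In a template you could then write, for example:
#   <link rel="canonical" href="{{ REQUEST_ABSOLUTE_URI }}">
#   <form method="post" action="{{ REQUEST_FULL_PATH }}">   {# path plus query string #}
#   {% if REQUEST_PATH == '/contact/' %} ... {% endif %}    {# path only #}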
730c7e6982f737c166924e1cae73eb34024fc4ef
AWSLambdas/vote.py
AWSLambdas/vote.py
""" Watch Votes stream and update Sample ups and downs """ import json import boto3 import time import decimal import base64 from boto3.dynamodb.conditions import Key, Attr def consolidate_disposition(disposition_map, records): for record in records: type = record['eventName'] disposition = 0 if type == "INSERT" or type == "MODIFY": disposition = int(record['dynamodb']['NewImage']['vote']['N']) if type == "MODIFY" or type == "REMOVE": disposition += -int(record['dynamodb']['OldImage']['vote']['N']) sample = record['dynamodb']['Keys']['sample']['B'] disposition_map[sample] = disposition_map.get(sample, 0) + disposition def vote_handler(event, context): dynamodb = boto3.resource('dynamodb') table = dynamodb.Table('Samples') ratings = dict() consolidate_disposition(ratings, event['Records']) for (sample, vote) in ratings.iteritems(): ident = sample[0:19] event = base64.standard_b64decode(sample[18:]) print ident print event
""" Watch Votes stream and update Sample ups and downs """ import json import boto3 import time import decimal import base64 from boto3.dynamodb.conditions import Key, Attr from decimal import Decimal def consolidate_disposition(disposition_map, records): for record in records: type = record['eventName'] disposition = 0 if type == "INSERT" or type == "MODIFY": disposition = int(record['dynamodb']['NewImage']['vote']['N']) if type == "MODIFY" or type == "REMOVE": disposition += -int(record['dynamodb']['OldImage']['vote']['N']) sample = record['dynamodb']['Keys']['sample']['B'] disposition_map[sample] = disposition_map.get(sample, 0) + disposition def vote_handler(event, context): dynamodb = boto3.resource('dynamodb') table = dynamodb.Table('Samples') ratings = dict() consolidate_disposition(ratings, event['Records']) for (sample, vote) in ratings.iteritems(): ident = "b74z7/Q1TdqouIVyIXp+DQU=" """sample[0:19]""" event = "ChIJ1QvXETf7Z0sRBkcNQqQ89ag" """base64.standard_b64decode(sample[18:])""" up = 1 down = -1 table.update_item( Key={'event': event, 'id': ident}, UpdateExpression='ADD ups :up, downs :down', ExpressionAttributeValues={':up':{'N': up}, ':down':{'N': down}} )
Update the ups and downs members of the Samples items.
Update the ups and downs members of the Samples items.
Python
mit
SandcastleApps/partyup,SandcastleApps/partyup,SandcastleApps/partyup
""" Watch Votes stream and update Sample ups and downs """ import json import boto3 import time import decimal import base64 from boto3.dynamodb.conditions import Key, Attr def consolidate_disposition(disposition_map, records): for record in records: type = record['eventName'] disposition = 0 if type == "INSERT" or type == "MODIFY": disposition = int(record['dynamodb']['NewImage']['vote']['N']) if type == "MODIFY" or type == "REMOVE": disposition += -int(record['dynamodb']['OldImage']['vote']['N']) sample = record['dynamodb']['Keys']['sample']['B'] disposition_map[sample] = disposition_map.get(sample, 0) + disposition def vote_handler(event, context): dynamodb = boto3.resource('dynamodb') table = dynamodb.Table('Samples') ratings = dict() consolidate_disposition(ratings, event['Records']) for (sample, vote) in ratings.iteritems(): ident = sample[0:19] event = base64.standard_b64decode(sample[18:]) print ident print event Update the ups and downs members of the Samples items.
""" Watch Votes stream and update Sample ups and downs """ import json import boto3 import time import decimal import base64 from boto3.dynamodb.conditions import Key, Attr from decimal import Decimal def consolidate_disposition(disposition_map, records): for record in records: type = record['eventName'] disposition = 0 if type == "INSERT" or type == "MODIFY": disposition = int(record['dynamodb']['NewImage']['vote']['N']) if type == "MODIFY" or type == "REMOVE": disposition += -int(record['dynamodb']['OldImage']['vote']['N']) sample = record['dynamodb']['Keys']['sample']['B'] disposition_map[sample] = disposition_map.get(sample, 0) + disposition def vote_handler(event, context): dynamodb = boto3.resource('dynamodb') table = dynamodb.Table('Samples') ratings = dict() consolidate_disposition(ratings, event['Records']) for (sample, vote) in ratings.iteritems(): ident = "b74z7/Q1TdqouIVyIXp+DQU=" """sample[0:19]""" event = "ChIJ1QvXETf7Z0sRBkcNQqQ89ag" """base64.standard_b64decode(sample[18:])""" up = 1 down = -1 table.update_item( Key={'event': event, 'id': ident}, UpdateExpression='ADD ups :up, downs :down', ExpressionAttributeValues={':up':{'N': up}, ':down':{'N': down}} )
<commit_before>""" Watch Votes stream and update Sample ups and downs """ import json import boto3 import time import decimal import base64 from boto3.dynamodb.conditions import Key, Attr def consolidate_disposition(disposition_map, records): for record in records: type = record['eventName'] disposition = 0 if type == "INSERT" or type == "MODIFY": disposition = int(record['dynamodb']['NewImage']['vote']['N']) if type == "MODIFY" or type == "REMOVE": disposition += -int(record['dynamodb']['OldImage']['vote']['N']) sample = record['dynamodb']['Keys']['sample']['B'] disposition_map[sample] = disposition_map.get(sample, 0) + disposition def vote_handler(event, context): dynamodb = boto3.resource('dynamodb') table = dynamodb.Table('Samples') ratings = dict() consolidate_disposition(ratings, event['Records']) for (sample, vote) in ratings.iteritems(): ident = sample[0:19] event = base64.standard_b64decode(sample[18:]) print ident print event <commit_msg>Update the ups and downs members of the Samples items.<commit_after>
""" Watch Votes stream and update Sample ups and downs """ import json import boto3 import time import decimal import base64 from boto3.dynamodb.conditions import Key, Attr from decimal import Decimal def consolidate_disposition(disposition_map, records): for record in records: type = record['eventName'] disposition = 0 if type == "INSERT" or type == "MODIFY": disposition = int(record['dynamodb']['NewImage']['vote']['N']) if type == "MODIFY" or type == "REMOVE": disposition += -int(record['dynamodb']['OldImage']['vote']['N']) sample = record['dynamodb']['Keys']['sample']['B'] disposition_map[sample] = disposition_map.get(sample, 0) + disposition def vote_handler(event, context): dynamodb = boto3.resource('dynamodb') table = dynamodb.Table('Samples') ratings = dict() consolidate_disposition(ratings, event['Records']) for (sample, vote) in ratings.iteritems(): ident = "b74z7/Q1TdqouIVyIXp+DQU=" """sample[0:19]""" event = "ChIJ1QvXETf7Z0sRBkcNQqQ89ag" """base64.standard_b64decode(sample[18:])""" up = 1 down = -1 table.update_item( Key={'event': event, 'id': ident}, UpdateExpression='ADD ups :up, downs :down', ExpressionAttributeValues={':up':{'N': up}, ':down':{'N': down}} )
""" Watch Votes stream and update Sample ups and downs """ import json import boto3 import time import decimal import base64 from boto3.dynamodb.conditions import Key, Attr def consolidate_disposition(disposition_map, records): for record in records: type = record['eventName'] disposition = 0 if type == "INSERT" or type == "MODIFY": disposition = int(record['dynamodb']['NewImage']['vote']['N']) if type == "MODIFY" or type == "REMOVE": disposition += -int(record['dynamodb']['OldImage']['vote']['N']) sample = record['dynamodb']['Keys']['sample']['B'] disposition_map[sample] = disposition_map.get(sample, 0) + disposition def vote_handler(event, context): dynamodb = boto3.resource('dynamodb') table = dynamodb.Table('Samples') ratings = dict() consolidate_disposition(ratings, event['Records']) for (sample, vote) in ratings.iteritems(): ident = sample[0:19] event = base64.standard_b64decode(sample[18:]) print ident print event Update the ups and downs members of the Samples items.""" Watch Votes stream and update Sample ups and downs """ import json import boto3 import time import decimal import base64 from boto3.dynamodb.conditions import Key, Attr from decimal import Decimal def consolidate_disposition(disposition_map, records): for record in records: type = record['eventName'] disposition = 0 if type == "INSERT" or type == "MODIFY": disposition = int(record['dynamodb']['NewImage']['vote']['N']) if type == "MODIFY" or type == "REMOVE": disposition += -int(record['dynamodb']['OldImage']['vote']['N']) sample = record['dynamodb']['Keys']['sample']['B'] disposition_map[sample] = disposition_map.get(sample, 0) + disposition def vote_handler(event, context): dynamodb = boto3.resource('dynamodb') table = dynamodb.Table('Samples') ratings = dict() consolidate_disposition(ratings, event['Records']) for (sample, vote) in ratings.iteritems(): ident = "b74z7/Q1TdqouIVyIXp+DQU=" """sample[0:19]""" event = "ChIJ1QvXETf7Z0sRBkcNQqQ89ag" """base64.standard_b64decode(sample[18:])""" up = 1 down = -1 table.update_item( Key={'event': event, 'id': ident}, UpdateExpression='ADD ups :up, downs :down', ExpressionAttributeValues={':up':{'N': up}, ':down':{'N': down}} )
<commit_before>""" Watch Votes stream and update Sample ups and downs """ import json import boto3 import time import decimal import base64 from boto3.dynamodb.conditions import Key, Attr def consolidate_disposition(disposition_map, records): for record in records: type = record['eventName'] disposition = 0 if type == "INSERT" or type == "MODIFY": disposition = int(record['dynamodb']['NewImage']['vote']['N']) if type == "MODIFY" or type == "REMOVE": disposition += -int(record['dynamodb']['OldImage']['vote']['N']) sample = record['dynamodb']['Keys']['sample']['B'] disposition_map[sample] = disposition_map.get(sample, 0) + disposition def vote_handler(event, context): dynamodb = boto3.resource('dynamodb') table = dynamodb.Table('Samples') ratings = dict() consolidate_disposition(ratings, event['Records']) for (sample, vote) in ratings.iteritems(): ident = sample[0:19] event = base64.standard_b64decode(sample[18:]) print ident print event <commit_msg>Update the ups and downs members of the Samples items.<commit_after>""" Watch Votes stream and update Sample ups and downs """ import json import boto3 import time import decimal import base64 from boto3.dynamodb.conditions import Key, Attr from decimal import Decimal def consolidate_disposition(disposition_map, records): for record in records: type = record['eventName'] disposition = 0 if type == "INSERT" or type == "MODIFY": disposition = int(record['dynamodb']['NewImage']['vote']['N']) if type == "MODIFY" or type == "REMOVE": disposition += -int(record['dynamodb']['OldImage']['vote']['N']) sample = record['dynamodb']['Keys']['sample']['B'] disposition_map[sample] = disposition_map.get(sample, 0) + disposition def vote_handler(event, context): dynamodb = boto3.resource('dynamodb') table = dynamodb.Table('Samples') ratings = dict() consolidate_disposition(ratings, event['Records']) for (sample, vote) in ratings.iteritems(): ident = "b74z7/Q1TdqouIVyIXp+DQU=" """sample[0:19]""" event = "ChIJ1QvXETf7Z0sRBkcNQqQ89ag" """base64.standard_b64decode(sample[18:])""" up = 1 down = -1 table.update_item( Key={'event': event, 'id': ident}, UpdateExpression='ADD ups :up, downs :down', ExpressionAttributeValues={':up':{'N': up}, ':down':{'N': down}} )
6373e170c77079e304435d4c2e68201e29a7ecce
python/torque-and-development.py
python/torque-and-development.py
#!/bin/python3 import math import os import random import re import sys # Complete the roadsAndLibraries function below. def roadsAndLibraries(n, c_lib, c_road, cities): print("n {}, c_lib {}, c_road {}, cities {}".format(n, c_lib, c_road, cities)) return 0 if __name__ == '__main__': fptr = open(os.environ['OUTPUT_PATH'], 'w') num_queries = int(input()) for _query in range(num_queries): n, m, c_lib, c_road = list(map(int, input().split())) cities = [] for _city in range(m): cities.append(list(map(int, input().rstrip().split()))) result = roadsAndLibraries(n, c_lib, c_road, cities) fptr.write(str(result) + '\n') fptr.close()
#!/bin/python3 import math import os import random import re import sys # Note the name of the file is based on this URL: # https://www.hackerrank.com/challenges/torque-and-development/problem # The problem name is "Roads and Libraries" def roadsAndLibraries(n, c_lib, c_road, cities): print("n {}, c_lib {}, c_road {}, cities {}".format(n, c_lib, c_road, cities)) return 0 if __name__ == '__main__': fptr = open(os.environ['OUTPUT_PATH'], 'w') num_queries = int(input()) for _query in range(num_queries): n, m, c_lib, c_road = list(map(int, input().split())) cities = [] for _city in range(m): cities.append(list(map(int, input().rstrip().split()))) result = roadsAndLibraries(n, c_lib, c_road, cities) fptr.write(str(result) + '\n') fptr.close()
Include dev comment explaing filename
Include dev comment explaing filename
Python
mit
rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank
#!/bin/python3 import math import os import random import re import sys # Complete the roadsAndLibraries function below. def roadsAndLibraries(n, c_lib, c_road, cities): print("n {}, c_lib {}, c_road {}, cities {}".format(n, c_lib, c_road, cities)) return 0 if __name__ == '__main__': fptr = open(os.environ['OUTPUT_PATH'], 'w') num_queries = int(input()) for _query in range(num_queries): n, m, c_lib, c_road = list(map(int, input().split())) cities = [] for _city in range(m): cities.append(list(map(int, input().rstrip().split()))) result = roadsAndLibraries(n, c_lib, c_road, cities) fptr.write(str(result) + '\n') fptr.close() Include dev comment explaing filename
#!/bin/python3 import math import os import random import re import sys # Note the name of the file is based on this URL: # https://www.hackerrank.com/challenges/torque-and-development/problem # The problem name is "Roads and Libraries" def roadsAndLibraries(n, c_lib, c_road, cities): print("n {}, c_lib {}, c_road {}, cities {}".format(n, c_lib, c_road, cities)) return 0 if __name__ == '__main__': fptr = open(os.environ['OUTPUT_PATH'], 'w') num_queries = int(input()) for _query in range(num_queries): n, m, c_lib, c_road = list(map(int, input().split())) cities = [] for _city in range(m): cities.append(list(map(int, input().rstrip().split()))) result = roadsAndLibraries(n, c_lib, c_road, cities) fptr.write(str(result) + '\n') fptr.close()
<commit_before> #!/bin/python3 import math import os import random import re import sys # Complete the roadsAndLibraries function below. def roadsAndLibraries(n, c_lib, c_road, cities): print("n {}, c_lib {}, c_road {}, cities {}".format(n, c_lib, c_road, cities)) return 0 if __name__ == '__main__': fptr = open(os.environ['OUTPUT_PATH'], 'w') num_queries = int(input()) for _query in range(num_queries): n, m, c_lib, c_road = list(map(int, input().split())) cities = [] for _city in range(m): cities.append(list(map(int, input().rstrip().split()))) result = roadsAndLibraries(n, c_lib, c_road, cities) fptr.write(str(result) + '\n') fptr.close() <commit_msg>Include dev comment explaing filename<commit_after>
#!/bin/python3 import math import os import random import re import sys # Note the name of the file is based on this URL: # https://www.hackerrank.com/challenges/torque-and-development/problem # The problem name is "Roads and Libraries" def roadsAndLibraries(n, c_lib, c_road, cities): print("n {}, c_lib {}, c_road {}, cities {}".format(n, c_lib, c_road, cities)) return 0 if __name__ == '__main__': fptr = open(os.environ['OUTPUT_PATH'], 'w') num_queries = int(input()) for _query in range(num_queries): n, m, c_lib, c_road = list(map(int, input().split())) cities = [] for _city in range(m): cities.append(list(map(int, input().rstrip().split()))) result = roadsAndLibraries(n, c_lib, c_road, cities) fptr.write(str(result) + '\n') fptr.close()
#!/bin/python3 import math import os import random import re import sys # Complete the roadsAndLibraries function below. def roadsAndLibraries(n, c_lib, c_road, cities): print("n {}, c_lib {}, c_road {}, cities {}".format(n, c_lib, c_road, cities)) return 0 if __name__ == '__main__': fptr = open(os.environ['OUTPUT_PATH'], 'w') num_queries = int(input()) for _query in range(num_queries): n, m, c_lib, c_road = list(map(int, input().split())) cities = [] for _city in range(m): cities.append(list(map(int, input().rstrip().split()))) result = roadsAndLibraries(n, c_lib, c_road, cities) fptr.write(str(result) + '\n') fptr.close() Include dev comment explaing filename #!/bin/python3 import math import os import random import re import sys # Note the name of the file is based on this URL: # https://www.hackerrank.com/challenges/torque-and-development/problem # The problem name is "Roads and Libraries" def roadsAndLibraries(n, c_lib, c_road, cities): print("n {}, c_lib {}, c_road {}, cities {}".format(n, c_lib, c_road, cities)) return 0 if __name__ == '__main__': fptr = open(os.environ['OUTPUT_PATH'], 'w') num_queries = int(input()) for _query in range(num_queries): n, m, c_lib, c_road = list(map(int, input().split())) cities = [] for _city in range(m): cities.append(list(map(int, input().rstrip().split()))) result = roadsAndLibraries(n, c_lib, c_road, cities) fptr.write(str(result) + '\n') fptr.close()
<commit_before> #!/bin/python3 import math import os import random import re import sys # Complete the roadsAndLibraries function below. def roadsAndLibraries(n, c_lib, c_road, cities): print("n {}, c_lib {}, c_road {}, cities {}".format(n, c_lib, c_road, cities)) return 0 if __name__ == '__main__': fptr = open(os.environ['OUTPUT_PATH'], 'w') num_queries = int(input()) for _query in range(num_queries): n, m, c_lib, c_road = list(map(int, input().split())) cities = [] for _city in range(m): cities.append(list(map(int, input().rstrip().split()))) result = roadsAndLibraries(n, c_lib, c_road, cities) fptr.write(str(result) + '\n') fptr.close() <commit_msg>Include dev comment explaing filename<commit_after> #!/bin/python3 import math import os import random import re import sys # Note the name of the file is based on this URL: # https://www.hackerrank.com/challenges/torque-and-development/problem # The problem name is "Roads and Libraries" def roadsAndLibraries(n, c_lib, c_road, cities): print("n {}, c_lib {}, c_road {}, cities {}".format(n, c_lib, c_road, cities)) return 0 if __name__ == '__main__': fptr = open(os.environ['OUTPUT_PATH'], 'w') num_queries = int(input()) for _query in range(num_queries): n, m, c_lib, c_road = list(map(int, input().split())) cities = [] for _city in range(m): cities.append(list(map(int, input().rstrip().split()))) result = roadsAndLibraries(n, c_lib, c_road, cities) fptr.write(str(result) + '\n') fptr.close()
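The handler in the record above is still a stub that prints its inputs and returns 0. One common way to complete this particular problem, shown purely as a sketch rather than the author's eventual solution, is to find connected components and, for each component of k cities, pay either k libraries or one library plus k - 1 roads, whichever is cheaper.

def roads_and_libraries(n, c_lib, c_road, cities):
    # adjacency list over cities 1..n
    adj = {i: [] for i in range(1, n + 1)}
    for u, v in cities:
        adj[u].append(v)
        adj[v].append(u)

    seen = set()
    total = 0
    for start in range(1, n + 1):
        if start in seen:
            continue
        # iterative DFS to measure the size of this component
        stack = [start]
        seen.add(start)
        size = 0
        while stack:
            city = stack.pop()
            size += 1
            for neighbour in adj[city]:
                if neighbour not in seen:
                    seen.add(neighbour)
                    stack.append(neighbour)
        # a library everywhere, or one library plus a spanning tree of roads
        total += min(c_lib * size, c_lib + c_road * (size - 1))
    return total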
cfffc18c5179a441c0412f313e20a1b3b7059f1c
dump_contributors.py
dump_contributors.py
import requests url = "https://api.github.com/repos/urschrei/pyzotero/contributors" result = requests.get(url) result.raise_for_status() as_dict = result.json() # remove me from the list as_dict.pop(0) header = "# This is the list of people (as distinct from [AUTHORS](AUTHORS)) who have contributed code to Pyzotero.\n\n| **Commits** | **Contributor**<br/> |\n| --- |--- |\n" template = "| {contributions} | [{login}](https://github.com/urschrei/pyzotero/commits?author={login}) |\n" formatted = (template.format(**dct) for dct in as_dict) with open("CONTRIBUTORS.md", 'w') as f: f.write(header) for item in formatted: f.write(item)
# -*- coding: utf-8 -*- import io import requests url = "https://api.github.com/repos/urschrei/pyzotero/contributors" result = requests.get(url) result.raise_for_status() as_dict = result.json() # remove me from the list as_dict.pop(0) header = u"# This is the list of people (as distinct from [AUTHORS](AUTHORS)) who have contributed code to Pyzotero.\n\n| **Commits** | **Contributor**<br/> |\n| --- |--- |\n" template = u"| {contributions} | [{login}](https://github.com/urschrei/pyzotero/commits?author={login}) |\n" with io.open("CONTRIBUTORS.md", 'w', encoding="utf-8") as f: f.write(header) f.writelines(template.format(**dct) for dct in as_dict)
Make contributor dump a bit more robust
Make contributor dump a bit more robust
Python
mit
urschrei/pyzotero
import requests url = "https://api.github.com/repos/urschrei/pyzotero/contributors" result = requests.get(url) result.raise_for_status() as_dict = result.json() # remove me from the list as_dict.pop(0) header = "# This is the list of people (as distinct from [AUTHORS](AUTHORS)) who have contributed code to Pyzotero.\n\n| **Commits** | **Contributor**<br/> |\n| --- |--- |\n" template = "| {contributions} | [{login}](https://github.com/urschrei/pyzotero/commits?author={login}) |\n" formatted = (template.format(**dct) for dct in as_dict) with open("CONTRIBUTORS.md", 'w') as f: f.write(header) for item in formatted: f.write(item) Make contributor dump a bit more robust
# -*- coding: utf-8 -*- import io import requests url = "https://api.github.com/repos/urschrei/pyzotero/contributors" result = requests.get(url) result.raise_for_status() as_dict = result.json() # remove me from the list as_dict.pop(0) header = u"# This is the list of people (as distinct from [AUTHORS](AUTHORS)) who have contributed code to Pyzotero.\n\n| **Commits** | **Contributor**<br/> |\n| --- |--- |\n" template = u"| {contributions} | [{login}](https://github.com/urschrei/pyzotero/commits?author={login}) |\n" with io.open("CONTRIBUTORS.md", 'w', encoding="utf-8") as f: f.write(header) f.writelines(template.format(**dct) for dct in as_dict)
<commit_before>import requests url = "https://api.github.com/repos/urschrei/pyzotero/contributors" result = requests.get(url) result.raise_for_status() as_dict = result.json() # remove me from the list as_dict.pop(0) header = "# This is the list of people (as distinct from [AUTHORS](AUTHORS)) who have contributed code to Pyzotero.\n\n| **Commits** | **Contributor**<br/> |\n| --- |--- |\n" template = "| {contributions} | [{login}](https://github.com/urschrei/pyzotero/commits?author={login}) |\n" formatted = (template.format(**dct) for dct in as_dict) with open("CONTRIBUTORS.md", 'w') as f: f.write(header) for item in formatted: f.write(item) <commit_msg>Make contributor dump a bit more robust<commit_after>
# -*- coding: utf-8 -*- import io import requests url = "https://api.github.com/repos/urschrei/pyzotero/contributors" result = requests.get(url) result.raise_for_status() as_dict = result.json() # remove me from the list as_dict.pop(0) header = u"# This is the list of people (as distinct from [AUTHORS](AUTHORS)) who have contributed code to Pyzotero.\n\n| **Commits** | **Contributor**<br/> |\n| --- |--- |\n" template = u"| {contributions} | [{login}](https://github.com/urschrei/pyzotero/commits?author={login}) |\n" with io.open("CONTRIBUTORS.md", 'w', encoding="utf-8") as f: f.write(header) f.writelines(template.format(**dct) for dct in as_dict)
import requests url = "https://api.github.com/repos/urschrei/pyzotero/contributors" result = requests.get(url) result.raise_for_status() as_dict = result.json() # remove me from the list as_dict.pop(0) header = "# This is the list of people (as distinct from [AUTHORS](AUTHORS)) who have contributed code to Pyzotero.\n\n| **Commits** | **Contributor**<br/> |\n| --- |--- |\n" template = "| {contributions} | [{login}](https://github.com/urschrei/pyzotero/commits?author={login}) |\n" formatted = (template.format(**dct) for dct in as_dict) with open("CONTRIBUTORS.md", 'w') as f: f.write(header) for item in formatted: f.write(item) Make contributor dump a bit more robust# -*- coding: utf-8 -*- import io import requests url = "https://api.github.com/repos/urschrei/pyzotero/contributors" result = requests.get(url) result.raise_for_status() as_dict = result.json() # remove me from the list as_dict.pop(0) header = u"# This is the list of people (as distinct from [AUTHORS](AUTHORS)) who have contributed code to Pyzotero.\n\n| **Commits** | **Contributor**<br/> |\n| --- |--- |\n" template = u"| {contributions} | [{login}](https://github.com/urschrei/pyzotero/commits?author={login}) |\n" with io.open("CONTRIBUTORS.md", 'w', encoding="utf-8") as f: f.write(header) f.writelines(template.format(**dct) for dct in as_dict)
<commit_before>import requests url = "https://api.github.com/repos/urschrei/pyzotero/contributors" result = requests.get(url) result.raise_for_status() as_dict = result.json() # remove me from the list as_dict.pop(0) header = "# This is the list of people (as distinct from [AUTHORS](AUTHORS)) who have contributed code to Pyzotero.\n\n| **Commits** | **Contributor**<br/> |\n| --- |--- |\n" template = "| {contributions} | [{login}](https://github.com/urschrei/pyzotero/commits?author={login}) |\n" formatted = (template.format(**dct) for dct in as_dict) with open("CONTRIBUTORS.md", 'w') as f: f.write(header) for item in formatted: f.write(item) <commit_msg>Make contributor dump a bit more robust<commit_after># -*- coding: utf-8 -*- import io import requests url = "https://api.github.com/repos/urschrei/pyzotero/contributors" result = requests.get(url) result.raise_for_status() as_dict = result.json() # remove me from the list as_dict.pop(0) header = u"# This is the list of people (as distinct from [AUTHORS](AUTHORS)) who have contributed code to Pyzotero.\n\n| **Commits** | **Contributor**<br/> |\n| --- |--- |\n" template = u"| {contributions} | [{login}](https://github.com/urschrei/pyzotero/commits?author={login}) |\n" with io.open("CONTRIBUTORS.md", 'w', encoding="utf-8") as f: f.write(header) f.writelines(template.format(**dct) for dct in as_dict)
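For a concrete picture of what each written row looks like, here is the row template from the script above applied to a made-up contributor record carrying the two keys the GitHub contributors endpoint provides, login and contributions:

template = u"| {contributions} | [{login}](https://github.com/urschrei/pyzotero/commits?author={login}) |\n"
fake_contributor = {u"login": u"example-user", u"contributions": 42}
print(template.format(**fake_contributor))
# | 42 | [example-user](https://github.com/urschrei/pyzotero/commits?author=example-user) |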
88bf90d2949da603567e75f2492e5880b2ff8009
build_scripts/rename_wheels.py
build_scripts/rename_wheels.py
# renames ABI string in wheels from cp34m or cp35m to none import os for file in os.listdir('../dist'): if '-cp34m-' in file: file_parts = file.split('-cp34m-') os.rename('../dist/{}'.format(file), '../dist/{}-none-{}'.format(file_parts[0], file_parts[1])) elif '-cp35m-' in file: file_parts = file.split('-cp35m-') os.rename('../dist/{}'.format(file), '../dist/{}-none-{}'.format(file_parts[0], file_parts[1]))
# renames ABI string in wheels from cp34m, cp35m, cp36m to none import os for file in os.listdir('../dist'): if '-cp34m-' in file: file_parts = file.split('-cp34m-') os.rename('../dist/{}'.format(file), '../dist/{}-none-{}'.format(file_parts[0], file_parts[1])) elif '-cp35m-' in file: file_parts = file.split('-cp35m-') os.rename('../dist/{}'.format(file), '../dist/{}-none-{}'.format(file_parts[0], file_parts[1])) elif '-cp36m-' in file: file_parts = file.split('-cp36m-') os.rename('../dist/{}'.format(file), '../dist/{}-none-{}'.format(file_parts[0], file_parts[1]))
Add support for python 3.5 and 3.6
Add support for python 3.5 and 3.6
Python
mit
missionpinball/mpf-mc,missionpinball/mpf-mc,missionpinball/mpf-mc
# renames ABI string in wheels from cp34m or cp35m to none import os for file in os.listdir('../dist'): if '-cp34m-' in file: file_parts = file.split('-cp34m-') os.rename('../dist/{}'.format(file), '../dist/{}-none-{}'.format(file_parts[0], file_parts[1])) elif '-cp35m-' in file: file_parts = file.split('-cp35m-') os.rename('../dist/{}'.format(file), '../dist/{}-none-{}'.format(file_parts[0], file_parts[1])) Add support for python 3.5 and 3.6
# renames ABI string in wheels from cp34m, cp35m, cp36m to none import os for file in os.listdir('../dist'): if '-cp34m-' in file: file_parts = file.split('-cp34m-') os.rename('../dist/{}'.format(file), '../dist/{}-none-{}'.format(file_parts[0], file_parts[1])) elif '-cp35m-' in file: file_parts = file.split('-cp35m-') os.rename('../dist/{}'.format(file), '../dist/{}-none-{}'.format(file_parts[0], file_parts[1])) elif '-cp36m-' in file: file_parts = file.split('-cp36m-') os.rename('../dist/{}'.format(file), '../dist/{}-none-{}'.format(file_parts[0], file_parts[1]))
<commit_before># renames ABI string in wheels from cp34m or cp35m to none import os for file in os.listdir('../dist'): if '-cp34m-' in file: file_parts = file.split('-cp34m-') os.rename('../dist/{}'.format(file), '../dist/{}-none-{}'.format(file_parts[0], file_parts[1])) elif '-cp35m-' in file: file_parts = file.split('-cp35m-') os.rename('../dist/{}'.format(file), '../dist/{}-none-{}'.format(file_parts[0], file_parts[1])) <commit_msg>Add support for python 3.5 and 3.6<commit_after>
# renames ABI string in wheels from cp34m, cp35m, cp36m to none import os for file in os.listdir('../dist'): if '-cp34m-' in file: file_parts = file.split('-cp34m-') os.rename('../dist/{}'.format(file), '../dist/{}-none-{}'.format(file_parts[0], file_parts[1])) elif '-cp35m-' in file: file_parts = file.split('-cp35m-') os.rename('../dist/{}'.format(file), '../dist/{}-none-{}'.format(file_parts[0], file_parts[1])) elif '-cp36m-' in file: file_parts = file.split('-cp36m-') os.rename('../dist/{}'.format(file), '../dist/{}-none-{}'.format(file_parts[0], file_parts[1]))
# renames ABI string in wheels from cp34m or cp35m to none import os for file in os.listdir('../dist'): if '-cp34m-' in file: file_parts = file.split('-cp34m-') os.rename('../dist/{}'.format(file), '../dist/{}-none-{}'.format(file_parts[0], file_parts[1])) elif '-cp35m-' in file: file_parts = file.split('-cp35m-') os.rename('../dist/{}'.format(file), '../dist/{}-none-{}'.format(file_parts[0], file_parts[1])) Add support for python 3.5 and 3.6# renames ABI string in wheels from cp34m, cp35m, cp36m to none import os for file in os.listdir('../dist'): if '-cp34m-' in file: file_parts = file.split('-cp34m-') os.rename('../dist/{}'.format(file), '../dist/{}-none-{}'.format(file_parts[0], file_parts[1])) elif '-cp35m-' in file: file_parts = file.split('-cp35m-') os.rename('../dist/{}'.format(file), '../dist/{}-none-{}'.format(file_parts[0], file_parts[1])) elif '-cp36m-' in file: file_parts = file.split('-cp36m-') os.rename('../dist/{}'.format(file), '../dist/{}-none-{}'.format(file_parts[0], file_parts[1]))
<commit_before># renames ABI string in wheels from cp34m or cp35m to none import os for file in os.listdir('../dist'): if '-cp34m-' in file: file_parts = file.split('-cp34m-') os.rename('../dist/{}'.format(file), '../dist/{}-none-{}'.format(file_parts[0], file_parts[1])) elif '-cp35m-' in file: file_parts = file.split('-cp35m-') os.rename('../dist/{}'.format(file), '../dist/{}-none-{}'.format(file_parts[0], file_parts[1])) <commit_msg>Add support for python 3.5 and 3.6<commit_after># renames ABI string in wheels from cp34m, cp35m, cp36m to none import os for file in os.listdir('../dist'): if '-cp34m-' in file: file_parts = file.split('-cp34m-') os.rename('../dist/{}'.format(file), '../dist/{}-none-{}'.format(file_parts[0], file_parts[1])) elif '-cp35m-' in file: file_parts = file.split('-cp35m-') os.rename('../dist/{}'.format(file), '../dist/{}-none-{}'.format(file_parts[0], file_parts[1])) elif '-cp36m-' in file: file_parts = file.split('-cp36m-') os.rename('../dist/{}'.format(file), '../dist/{}-none-{}'.format(file_parts[0], file_parts[1]))
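A dry run of the renaming rule above on made-up wheel filenames shows what the script actually changes: only the ABI tag becomes none, while the Python tag and platform tag stay put.

for name in (
    "mpfmc-0.50.0-cp34-cp34m-win_amd64.whl",
    "mpfmc-0.50.0-cp36-cp36m-manylinux1_x86_64.whl",
):
    for abi in ("-cp34m-", "-cp35m-", "-cp36m-"):
        if abi in name:
            head, tail = name.split(abi)
            print("{} -> {}-none-{}".format(name, head, tail))
            break
# mpfmc-0.50.0-cp34-cp34m-win_amd64.whl -> mpfmc-0.50.0-cp34-none-win_amd64.whl
# mpfmc-0.50.0-cp36-cp36m-manylinux1_x86_64.whl -> mpfmc-0.50.0-cp36-none-manylinux1_x86_64.whl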
15a5e861e63fa5b2662968ce4296c75ecfadee50
iscc_bench/readers/__init__.py
iscc_bench/readers/__init__.py
# -*- coding: utf-8 -*- from iscc_bench.readers.bxbooks import bxbooks ALL_READERS = (bxbooks,)
# -*- coding: utf-8 -*- from iscc_bench.readers.bxbooks import bxbooks from iscc_bench.readers.harvard import harvard ALL_READERS = (bxbooks, harvard)
Add harvard reader to ALL_READERS
Add harvard reader to ALL_READERS
Python
bsd-2-clause
coblo/isccbench
# -*- coding: utf-8 -*- from iscc_bench.readers.bxbooks import bxbooks ALL_READERS = (bxbooks,) Add harvard reader to ALL_READERS
# -*- coding: utf-8 -*- from iscc_bench.readers.bxbooks import bxbooks from iscc_bench.readers.harvard import harvard ALL_READERS = (bxbooks, harvard)
<commit_before># -*- coding: utf-8 -*- from iscc_bench.readers.bxbooks import bxbooks ALL_READERS = (bxbooks,) <commit_msg>Add harvard reader to ALL_READERS<commit_after>
# -*- coding: utf-8 -*- from iscc_bench.readers.bxbooks import bxbooks from iscc_bench.readers.harvard import harvard ALL_READERS = (bxbooks, harvard)
# -*- coding: utf-8 -*- from iscc_bench.readers.bxbooks import bxbooks ALL_READERS = (bxbooks,) Add harvard reader to ALL_READERS# -*- coding: utf-8 -*- from iscc_bench.readers.bxbooks import bxbooks from iscc_bench.readers.harvard import harvard ALL_READERS = (bxbooks, harvard)
<commit_before># -*- coding: utf-8 -*- from iscc_bench.readers.bxbooks import bxbooks ALL_READERS = (bxbooks,) <commit_msg>Add harvard reader to ALL_READERS<commit_after># -*- coding: utf-8 -*- from iscc_bench.readers.bxbooks import bxbooks from iscc_bench.readers.harvard import harvard ALL_READERS = (bxbooks, harvard)
ae260be14e575d9678bd20e94c44c70beb182848
twitterexample.py
twitterexample.py
import json from urllib2 import urlopen import micromodels class TwitterUser(micromodels.Model): id = micromodels.IntegerField() screen_name = micromodels.CharField() name = micromodels.CharField() description = micromodels.CharField() def get_profile_url(self): return 'http://twitter.com/%s' % self.screen_name class Tweet(micromodels.Model): id = micromodels.IntegerField() text = micromodels.CharField() user = micromodels.ModelField(TwitterUser) json_data = urlopen('http://api.twitter.com/1/statuses/show/20.json').read() tweet = Tweet(json.loads(json_data)) print "Tweet was posted by %s (%s)" % (tweet.user.name, tweet.user.get_profile_url())
import json from urllib2 import urlopen import micromodels class TwitterUser(micromodels.Model): id = micromodels.IntegerField() screen_name = micromodels.CharField() name = micromodels.CharField() description = micromodels.CharField() def get_profile_url(self): return 'http://twitter.com/%s' % self.screen_name class Tweet(micromodels.Model): id = micromodels.IntegerField() text = micromodels.CharField() created_at = micromodels.DateTimeField(format="%a %b %d %H:%M:%S +0000 %Y") user = micromodels.ModelField(TwitterUser) json_data = urlopen('http://api.twitter.com/1/statuses/show/20.json').read() tweet = Tweet(json.loads(json_data)) print "Tweet was posted by %s (%s) on a %s" % ( tweet.user.name, tweet.user.get_profile_url(), tweet.created_at.strftime("%A") )
Add created_at DateTimeField to Twitter example
Add created_at DateTimeField to Twitter example
Python
unlicense
j4mie/micromodels
import json from urllib2 import urlopen import micromodels class TwitterUser(micromodels.Model): id = micromodels.IntegerField() screen_name = micromodels.CharField() name = micromodels.CharField() description = micromodels.CharField() def get_profile_url(self): return 'http://twitter.com/%s' % self.screen_name class Tweet(micromodels.Model): id = micromodels.IntegerField() text = micromodels.CharField() user = micromodels.ModelField(TwitterUser) json_data = urlopen('http://api.twitter.com/1/statuses/show/20.json').read() tweet = Tweet(json.loads(json_data)) print "Tweet was posted by %s (%s)" % (tweet.user.name, tweet.user.get_profile_url()) Add created_at DateTimeField to Twitter example
import json from urllib2 import urlopen import micromodels class TwitterUser(micromodels.Model): id = micromodels.IntegerField() screen_name = micromodels.CharField() name = micromodels.CharField() description = micromodels.CharField() def get_profile_url(self): return 'http://twitter.com/%s' % self.screen_name class Tweet(micromodels.Model): id = micromodels.IntegerField() text = micromodels.CharField() created_at = micromodels.DateTimeField(format="%a %b %d %H:%M:%S +0000 %Y") user = micromodels.ModelField(TwitterUser) json_data = urlopen('http://api.twitter.com/1/statuses/show/20.json').read() tweet = Tweet(json.loads(json_data)) print "Tweet was posted by %s (%s) on a %s" % ( tweet.user.name, tweet.user.get_profile_url(), tweet.created_at.strftime("%A") )
<commit_before>import json from urllib2 import urlopen import micromodels class TwitterUser(micromodels.Model): id = micromodels.IntegerField() screen_name = micromodels.CharField() name = micromodels.CharField() description = micromodels.CharField() def get_profile_url(self): return 'http://twitter.com/%s' % self.screen_name class Tweet(micromodels.Model): id = micromodels.IntegerField() text = micromodels.CharField() user = micromodels.ModelField(TwitterUser) json_data = urlopen('http://api.twitter.com/1/statuses/show/20.json').read() tweet = Tweet(json.loads(json_data)) print "Tweet was posted by %s (%s)" % (tweet.user.name, tweet.user.get_profile_url()) <commit_msg>Add created_at DateTimeField to Twitter example<commit_after>
import json from urllib2 import urlopen import micromodels class TwitterUser(micromodels.Model): id = micromodels.IntegerField() screen_name = micromodels.CharField() name = micromodels.CharField() description = micromodels.CharField() def get_profile_url(self): return 'http://twitter.com/%s' % self.screen_name class Tweet(micromodels.Model): id = micromodels.IntegerField() text = micromodels.CharField() created_at = micromodels.DateTimeField(format="%a %b %d %H:%M:%S +0000 %Y") user = micromodels.ModelField(TwitterUser) json_data = urlopen('http://api.twitter.com/1/statuses/show/20.json').read() tweet = Tweet(json.loads(json_data)) print "Tweet was posted by %s (%s) on a %s" % ( tweet.user.name, tweet.user.get_profile_url(), tweet.created_at.strftime("%A") )
import json from urllib2 import urlopen import micromodels class TwitterUser(micromodels.Model): id = micromodels.IntegerField() screen_name = micromodels.CharField() name = micromodels.CharField() description = micromodels.CharField() def get_profile_url(self): return 'http://twitter.com/%s' % self.screen_name class Tweet(micromodels.Model): id = micromodels.IntegerField() text = micromodels.CharField() user = micromodels.ModelField(TwitterUser) json_data = urlopen('http://api.twitter.com/1/statuses/show/20.json').read() tweet = Tweet(json.loads(json_data)) print "Tweet was posted by %s (%s)" % (tweet.user.name, tweet.user.get_profile_url()) Add created_at DateTimeField to Twitter exampleimport json from urllib2 import urlopen import micromodels class TwitterUser(micromodels.Model): id = micromodels.IntegerField() screen_name = micromodels.CharField() name = micromodels.CharField() description = micromodels.CharField() def get_profile_url(self): return 'http://twitter.com/%s' % self.screen_name class Tweet(micromodels.Model): id = micromodels.IntegerField() text = micromodels.CharField() created_at = micromodels.DateTimeField(format="%a %b %d %H:%M:%S +0000 %Y") user = micromodels.ModelField(TwitterUser) json_data = urlopen('http://api.twitter.com/1/statuses/show/20.json').read() tweet = Tweet(json.loads(json_data)) print "Tweet was posted by %s (%s) on a %s" % ( tweet.user.name, tweet.user.get_profile_url(), tweet.created_at.strftime("%A") )
<commit_before>import json from urllib2 import urlopen import micromodels class TwitterUser(micromodels.Model): id = micromodels.IntegerField() screen_name = micromodels.CharField() name = micromodels.CharField() description = micromodels.CharField() def get_profile_url(self): return 'http://twitter.com/%s' % self.screen_name class Tweet(micromodels.Model): id = micromodels.IntegerField() text = micromodels.CharField() user = micromodels.ModelField(TwitterUser) json_data = urlopen('http://api.twitter.com/1/statuses/show/20.json').read() tweet = Tweet(json.loads(json_data)) print "Tweet was posted by %s (%s)" % (tweet.user.name, tweet.user.get_profile_url()) <commit_msg>Add created_at DateTimeField to Twitter example<commit_after>import json from urllib2 import urlopen import micromodels class TwitterUser(micromodels.Model): id = micromodels.IntegerField() screen_name = micromodels.CharField() name = micromodels.CharField() description = micromodels.CharField() def get_profile_url(self): return 'http://twitter.com/%s' % self.screen_name class Tweet(micromodels.Model): id = micromodels.IntegerField() text = micromodels.CharField() created_at = micromodels.DateTimeField(format="%a %b %d %H:%M:%S +0000 %Y") user = micromodels.ModelField(TwitterUser) json_data = urlopen('http://api.twitter.com/1/statuses/show/20.json').read() tweet = Tweet(json.loads(json_data)) print "Tweet was posted by %s (%s) on a %s" % ( tweet.user.name, tweet.user.get_profile_url(), tweet.created_at.strftime("%A") )
a69edf3e488125067371a96626b7f3cd45e9a11f
inventory.py
inventory.py
from flask import Flask, render_template, url_for, redirect from flask import session, escape, request from peewee import * #from datetime import date app = Flask(__name__) # http://docs.peewee-orm.com/en/latest/peewee/quickstart.html database = SqliteDatabase('developmentData.db') #class Device(Model): # idNumber = IntField() # serialNumber = CharField() # typeCategory = CharField() # description = TextField() # issues = TextField() # photo = CharField() # quality = CharField() @app.route('/') def index(): # http://flask.pocoo.org/snippets/15/ if 'username' in session: return render_template('inventory.html', inventoryData="", deviceLogData="") return redirect(url_for('login')) @app.route('/login', methods=['GET', 'POST']) def login(): if request.method == 'POST': session['username'] = request.form['username'] return url_for('index') return '''<form action="" method="post"><p><input type=text name=username><p><input type=submit value=Login></form>''' @app.route('/logout') def logout(): session.pop('username', None) return redirect(url_for('index')) if __name__ == '__main__': db.connect() app.run()
from flask import Flask, render_template, url_for, redirect from flask import session, escape, request from peewee import * #from datetime import date app = Flask(__name__) # http://docs.peewee-orm.com/en/latest/peewee/quickstart.html database = SqliteDatabase('developmentData.db') #class Device(Model): # idNumber = IntField() # serialNumber = CharField() # typeCategory = CharField() # description = TextField() # issues = TextField() # photo = CharField() # quality = CharField() @app.route('/') def index(): # http://flask.pocoo.org/snippets/15/ if 'username' in session: return render_template('inventory.html', inventoryData="", deviceLogData="") return redirect(url_for('login')) @app.route('/login', methods=['GET', 'POST']) def login(): if request.method == 'POST': session['username'] = request.form['username'] try: return url_for('index') except Exception, e: return e return '''<form action="" method="post"><p><input type=text name=username><p><input type=submit value=Login></form>''' @app.route('/logout') def logout(): session.pop('username', None) return redirect(url_for('index')) if __name__ == '__main__': db.connect() app.run()
Add try for catching server error
Add try for catching server error
Python
mit
lcdi/Inventory,lcdi/Inventory,lcdi/Inventory,lcdi/Inventory
from flask import Flask, render_template, url_for, redirect from flask import session, escape, request from peewee import * #from datetime import date app = Flask(__name__) # http://docs.peewee-orm.com/en/latest/peewee/quickstart.html database = SqliteDatabase('developmentData.db') #class Device(Model): # idNumber = IntField() # serialNumber = CharField() # typeCategory = CharField() # description = TextField() # issues = TextField() # photo = CharField() # quality = CharField() @app.route('/') def index(): # http://flask.pocoo.org/snippets/15/ if 'username' in session: return render_template('inventory.html', inventoryData="", deviceLogData="") return redirect(url_for('login')) @app.route('/login', methods=['GET', 'POST']) def login(): if request.method == 'POST': session['username'] = request.form['username'] return url_for('index') return '''<form action="" method="post"><p><input type=text name=username><p><input type=submit value=Login></form>''' @app.route('/logout') def logout(): session.pop('username', None) return redirect(url_for('index')) if __name__ == '__main__': db.connect() app.run() Add try for catching server error
from flask import Flask, render_template, url_for, redirect from flask import session, escape, request from peewee import * #from datetime import date app = Flask(__name__) # http://docs.peewee-orm.com/en/latest/peewee/quickstart.html database = SqliteDatabase('developmentData.db') #class Device(Model): # idNumber = IntField() # serialNumber = CharField() # typeCategory = CharField() # description = TextField() # issues = TextField() # photo = CharField() # quality = CharField() @app.route('/') def index(): # http://flask.pocoo.org/snippets/15/ if 'username' in session: return render_template('inventory.html', inventoryData="", deviceLogData="") return redirect(url_for('login')) @app.route('/login', methods=['GET', 'POST']) def login(): if request.method == 'POST': session['username'] = request.form['username'] try: return url_for('index') except Exception, e: return e return '''<form action="" method="post"><p><input type=text name=username><p><input type=submit value=Login></form>''' @app.route('/logout') def logout(): session.pop('username', None) return redirect(url_for('index')) if __name__ == '__main__': db.connect() app.run()
<commit_before>from flask import Flask, render_template, url_for, redirect from flask import session, escape, request from peewee import * #from datetime import date app = Flask(__name__) # http://docs.peewee-orm.com/en/latest/peewee/quickstart.html database = SqliteDatabase('developmentData.db') #class Device(Model): # idNumber = IntField() # serialNumber = CharField() # typeCategory = CharField() # description = TextField() # issues = TextField() # photo = CharField() # quality = CharField() @app.route('/') def index(): # http://flask.pocoo.org/snippets/15/ if 'username' in session: return render_template('inventory.html', inventoryData="", deviceLogData="") return redirect(url_for('login')) @app.route('/login', methods=['GET', 'POST']) def login(): if request.method == 'POST': session['username'] = request.form['username'] return url_for('index') return '''<form action="" method="post"><p><input type=text name=username><p><input type=submit value=Login></form>''' @app.route('/logout') def logout(): session.pop('username', None) return redirect(url_for('index')) if __name__ == '__main__': db.connect() app.run() <commit_msg>Add try for catching server error<commit_after>
from flask import Flask, render_template, url_for, redirect from flask import session, escape, request from peewee import * #from datetime import date app = Flask(__name__) # http://docs.peewee-orm.com/en/latest/peewee/quickstart.html database = SqliteDatabase('developmentData.db') #class Device(Model): # idNumber = IntField() # serialNumber = CharField() # typeCategory = CharField() # description = TextField() # issues = TextField() # photo = CharField() # quality = CharField() @app.route('/') def index(): # http://flask.pocoo.org/snippets/15/ if 'username' in session: return render_template('inventory.html', inventoryData="", deviceLogData="") return redirect(url_for('login')) @app.route('/login', methods=['GET', 'POST']) def login(): if request.method == 'POST': session['username'] = request.form['username'] try: return url_for('index') except Exception, e: return e return '''<form action="" method="post"><p><input type=text name=username><p><input type=submit value=Login></form>''' @app.route('/logout') def logout(): session.pop('username', None) return redirect(url_for('index')) if __name__ == '__main__': db.connect() app.run()
from flask import Flask, render_template, url_for, redirect from flask import session, escape, request from peewee import * #from datetime import date app = Flask(__name__) # http://docs.peewee-orm.com/en/latest/peewee/quickstart.html database = SqliteDatabase('developmentData.db') #class Device(Model): # idNumber = IntField() # serialNumber = CharField() # typeCategory = CharField() # description = TextField() # issues = TextField() # photo = CharField() # quality = CharField() @app.route('/') def index(): # http://flask.pocoo.org/snippets/15/ if 'username' in session: return render_template('inventory.html', inventoryData="", deviceLogData="") return redirect(url_for('login')) @app.route('/login', methods=['GET', 'POST']) def login(): if request.method == 'POST': session['username'] = request.form['username'] return url_for('index') return '''<form action="" method="post"><p><input type=text name=username><p><input type=submit value=Login></form>''' @app.route('/logout') def logout(): session.pop('username', None) return redirect(url_for('index')) if __name__ == '__main__': db.connect() app.run() Add try for catching server errorfrom flask import Flask, render_template, url_for, redirect from flask import session, escape, request from peewee import * #from datetime import date app = Flask(__name__) # http://docs.peewee-orm.com/en/latest/peewee/quickstart.html database = SqliteDatabase('developmentData.db') #class Device(Model): # idNumber = IntField() # serialNumber = CharField() # typeCategory = CharField() # description = TextField() # issues = TextField() # photo = CharField() # quality = CharField() @app.route('/') def index(): # http://flask.pocoo.org/snippets/15/ if 'username' in session: return render_template('inventory.html', inventoryData="", deviceLogData="") return redirect(url_for('login')) @app.route('/login', methods=['GET', 'POST']) def login(): if request.method == 'POST': session['username'] = request.form['username'] try: return url_for('index') except Exception, e: return e return '''<form action="" method="post"><p><input type=text name=username><p><input type=submit value=Login></form>''' @app.route('/logout') def logout(): session.pop('username', None) return redirect(url_for('index')) if __name__ == '__main__': db.connect() app.run()
<commit_before>from flask import Flask, render_template, url_for, redirect from flask import session, escape, request from peewee import * #from datetime import date app = Flask(__name__) # http://docs.peewee-orm.com/en/latest/peewee/quickstart.html database = SqliteDatabase('developmentData.db') #class Device(Model): # idNumber = IntField() # serialNumber = CharField() # typeCategory = CharField() # description = TextField() # issues = TextField() # photo = CharField() # quality = CharField() @app.route('/') def index(): # http://flask.pocoo.org/snippets/15/ if 'username' in session: return render_template('inventory.html', inventoryData="", deviceLogData="") return redirect(url_for('login')) @app.route('/login', methods=['GET', 'POST']) def login(): if request.method == 'POST': session['username'] = request.form['username'] return url_for('index') return '''<form action="" method="post"><p><input type=text name=username><p><input type=submit value=Login></form>''' @app.route('/logout') def logout(): session.pop('username', None) return redirect(url_for('index')) if __name__ == '__main__': db.connect() app.run() <commit_msg>Add try for catching server error<commit_after>from flask import Flask, render_template, url_for, redirect from flask import session, escape, request from peewee import * #from datetime import date app = Flask(__name__) # http://docs.peewee-orm.com/en/latest/peewee/quickstart.html database = SqliteDatabase('developmentData.db') #class Device(Model): # idNumber = IntField() # serialNumber = CharField() # typeCategory = CharField() # description = TextField() # issues = TextField() # photo = CharField() # quality = CharField() @app.route('/') def index(): # http://flask.pocoo.org/snippets/15/ if 'username' in session: return render_template('inventory.html', inventoryData="", deviceLogData="") return redirect(url_for('login')) @app.route('/login', methods=['GET', 'POST']) def login(): if request.method == 'POST': session['username'] = request.form['username'] try: return url_for('index') except Exception, e: return e return '''<form action="" method="post"><p><input type=text name=username><p><input type=submit value=Login></form>''' @app.route('/logout') def logout(): session.pop('username', None) return redirect(url_for('index')) if __name__ == '__main__': db.connect() app.run()
6ba9c6b0e7fc4bb4eaa8425c9de172a9ada612c6
src/tests.py
src/tests.py
from flask.ext.testing import TestCase from chassis import create_app from chassis.models import db import factories class ChassisTestCase(TestCase): """Base TestCase to add in convenience functions, defaults and custom asserts.""" def create_app(self): return create_app() def setUp(self): db.create_all() def tearDown(self): db.session.remove() db.drop_all() class TestCat(ChassisTestCase): SQLALCHEMY_DATABASE_URI = "sqlite://:memory:" def test_get_cat(self): """Test to see if you can get a message by ID.""" cat = factories.Cat() db.session.commit() response = self.client.get("/messages/" + str(cat.id)) self.assert_200(response) resp_json = response.json self.assertEquals(resp_json["id"], str(cat.id)) self.assertEquals(resp_json["born_at"], cat.born_at) self.assertEquals(resp_json["name"], cat.name)
from flask.ext.testing import TestCase from chassis import create_app from chassis.models import db import factories class ChassisTestCase(TestCase): """Base TestCase to add in convenience functions, defaults and custom asserts.""" def create_app(self): return create_app() def setUp(self): db.create_all() def tearDown(self): db.session.remove() db.drop_all() class TestCat(ChassisTestCase): SQLALCHEMY_DATABASE_URI = "sqlite://:memory:" def test_get_cat(self): """Test to see if you can get a message by ID.""" cat = factories.Cat() db.session.commit() response = self.client.get("/cats/" + str(cat.id)) self.assert_200(response) resp_json = response.json self.assertEquals(resp_json["id"], str(cat.id)) self.assertEquals(resp_json["born_at"], cat.born_at) self.assertEquals(resp_json["name"], cat.name)
Fix test case bug where wrong URL was being hit
Fix test case bug where wrong URL was being hit
Python
mit
makmanalp/flask-chassis
from flask.ext.testing import TestCase from chassis import create_app from chassis.models import db import factories class ChassisTestCase(TestCase): """Base TestCase to add in convenience functions, defaults and custom asserts.""" def create_app(self): return create_app() def setUp(self): db.create_all() def tearDown(self): db.session.remove() db.drop_all() class TestCat(ChassisTestCase): SQLALCHEMY_DATABASE_URI = "sqlite://:memory:" def test_get_cat(self): """Test to see if you can get a message by ID.""" cat = factories.Cat() db.session.commit() response = self.client.get("/messages/" + str(cat.id)) self.assert_200(response) resp_json = response.json self.assertEquals(resp_json["id"], str(cat.id)) self.assertEquals(resp_json["born_at"], cat.born_at) self.assertEquals(resp_json["name"], cat.name) Fix test case bug where wrong URL was being hit
from flask.ext.testing import TestCase from chassis import create_app from chassis.models import db import factories class ChassisTestCase(TestCase): """Base TestCase to add in convenience functions, defaults and custom asserts.""" def create_app(self): return create_app() def setUp(self): db.create_all() def tearDown(self): db.session.remove() db.drop_all() class TestCat(ChassisTestCase): SQLALCHEMY_DATABASE_URI = "sqlite://:memory:" def test_get_cat(self): """Test to see if you can get a message by ID.""" cat = factories.Cat() db.session.commit() response = self.client.get("/cats/" + str(cat.id)) self.assert_200(response) resp_json = response.json self.assertEquals(resp_json["id"], str(cat.id)) self.assertEquals(resp_json["born_at"], cat.born_at) self.assertEquals(resp_json["name"], cat.name)
<commit_before>from flask.ext.testing import TestCase from chassis import create_app from chassis.models import db import factories class ChassisTestCase(TestCase): """Base TestCase to add in convenience functions, defaults and custom asserts.""" def create_app(self): return create_app() def setUp(self): db.create_all() def tearDown(self): db.session.remove() db.drop_all() class TestCat(ChassisTestCase): SQLALCHEMY_DATABASE_URI = "sqlite://:memory:" def test_get_cat(self): """Test to see if you can get a message by ID.""" cat = factories.Cat() db.session.commit() response = self.client.get("/messages/" + str(cat.id)) self.assert_200(response) resp_json = response.json self.assertEquals(resp_json["id"], str(cat.id)) self.assertEquals(resp_json["born_at"], cat.born_at) self.assertEquals(resp_json["name"], cat.name) <commit_msg>Fix test case bug where wrong URL was being hit<commit_after>
from flask.ext.testing import TestCase from chassis import create_app from chassis.models import db import factories class ChassisTestCase(TestCase): """Base TestCase to add in convenience functions, defaults and custom asserts.""" def create_app(self): return create_app() def setUp(self): db.create_all() def tearDown(self): db.session.remove() db.drop_all() class TestCat(ChassisTestCase): SQLALCHEMY_DATABASE_URI = "sqlite://:memory:" def test_get_cat(self): """Test to see if you can get a message by ID.""" cat = factories.Cat() db.session.commit() response = self.client.get("/cats/" + str(cat.id)) self.assert_200(response) resp_json = response.json self.assertEquals(resp_json["id"], str(cat.id)) self.assertEquals(resp_json["born_at"], cat.born_at) self.assertEquals(resp_json["name"], cat.name)
from flask.ext.testing import TestCase from chassis import create_app from chassis.models import db import factories class ChassisTestCase(TestCase): """Base TestCase to add in convenience functions, defaults and custom asserts.""" def create_app(self): return create_app() def setUp(self): db.create_all() def tearDown(self): db.session.remove() db.drop_all() class TestCat(ChassisTestCase): SQLALCHEMY_DATABASE_URI = "sqlite://:memory:" def test_get_cat(self): """Test to see if you can get a message by ID.""" cat = factories.Cat() db.session.commit() response = self.client.get("/messages/" + str(cat.id)) self.assert_200(response) resp_json = response.json self.assertEquals(resp_json["id"], str(cat.id)) self.assertEquals(resp_json["born_at"], cat.born_at) self.assertEquals(resp_json["name"], cat.name) Fix test case bug where wrong URL was being hitfrom flask.ext.testing import TestCase from chassis import create_app from chassis.models import db import factories class ChassisTestCase(TestCase): """Base TestCase to add in convenience functions, defaults and custom asserts.""" def create_app(self): return create_app() def setUp(self): db.create_all() def tearDown(self): db.session.remove() db.drop_all() class TestCat(ChassisTestCase): SQLALCHEMY_DATABASE_URI = "sqlite://:memory:" def test_get_cat(self): """Test to see if you can get a message by ID.""" cat = factories.Cat() db.session.commit() response = self.client.get("/cats/" + str(cat.id)) self.assert_200(response) resp_json = response.json self.assertEquals(resp_json["id"], str(cat.id)) self.assertEquals(resp_json["born_at"], cat.born_at) self.assertEquals(resp_json["name"], cat.name)
<commit_before>from flask.ext.testing import TestCase from chassis import create_app from chassis.models import db import factories class ChassisTestCase(TestCase): """Base TestCase to add in convenience functions, defaults and custom asserts.""" def create_app(self): return create_app() def setUp(self): db.create_all() def tearDown(self): db.session.remove() db.drop_all() class TestCat(ChassisTestCase): SQLALCHEMY_DATABASE_URI = "sqlite://:memory:" def test_get_cat(self): """Test to see if you can get a message by ID.""" cat = factories.Cat() db.session.commit() response = self.client.get("/messages/" + str(cat.id)) self.assert_200(response) resp_json = response.json self.assertEquals(resp_json["id"], str(cat.id)) self.assertEquals(resp_json["born_at"], cat.born_at) self.assertEquals(resp_json["name"], cat.name) <commit_msg>Fix test case bug where wrong URL was being hit<commit_after>from flask.ext.testing import TestCase from chassis import create_app from chassis.models import db import factories class ChassisTestCase(TestCase): """Base TestCase to add in convenience functions, defaults and custom asserts.""" def create_app(self): return create_app() def setUp(self): db.create_all() def tearDown(self): db.session.remove() db.drop_all() class TestCat(ChassisTestCase): SQLALCHEMY_DATABASE_URI = "sqlite://:memory:" def test_get_cat(self): """Test to see if you can get a message by ID.""" cat = factories.Cat() db.session.commit() response = self.client.get("/cats/" + str(cat.id)) self.assert_200(response) resp_json = response.json self.assertEquals(resp_json["id"], str(cat.id)) self.assertEquals(resp_json["born_at"], cat.born_at) self.assertEquals(resp_json["name"], cat.name)
6021c4c54cb0a437878553a1e23f8d433476ff2d
main.py
main.py
import json from kivy.app import App from kivy.uix.boxlayout import BoxLayout from kivy.properties import ObjectProperty from kivy.network.urlrequest import UrlRequest class AddLocationForm(BoxLayout): search_input = ObjectProperty() search_results = ObjectProperty() def search_location(self): search_template = ("http://api.openweathermap.org/data/2.5/" + "find?q={}&type=like") search_url = search_template.format(self.search_input.text) request = UrlRequest(search_url, self.found_location) def found_location(self, request, data): data = json.loads(data.decode()) if not isinstance(data, dict) else data cities = ["{} ({})".format(d['name'], d['sys']['country']) for d in data['list']] self.search_results.item_strings = cities class WeatherApp(App): pass if __name__ == '__main__': WeatherApp().run()
import json from kivy.app import App from kivy.uix.boxlayout import BoxLayout from kivy.properties import ObjectProperty from kivy.network.urlrequest import UrlRequest class AddLocationForm(BoxLayout): search_input = ObjectProperty() search_results = ObjectProperty() def search_location(self): search_template = ("http://api.openweathermap.org/data/2.5/" + "find?q={}&type=like") search_url = search_template.format(self.search_input.text) request = UrlRequest(search_url, self.found_location) def found_location(self, request, data): data = json.loads(data.decode()) if not isinstance(data, dict) else data if 'list' in data: cities = ["{} ({})".format(d['name'], d['sys']['country']) for d in data['list']] else: cities = [] self.search_results.item_strings = cities class WeatherApp(App): pass if __name__ == '__main__': WeatherApp().run()
Stop crashing when search doesn't have any matches
Stop crashing when search doesn't have any matches
Python
mit
ciappi/Weather
import json from kivy.app import App from kivy.uix.boxlayout import BoxLayout from kivy.properties import ObjectProperty from kivy.network.urlrequest import UrlRequest class AddLocationForm(BoxLayout): search_input = ObjectProperty() search_results = ObjectProperty() def search_location(self): search_template = ("http://api.openweathermap.org/data/2.5/" + "find?q={}&type=like") search_url = search_template.format(self.search_input.text) request = UrlRequest(search_url, self.found_location) def found_location(self, request, data): data = json.loads(data.decode()) if not isinstance(data, dict) else data cities = ["{} ({})".format(d['name'], d['sys']['country']) for d in data['list']] self.search_results.item_strings = cities class WeatherApp(App): pass if __name__ == '__main__': WeatherApp().run() Stop crashing when search doesn't have any matches
import json from kivy.app import App from kivy.uix.boxlayout import BoxLayout from kivy.properties import ObjectProperty from kivy.network.urlrequest import UrlRequest class AddLocationForm(BoxLayout): search_input = ObjectProperty() search_results = ObjectProperty() def search_location(self): search_template = ("http://api.openweathermap.org/data/2.5/" + "find?q={}&type=like") search_url = search_template.format(self.search_input.text) request = UrlRequest(search_url, self.found_location) def found_location(self, request, data): data = json.loads(data.decode()) if not isinstance(data, dict) else data if 'list' in data: cities = ["{} ({})".format(d['name'], d['sys']['country']) for d in data['list']] else: cities = [] self.search_results.item_strings = cities class WeatherApp(App): pass if __name__ == '__main__': WeatherApp().run()
<commit_before>import json from kivy.app import App from kivy.uix.boxlayout import BoxLayout from kivy.properties import ObjectProperty from kivy.network.urlrequest import UrlRequest class AddLocationForm(BoxLayout): search_input = ObjectProperty() search_results = ObjectProperty() def search_location(self): search_template = ("http://api.openweathermap.org/data/2.5/" + "find?q={}&type=like") search_url = search_template.format(self.search_input.text) request = UrlRequest(search_url, self.found_location) def found_location(self, request, data): data = json.loads(data.decode()) if not isinstance(data, dict) else data cities = ["{} ({})".format(d['name'], d['sys']['country']) for d in data['list']] self.search_results.item_strings = cities class WeatherApp(App): pass if __name__ == '__main__': WeatherApp().run() <commit_msg>Stop crashing when search doesn't have any matches<commit_after>
import json from kivy.app import App from kivy.uix.boxlayout import BoxLayout from kivy.properties import ObjectProperty from kivy.network.urlrequest import UrlRequest class AddLocationForm(BoxLayout): search_input = ObjectProperty() search_results = ObjectProperty() def search_location(self): search_template = ("http://api.openweathermap.org/data/2.5/" + "find?q={}&type=like") search_url = search_template.format(self.search_input.text) request = UrlRequest(search_url, self.found_location) def found_location(self, request, data): data = json.loads(data.decode()) if not isinstance(data, dict) else data if 'list' in data: cities = ["{} ({})".format(d['name'], d['sys']['country']) for d in data['list']] else: cities = [] self.search_results.item_strings = cities class WeatherApp(App): pass if __name__ == '__main__': WeatherApp().run()
import json from kivy.app import App from kivy.uix.boxlayout import BoxLayout from kivy.properties import ObjectProperty from kivy.network.urlrequest import UrlRequest class AddLocationForm(BoxLayout): search_input = ObjectProperty() search_results = ObjectProperty() def search_location(self): search_template = ("http://api.openweathermap.org/data/2.5/" + "find?q={}&type=like") search_url = search_template.format(self.search_input.text) request = UrlRequest(search_url, self.found_location) def found_location(self, request, data): data = json.loads(data.decode()) if not isinstance(data, dict) else data cities = ["{} ({})".format(d['name'], d['sys']['country']) for d in data['list']] self.search_results.item_strings = cities class WeatherApp(App): pass if __name__ == '__main__': WeatherApp().run() Stop crashing when search doesn't have any matchesimport json from kivy.app import App from kivy.uix.boxlayout import BoxLayout from kivy.properties import ObjectProperty from kivy.network.urlrequest import UrlRequest class AddLocationForm(BoxLayout): search_input = ObjectProperty() search_results = ObjectProperty() def search_location(self): search_template = ("http://api.openweathermap.org/data/2.5/" + "find?q={}&type=like") search_url = search_template.format(self.search_input.text) request = UrlRequest(search_url, self.found_location) def found_location(self, request, data): data = json.loads(data.decode()) if not isinstance(data, dict) else data if 'list' in data: cities = ["{} ({})".format(d['name'], d['sys']['country']) for d in data['list']] else: cities = [] self.search_results.item_strings = cities class WeatherApp(App): pass if __name__ == '__main__': WeatherApp().run()
<commit_before>import json from kivy.app import App from kivy.uix.boxlayout import BoxLayout from kivy.properties import ObjectProperty from kivy.network.urlrequest import UrlRequest class AddLocationForm(BoxLayout): search_input = ObjectProperty() search_results = ObjectProperty() def search_location(self): search_template = ("http://api.openweathermap.org/data/2.5/" + "find?q={}&type=like") search_url = search_template.format(self.search_input.text) request = UrlRequest(search_url, self.found_location) def found_location(self, request, data): data = json.loads(data.decode()) if not isinstance(data, dict) else data cities = ["{} ({})".format(d['name'], d['sys']['country']) for d in data['list']] self.search_results.item_strings = cities class WeatherApp(App): pass if __name__ == '__main__': WeatherApp().run() <commit_msg>Stop crashing when search doesn't have any matches<commit_after>import json from kivy.app import App from kivy.uix.boxlayout import BoxLayout from kivy.properties import ObjectProperty from kivy.network.urlrequest import UrlRequest class AddLocationForm(BoxLayout): search_input = ObjectProperty() search_results = ObjectProperty() def search_location(self): search_template = ("http://api.openweathermap.org/data/2.5/" + "find?q={}&type=like") search_url = search_template.format(self.search_input.text) request = UrlRequest(search_url, self.found_location) def found_location(self, request, data): data = json.loads(data.decode()) if not isinstance(data, dict) else data if 'list' in data: cities = ["{} ({})".format(d['name'], d['sys']['country']) for d in data['list']] else: cities = [] self.search_results.item_strings = cities class WeatherApp(App): pass if __name__ == '__main__': WeatherApp().run()
9605a14372aaf2ad4315bad11053839cfe75e50e
main.py
main.py
""" St. George Game main.py Sage Berg, Skyler Berg Created: 5 Dec 2014 """ import places from character import Character from display import Display from actions import AskAboutAssassins, BuyADrink, LeaveInAHuff, SingASong def main(): display = Display() display.enable() character = Character() character.place = places.tavern character.actions["a"] = AskAboutAssassins() character.actions["b"] = BuyADrink() character.actions["c"] = LeaveInAHuff() character.actions["d"] = SingASong() display.write("\n---The St. George Game---\n") display.write("You are in a tavern. The local assassins hate you.") while character.alive and character.alone: action = character.choose_action() display.enable() action.execute(character) character.generate_actions() if __name__ == "__main__": while True: # the game automatically restarts main()
""" St. George Game main.py Sage Berg, Skyler Berg Created: 5 Dec 2014 """ import places from character import Character from display import Display from actions import AskAboutAssassins, BuyADrink, LeaveInAHuff, SingASong def main(): display = Display() display.enable() character = Character() character.place = places.tavern character.actions["a"] = AskAboutAssassins() character.actions["b"] = BuyADrink() character.actions["c"] = LeaveInAHuff() character.actions["d"] = SingASong() display.write("\n---The St. George Game---\n") display.write("You are in a tavern. The local assassins hate you.") while character.alive and character.alone: action = character.choose_action() display.enable() action.execute(character) character.prev_act = action character.generate_actions() if __name__ == "__main__": while True: # the game automatically restarts main()
Set previous action after executing
Set previous action after executing
Python
apache-2.0
SageBerg/St.GeorgeGame,SageBerg/St.GeorgeGame,SageBerg/St.GeorgeGame,SageBerg/St.GeorgeGame
""" St. George Game main.py Sage Berg, Skyler Berg Created: 5 Dec 2014 """ import places from character import Character from display import Display from actions import AskAboutAssassins, BuyADrink, LeaveInAHuff, SingASong def main(): display = Display() display.enable() character = Character() character.place = places.tavern character.actions["a"] = AskAboutAssassins() character.actions["b"] = BuyADrink() character.actions["c"] = LeaveInAHuff() character.actions["d"] = SingASong() display.write("\n---The St. George Game---\n") display.write("You are in a tavern. The local assassins hate you.") while character.alive and character.alone: action = character.choose_action() display.enable() action.execute(character) character.generate_actions() if __name__ == "__main__": while True: # the game automatically restarts main() Set previous action after executing
""" St. George Game main.py Sage Berg, Skyler Berg Created: 5 Dec 2014 """ import places from character import Character from display import Display from actions import AskAboutAssassins, BuyADrink, LeaveInAHuff, SingASong def main(): display = Display() display.enable() character = Character() character.place = places.tavern character.actions["a"] = AskAboutAssassins() character.actions["b"] = BuyADrink() character.actions["c"] = LeaveInAHuff() character.actions["d"] = SingASong() display.write("\n---The St. George Game---\n") display.write("You are in a tavern. The local assassins hate you.") while character.alive and character.alone: action = character.choose_action() display.enable() action.execute(character) character.prev_act = action character.generate_actions() if __name__ == "__main__": while True: # the game automatically restarts main()
<commit_before>""" St. George Game main.py Sage Berg, Skyler Berg Created: 5 Dec 2014 """ import places from character import Character from display import Display from actions import AskAboutAssassins, BuyADrink, LeaveInAHuff, SingASong def main(): display = Display() display.enable() character = Character() character.place = places.tavern character.actions["a"] = AskAboutAssassins() character.actions["b"] = BuyADrink() character.actions["c"] = LeaveInAHuff() character.actions["d"] = SingASong() display.write("\n---The St. George Game---\n") display.write("You are in a tavern. The local assassins hate you.") while character.alive and character.alone: action = character.choose_action() display.enable() action.execute(character) character.generate_actions() if __name__ == "__main__": while True: # the game automatically restarts main() <commit_msg>Set previous action after executing<commit_after>
""" St. George Game main.py Sage Berg, Skyler Berg Created: 5 Dec 2014 """ import places from character import Character from display import Display from actions import AskAboutAssassins, BuyADrink, LeaveInAHuff, SingASong def main(): display = Display() display.enable() character = Character() character.place = places.tavern character.actions["a"] = AskAboutAssassins() character.actions["b"] = BuyADrink() character.actions["c"] = LeaveInAHuff() character.actions["d"] = SingASong() display.write("\n---The St. George Game---\n") display.write("You are in a tavern. The local assassins hate you.") while character.alive and character.alone: action = character.choose_action() display.enable() action.execute(character) character.prev_act = action character.generate_actions() if __name__ == "__main__": while True: # the game automatically restarts main()
""" St. George Game main.py Sage Berg, Skyler Berg Created: 5 Dec 2014 """ import places from character import Character from display import Display from actions import AskAboutAssassins, BuyADrink, LeaveInAHuff, SingASong def main(): display = Display() display.enable() character = Character() character.place = places.tavern character.actions["a"] = AskAboutAssassins() character.actions["b"] = BuyADrink() character.actions["c"] = LeaveInAHuff() character.actions["d"] = SingASong() display.write("\n---The St. George Game---\n") display.write("You are in a tavern. The local assassins hate you.") while character.alive and character.alone: action = character.choose_action() display.enable() action.execute(character) character.generate_actions() if __name__ == "__main__": while True: # the game automatically restarts main() Set previous action after executing""" St. George Game main.py Sage Berg, Skyler Berg Created: 5 Dec 2014 """ import places from character import Character from display import Display from actions import AskAboutAssassins, BuyADrink, LeaveInAHuff, SingASong def main(): display = Display() display.enable() character = Character() character.place = places.tavern character.actions["a"] = AskAboutAssassins() character.actions["b"] = BuyADrink() character.actions["c"] = LeaveInAHuff() character.actions["d"] = SingASong() display.write("\n---The St. George Game---\n") display.write("You are in a tavern. The local assassins hate you.") while character.alive and character.alone: action = character.choose_action() display.enable() action.execute(character) character.prev_act = action character.generate_actions() if __name__ == "__main__": while True: # the game automatically restarts main()
<commit_before>""" St. George Game main.py Sage Berg, Skyler Berg Created: 5 Dec 2014 """ import places from character import Character from display import Display from actions import AskAboutAssassins, BuyADrink, LeaveInAHuff, SingASong def main(): display = Display() display.enable() character = Character() character.place = places.tavern character.actions["a"] = AskAboutAssassins() character.actions["b"] = BuyADrink() character.actions["c"] = LeaveInAHuff() character.actions["d"] = SingASong() display.write("\n---The St. George Game---\n") display.write("You are in a tavern. The local assassins hate you.") while character.alive and character.alone: action = character.choose_action() display.enable() action.execute(character) character.generate_actions() if __name__ == "__main__": while True: # the game automatically restarts main() <commit_msg>Set previous action after executing<commit_after>""" St. George Game main.py Sage Berg, Skyler Berg Created: 5 Dec 2014 """ import places from character import Character from display import Display from actions import AskAboutAssassins, BuyADrink, LeaveInAHuff, SingASong def main(): display = Display() display.enable() character = Character() character.place = places.tavern character.actions["a"] = AskAboutAssassins() character.actions["b"] = BuyADrink() character.actions["c"] = LeaveInAHuff() character.actions["d"] = SingASong() display.write("\n---The St. George Game---\n") display.write("You are in a tavern. The local assassins hate you.") while character.alive and character.alone: action = character.choose_action() display.enable() action.execute(character) character.prev_act = action character.generate_actions() if __name__ == "__main__": while True: # the game automatically restarts main()
80933f496ef57abe7335fd9490acf4a1f4a53648
nose2/tests/functional/test_coverage.py
nose2/tests/functional/test_coverage.py
from nose2.tests._common import FunctionalTestCase class TestCoverage(FunctionalTestCase): def test_run(self): proc = self.runIn( 'scenario/test_with_module', '-v', '--with-coverage', '--coverage=lib/' ) stdout, stderr = proc.communicate() self.assertTestRunOutputMatches(proc, stderr='lib/mod1 8 5 38%') self.assertTestRunOutputMatches(proc, stderr='TOTAL 8 5 38%')
import os.path from nose2.tests._common import FunctionalTestCase class TestCoverage(FunctionalTestCase): def test_run(self): proc = self.runIn( 'scenario/test_with_module', '-v', '--with-coverage', '--coverage=lib/' ) STATS = ' 8 5 38%' stdout, stderr = proc.communicate() self.assertTestRunOutputMatches( proc, stderr=os.path.join('lib', 'mod1').replace('\\', r'\\') + STATS) self.assertTestRunOutputMatches( proc, stderr='TOTAL ' + STATS)
Make test also work on Windows that uses backslash as path delimiter
Make test also work on Windows that uses backslash as path delimiter
Python
bsd-2-clause
little-dude/nose2,little-dude/nose2,ezigman/nose2,ezigman/nose2,ojengwa/nose2,ptthiem/nose2,ptthiem/nose2,ojengwa/nose2
from nose2.tests._common import FunctionalTestCase class TestCoverage(FunctionalTestCase): def test_run(self): proc = self.runIn( 'scenario/test_with_module', '-v', '--with-coverage', '--coverage=lib/' ) stdout, stderr = proc.communicate() self.assertTestRunOutputMatches(proc, stderr='lib/mod1 8 5 38%') self.assertTestRunOutputMatches(proc, stderr='TOTAL 8 5 38%') Make test also work on Windows that uses backslash as path delimiter
import os.path from nose2.tests._common import FunctionalTestCase class TestCoverage(FunctionalTestCase): def test_run(self): proc = self.runIn( 'scenario/test_with_module', '-v', '--with-coverage', '--coverage=lib/' ) STATS = ' 8 5 38%' stdout, stderr = proc.communicate() self.assertTestRunOutputMatches( proc, stderr=os.path.join('lib', 'mod1').replace('\\', r'\\') + STATS) self.assertTestRunOutputMatches( proc, stderr='TOTAL ' + STATS)
<commit_before>from nose2.tests._common import FunctionalTestCase class TestCoverage(FunctionalTestCase): def test_run(self): proc = self.runIn( 'scenario/test_with_module', '-v', '--with-coverage', '--coverage=lib/' ) stdout, stderr = proc.communicate() self.assertTestRunOutputMatches(proc, stderr='lib/mod1 8 5 38%') self.assertTestRunOutputMatches(proc, stderr='TOTAL 8 5 38%') <commit_msg>Make test also work on Windows that uses backslash as path delimiter<commit_after>
import os.path from nose2.tests._common import FunctionalTestCase class TestCoverage(FunctionalTestCase): def test_run(self): proc = self.runIn( 'scenario/test_with_module', '-v', '--with-coverage', '--coverage=lib/' ) STATS = ' 8 5 38%' stdout, stderr = proc.communicate() self.assertTestRunOutputMatches( proc, stderr=os.path.join('lib', 'mod1').replace('\\', r'\\') + STATS) self.assertTestRunOutputMatches( proc, stderr='TOTAL ' + STATS)
from nose2.tests._common import FunctionalTestCase class TestCoverage(FunctionalTestCase): def test_run(self): proc = self.runIn( 'scenario/test_with_module', '-v', '--with-coverage', '--coverage=lib/' ) stdout, stderr = proc.communicate() self.assertTestRunOutputMatches(proc, stderr='lib/mod1 8 5 38%') self.assertTestRunOutputMatches(proc, stderr='TOTAL 8 5 38%') Make test also work on Windows that uses backslash as path delimiterimport os.path from nose2.tests._common import FunctionalTestCase class TestCoverage(FunctionalTestCase): def test_run(self): proc = self.runIn( 'scenario/test_with_module', '-v', '--with-coverage', '--coverage=lib/' ) STATS = ' 8 5 38%' stdout, stderr = proc.communicate() self.assertTestRunOutputMatches( proc, stderr=os.path.join('lib', 'mod1').replace('\\', r'\\') + STATS) self.assertTestRunOutputMatches( proc, stderr='TOTAL ' + STATS)
<commit_before>from nose2.tests._common import FunctionalTestCase class TestCoverage(FunctionalTestCase): def test_run(self): proc = self.runIn( 'scenario/test_with_module', '-v', '--with-coverage', '--coverage=lib/' ) stdout, stderr = proc.communicate() self.assertTestRunOutputMatches(proc, stderr='lib/mod1 8 5 38%') self.assertTestRunOutputMatches(proc, stderr='TOTAL 8 5 38%') <commit_msg>Make test also work on Windows that uses backslash as path delimiter<commit_after>import os.path from nose2.tests._common import FunctionalTestCase class TestCoverage(FunctionalTestCase): def test_run(self): proc = self.runIn( 'scenario/test_with_module', '-v', '--with-coverage', '--coverage=lib/' ) STATS = ' 8 5 38%' stdout, stderr = proc.communicate() self.assertTestRunOutputMatches( proc, stderr=os.path.join('lib', 'mod1').replace('\\', r'\\') + STATS) self.assertTestRunOutputMatches( proc, stderr='TOTAL ' + STATS)
e17367b4e5f865db4947fc4139baa0974d4a7326
social/apps/tornado_app/routes.py
social/apps/tornado_app/routes.py
from tornado.web import url from .handlers import AuthHandler, CompleteHandler, DisconnectHandler SOCIAL_AUTH_ROUTES = [ url(r'/login/(?P<backend>[^/]+)/?', AuthHandler, name='begin'), url(r'/complete/(?P<backend>[^/]+)/?', CompleteHandler, name='complete'), url(r'/disconnect/(?P<backend>[^/]+)/?', DisconnectHandler, name='disconnect'), url(r'/disconnect/(?P<backend>[^/]+)/(?P<association_id>\d+)/?', DisconnectHandler, name='disconect_individual'), ]
from tornado.web import url from .handlers import AuthHandler, CompleteHandler, DisconnectHandler SOCIAL_AUTH_ROUTES = [ url(r'/login/(?P<backend>[^/]+)/?', AuthHandler, name='begin'), url(r'/complete/(?P<backend>[^/]+)/', CompleteHandler, name='complete'), url(r'/disconnect/(?P<backend>[^/]+)/?', DisconnectHandler, name='disconnect'), url(r'/disconnect/(?P<backend>[^/]+)/(?P<association_id>\d+)/?', DisconnectHandler, name='disconect_individual'), ]
Fix redirect_uri issue with tornado reversed url
Fix redirect_uri issue with tornado reversed url When reversing URLs, tornado doesn't interpret the regex optional symbol '?'. This causes the redirect_uri to be https://example.com/complete/mybackend/? with the question mark appended. Some providers will simply append to this uri, causing URLs like https://example.com/complete/mybackend/??code=.... with two question marks. This makes the interpretation of the query string fail. The provider in this case is https://github.com/juanifioren/django-oidc-provider. Arguably that library should be smarter in constructing the redirection, but removing the question mark from the uri solves these kind of issues. Alternatively we could strip the question mark from the uri in the tornado strategy, but this seemed simpler.
Python
bsd-3-clause
lneoe/python-social-auth,contracode/python-social-auth,python-social-auth/social-core,python-social-auth/social-storage-sqlalchemy,contracode/python-social-auth,ByteInternet/python-social-auth,python-social-auth/social-docs,cmichal/python-social-auth,cjltsod/python-social-auth,python-social-auth/social-app-django,cmichal/python-social-auth,python-social-auth/social-app-django,python-social-auth/social-core,ByteInternet/python-social-auth,contracode/python-social-auth,mrwags/python-social-auth,mrwags/python-social-auth,python-social-auth/social-app-cherrypy,falcon1kr/python-social-auth,cjltsod/python-social-auth,tkajtoch/python-social-auth,fearlessspider/python-social-auth,mathspace/python-social-auth,ariestiyansyah/python-social-auth,tkajtoch/python-social-auth,mathspace/python-social-auth,fearlessspider/python-social-auth,S01780/python-social-auth,fearlessspider/python-social-auth,merutak/python-social-auth,webjunkie/python-social-auth,python-social-auth/social-app-django,merutak/python-social-auth,mrwags/python-social-auth,falcon1kr/python-social-auth,webjunkie/python-social-auth,ariestiyansyah/python-social-auth,muhammad-ammar/python-social-auth,rsalmaso/python-social-auth,muhammad-ammar/python-social-auth,falcon1kr/python-social-auth,merutak/python-social-auth,lneoe/python-social-auth,muhammad-ammar/python-social-auth,tobias47n9e/social-core,lneoe/python-social-auth,cmichal/python-social-auth,rsalmaso/python-social-auth,S01780/python-social-auth,S01780/python-social-auth,ByteInternet/python-social-auth,ariestiyansyah/python-social-auth,tkajtoch/python-social-auth,mathspace/python-social-auth,webjunkie/python-social-auth
from tornado.web import url from .handlers import AuthHandler, CompleteHandler, DisconnectHandler SOCIAL_AUTH_ROUTES = [ url(r'/login/(?P<backend>[^/]+)/?', AuthHandler, name='begin'), url(r'/complete/(?P<backend>[^/]+)/?', CompleteHandler, name='complete'), url(r'/disconnect/(?P<backend>[^/]+)/?', DisconnectHandler, name='disconnect'), url(r'/disconnect/(?P<backend>[^/]+)/(?P<association_id>\d+)/?', DisconnectHandler, name='disconect_individual'), ] Fix redirect_uri issue with tornado reversed url When reversing URLs, tornado doesn't interpret the regex optional symbol '?'. This causes the redirect_uri to be https://example.com/complete/mybackend/? with the question mark appended. Some providers will simply append to this uri, causing URLs like https://example.com/complete/mybackend/??code=.... with two question marks. This makes the interpretation of the query string fail. The provider in this case is https://github.com/juanifioren/django-oidc-provider. Arguably that library should be smarter in constructing the redirection, but removing the question mark from the uri solves these kind of issues. Alternatively we could strip the question mark from the uri in the tornado strategy, but this seemed simpler.
from tornado.web import url from .handlers import AuthHandler, CompleteHandler, DisconnectHandler SOCIAL_AUTH_ROUTES = [ url(r'/login/(?P<backend>[^/]+)/?', AuthHandler, name='begin'), url(r'/complete/(?P<backend>[^/]+)/', CompleteHandler, name='complete'), url(r'/disconnect/(?P<backend>[^/]+)/?', DisconnectHandler, name='disconnect'), url(r'/disconnect/(?P<backend>[^/]+)/(?P<association_id>\d+)/?', DisconnectHandler, name='disconect_individual'), ]
<commit_before>from tornado.web import url from .handlers import AuthHandler, CompleteHandler, DisconnectHandler SOCIAL_AUTH_ROUTES = [ url(r'/login/(?P<backend>[^/]+)/?', AuthHandler, name='begin'), url(r'/complete/(?P<backend>[^/]+)/?', CompleteHandler, name='complete'), url(r'/disconnect/(?P<backend>[^/]+)/?', DisconnectHandler, name='disconnect'), url(r'/disconnect/(?P<backend>[^/]+)/(?P<association_id>\d+)/?', DisconnectHandler, name='disconect_individual'), ] <commit_msg>Fix redirect_uri issue with tornado reversed url When reversing URLs, tornado doesn't interpret the regex optional symbol '?'. This causes the redirect_uri to be https://example.com/complete/mybackend/? with the question mark appended. Some providers will simply append to this uri, causing URLs like https://example.com/complete/mybackend/??code=.... with two question marks. This makes the interpretation of the query string fail. The provider in this case is https://github.com/juanifioren/django-oidc-provider. Arguably that library should be smarter in constructing the redirection, but removing the question mark from the uri solves these kind of issues. Alternatively we could strip the question mark from the uri in the tornado strategy, but this seemed simpler.<commit_after>
from tornado.web import url from .handlers import AuthHandler, CompleteHandler, DisconnectHandler SOCIAL_AUTH_ROUTES = [ url(r'/login/(?P<backend>[^/]+)/?', AuthHandler, name='begin'), url(r'/complete/(?P<backend>[^/]+)/', CompleteHandler, name='complete'), url(r'/disconnect/(?P<backend>[^/]+)/?', DisconnectHandler, name='disconnect'), url(r'/disconnect/(?P<backend>[^/]+)/(?P<association_id>\d+)/?', DisconnectHandler, name='disconect_individual'), ]
from tornado.web import url from .handlers import AuthHandler, CompleteHandler, DisconnectHandler SOCIAL_AUTH_ROUTES = [ url(r'/login/(?P<backend>[^/]+)/?', AuthHandler, name='begin'), url(r'/complete/(?P<backend>[^/]+)/?', CompleteHandler, name='complete'), url(r'/disconnect/(?P<backend>[^/]+)/?', DisconnectHandler, name='disconnect'), url(r'/disconnect/(?P<backend>[^/]+)/(?P<association_id>\d+)/?', DisconnectHandler, name='disconect_individual'), ] Fix redirect_uri issue with tornado reversed url When reversing URLs, tornado doesn't interpret the regex optional symbol '?'. This causes the redirect_uri to be https://example.com/complete/mybackend/? with the question mark appended. Some providers will simply append to this uri, causing URLs like https://example.com/complete/mybackend/??code=.... with two question marks. This makes the interpretation of the query string fail. The provider in this case is https://github.com/juanifioren/django-oidc-provider. Arguably that library should be smarter in constructing the redirection, but removing the question mark from the uri solves these kind of issues. Alternatively we could strip the question mark from the uri in the tornado strategy, but this seemed simpler.from tornado.web import url from .handlers import AuthHandler, CompleteHandler, DisconnectHandler SOCIAL_AUTH_ROUTES = [ url(r'/login/(?P<backend>[^/]+)/?', AuthHandler, name='begin'), url(r'/complete/(?P<backend>[^/]+)/', CompleteHandler, name='complete'), url(r'/disconnect/(?P<backend>[^/]+)/?', DisconnectHandler, name='disconnect'), url(r'/disconnect/(?P<backend>[^/]+)/(?P<association_id>\d+)/?', DisconnectHandler, name='disconect_individual'), ]
<commit_before>from tornado.web import url from .handlers import AuthHandler, CompleteHandler, DisconnectHandler SOCIAL_AUTH_ROUTES = [ url(r'/login/(?P<backend>[^/]+)/?', AuthHandler, name='begin'), url(r'/complete/(?P<backend>[^/]+)/?', CompleteHandler, name='complete'), url(r'/disconnect/(?P<backend>[^/]+)/?', DisconnectHandler, name='disconnect'), url(r'/disconnect/(?P<backend>[^/]+)/(?P<association_id>\d+)/?', DisconnectHandler, name='disconect_individual'), ] <commit_msg>Fix redirect_uri issue with tornado reversed url When reversing URLs, tornado doesn't interpret the regex optional symbol '?'. This causes the redirect_uri to be https://example.com/complete/mybackend/? with the question mark appended. Some providers will simply append to this uri, causing URLs like https://example.com/complete/mybackend/??code=.... with two question marks. This makes the interpretation of the query string fail. The provider in this case is https://github.com/juanifioren/django-oidc-provider. Arguably that library should be smarter in constructing the redirection, but removing the question mark from the uri solves these kind of issues. Alternatively we could strip the question mark from the uri in the tornado strategy, but this seemed simpler.<commit_after>from tornado.web import url from .handlers import AuthHandler, CompleteHandler, DisconnectHandler SOCIAL_AUTH_ROUTES = [ url(r'/login/(?P<backend>[^/]+)/?', AuthHandler, name='begin'), url(r'/complete/(?P<backend>[^/]+)/', CompleteHandler, name='complete'), url(r'/disconnect/(?P<backend>[^/]+)/?', DisconnectHandler, name='disconnect'), url(r'/disconnect/(?P<backend>[^/]+)/(?P<association_id>\d+)/?', DisconnectHandler, name='disconect_individual'), ]
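
An aside on the tornado record above: the commit message hinges on the fact that tornado's URL reversing copies literal regex characters such as the optional '?' straight into the reversed path, which is why dropping the '?' from the complete route avoids the doubled query marker. A minimal, self-contained sketch of that behaviour (the handler class and route names here are invented for illustration and are not part of python-social-auth):

import tornado.web

class EchoHandler(tornado.web.RequestHandler):
    def get(self, backend):
        self.write(backend)

app = tornado.web.Application([
    # Optional trailing slash: the literal '/?' survives reverse_url().
    tornado.web.url(r'/complete/(?P<backend>[^/]+)/?', EchoHandler, name='complete_optional'),
    # Required trailing slash: reversing yields a clean URL.
    tornado.web.url(r'/finish/(?P<backend>[^/]+)/', EchoHandler, name='complete_required'),
])

print(app.reverse_url('complete_optional', 'mybackend'))  # expected: /complete/mybackend/?
print(app.reverse_url('complete_required', 'mybackend'))  # expected: /finish/mybackend/
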
cd2c09f8902de59d9bdfa1e52ee65ea974c56412
st2common/st2common/__init__.py
st2common/st2common/__init__.py
# Copyright 2020 StackStorm Authors. # Copyright 2019 Extreme Networks, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __version__ = '3.3dev'
# Copyright 2020 The StackStorm Authors. # Copyright 2019 Extreme Networks, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __version__ = '3.3dev'
Add Copyright headers example for StackStorm
Add Copyright headers example for StackStorm
Python
apache-2.0
Plexxi/st2,StackStorm/st2,nzlosh/st2,StackStorm/st2,nzlosh/st2,nzlosh/st2,StackStorm/st2,StackStorm/st2,Plexxi/st2,Plexxi/st2,nzlosh/st2,Plexxi/st2
# Copyright 2020 StackStorm Authors. # Copyright 2019 Extreme Networks, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __version__ = '3.3dev' Add Copyright headers example for StackStorm
# Copyright 2020 The StackStorm Authors. # Copyright 2019 Extreme Networks, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __version__ = '3.3dev'
<commit_before># Copyright 2020 StackStorm Authors. # Copyright 2019 Extreme Networks, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __version__ = '3.3dev' <commit_msg>Add Copyright headers example for StackStorm<commit_after>
# Copyright 2020 The StackStorm Authors. # Copyright 2019 Extreme Networks, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __version__ = '3.3dev'
# Copyright 2020 StackStorm Authors. # Copyright 2019 Extreme Networks, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __version__ = '3.3dev' Add Copyright headers example for StackStorm# Copyright 2020 The StackStorm Authors. # Copyright 2019 Extreme Networks, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __version__ = '3.3dev'
<commit_before># Copyright 2020 StackStorm Authors. # Copyright 2019 Extreme Networks, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __version__ = '3.3dev' <commit_msg>Add Copyright headers example for StackStorm<commit_after># Copyright 2020 The StackStorm Authors. # Copyright 2019 Extreme Networks, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __version__ = '3.3dev'
8ba03f6be64ee12634183e0b5c5f3aa3b6014b94
linguine/ops/StanfordCoreNLP.py
linguine/ops/StanfordCoreNLP.py
#!/usr/bin/env python import os """ Performs some core NLP operations as a proof of concept for the library. """ from stanford_corenlp_pywrapper import CoreNLP class StanfordCoreNLP: def __init__(self): # I don't see anywhere to put properties like this path... # For now it's hardcoded and would need to be changed when deployed... print "Some stuff" print os.path.abspath(__file__) coreNLPPath = os.path.join(os.path.dirname(__file__), '../../lib/stanfordCoreNLP.jar') print coreNLPPath self.proc = CoreNLP('pos', corenlp_jars=[coreNLPPath]) def run(self, data): results = [] for corpus in data: results.append(self.proc.parse_doc(corpus.contents)) return results
#!/usr/bin/env python import os """ Performs some core NLP operations as a proof of concept for the library. """ from stanford_corenlp_pywrapper import CoreNLP class StanfordCoreNLP: def __init__(self): # I don't see anywhere to put properties like this path... # For now it's hardcoded and would need to be changed when deployed... coreNLPPath = os.path.join(os.path.dirname(__file__), '../../lib/stanfordCoreNLP.jar') coreNLPModelsPath = os.path.join(os.path.dirname(__file__), '../../lib/stanfordCoreNLPModels.jar') self.proc = CoreNLP('pos', corenlp_jars=[coreNLPPath, coreNLPModelsPath]) def run(self, data): results = [] for corpus in data: results.append(self.proc.parse_doc(corpus.contents)) return results
Add coreNLP models jar relative path as well
Add coreNLP models jar relative path as well
Python
mit
rigatoni/linguine-python,Pastafarians/linguine-python
#!/usr/bin/env python import os """ Performs some core NLP operations as a proof of concept for the library. """ from stanford_corenlp_pywrapper import CoreNLP class StanfordCoreNLP: def __init__(self): # I don't see anywhere to put properties like this path... # For now it's hardcoded and would need to be changed when deployed... print "Some stuff" print os.path.abspath(__file__) coreNLPPath = os.path.join(os.path.dirname(__file__), '../../lib/stanfordCoreNLP.jar') print coreNLPPath self.proc = CoreNLP('pos', corenlp_jars=[coreNLPPath]) def run(self, data): results = [] for corpus in data: results.append(self.proc.parse_doc(corpus.contents)) return results Add coreNLP models jar relative path as well
#!/usr/bin/env python import os """ Performs some core NLP operations as a proof of concept for the library. """ from stanford_corenlp_pywrapper import CoreNLP class StanfordCoreNLP: def __init__(self): # I don't see anywhere to put properties like this path... # For now it's hardcoded and would need to be changed when deployed... coreNLPPath = os.path.join(os.path.dirname(__file__), '../../lib/stanfordCoreNLP.jar') coreNLPModelsPath = os.path.join(os.path.dirname(__file__), '../../lib/stanfordCoreNLPModels.jar') self.proc = CoreNLP('pos', corenlp_jars=[coreNLPPath, coreNLPModelsPath]) def run(self, data): results = [] for corpus in data: results.append(self.proc.parse_doc(corpus.contents)) return results
<commit_before>#!/usr/bin/env python import os """ Performs some core NLP operations as a proof of concept for the library. """ from stanford_corenlp_pywrapper import CoreNLP class StanfordCoreNLP: def __init__(self): # I don't see anywhere to put properties like this path... # For now it's hardcoded and would need to be changed when deployed... print "Some stuff" print os.path.abspath(__file__) coreNLPPath = os.path.join(os.path.dirname(__file__), '../../lib/stanfordCoreNLP.jar') print coreNLPPath self.proc = CoreNLP('pos', corenlp_jars=[coreNLPPath]) def run(self, data): results = [] for corpus in data: results.append(self.proc.parse_doc(corpus.contents)) return results <commit_msg>Add coreNLP models jar relative path as well<commit_after>
#!/usr/bin/env python import os """ Performs some core NLP operations as a proof of concept for the library. """ from stanford_corenlp_pywrapper import CoreNLP class StanfordCoreNLP: def __init__(self): # I don't see anywhere to put properties like this path... # For now it's hardcoded and would need to be changed when deployed... coreNLPPath = os.path.join(os.path.dirname(__file__), '../../lib/stanfordCoreNLP.jar') coreNLPModelsPath = os.path.join(os.path.dirname(__file__), '../../lib/stanfordCoreNLPModels.jar') self.proc = CoreNLP('pos', corenlp_jars=[coreNLPPath, coreNLPModelsPath]) def run(self, data): results = [] for corpus in data: results.append(self.proc.parse_doc(corpus.contents)) return results
#!/usr/bin/env python import os """ Performs some core NLP operations as a proof of concept for the library. """ from stanford_corenlp_pywrapper import CoreNLP class StanfordCoreNLP: def __init__(self): # I don't see anywhere to put properties like this path... # For now it's hardcoded and would need to be changed when deployed... print "Some stuff" print os.path.abspath(__file__) coreNLPPath = os.path.join(os.path.dirname(__file__), '../../lib/stanfordCoreNLP.jar') print coreNLPPath self.proc = CoreNLP('pos', corenlp_jars=[coreNLPPath]) def run(self, data): results = [] for corpus in data: results.append(self.proc.parse_doc(corpus.contents)) return results Add coreNLP models jar relative path as well#!/usr/bin/env python import os """ Performs some core NLP operations as a proof of concept for the library. """ from stanford_corenlp_pywrapper import CoreNLP class StanfordCoreNLP: def __init__(self): # I don't see anywhere to put properties like this path... # For now it's hardcoded and would need to be changed when deployed... coreNLPPath = os.path.join(os.path.dirname(__file__), '../../lib/stanfordCoreNLP.jar') coreNLPModelsPath = os.path.join(os.path.dirname(__file__), '../../lib/stanfordCoreNLPModels.jar') self.proc = CoreNLP('pos', corenlp_jars=[coreNLPPath, coreNLPModelsPath]) def run(self, data): results = [] for corpus in data: results.append(self.proc.parse_doc(corpus.contents)) return results
<commit_before>#!/usr/bin/env python import os """ Performs some core NLP operations as a proof of concept for the library. """ from stanford_corenlp_pywrapper import CoreNLP class StanfordCoreNLP: def __init__(self): # I don't see anywhere to put properties like this path... # For now it's hardcoded and would need to be changed when deployed... print "Some stuff" print os.path.abspath(__file__) coreNLPPath = os.path.join(os.path.dirname(__file__), '../../lib/stanfordCoreNLP.jar') print coreNLPPath self.proc = CoreNLP('pos', corenlp_jars=[coreNLPPath]) def run(self, data): results = [] for corpus in data: results.append(self.proc.parse_doc(corpus.contents)) return results <commit_msg>Add coreNLP models jar relative path as well<commit_after>#!/usr/bin/env python import os """ Performs some core NLP operations as a proof of concept for the library. """ from stanford_corenlp_pywrapper import CoreNLP class StanfordCoreNLP: def __init__(self): # I don't see anywhere to put properties like this path... # For now it's hardcoded and would need to be changed when deployed... coreNLPPath = os.path.join(os.path.dirname(__file__), '../../lib/stanfordCoreNLP.jar') coreNLPModelsPath = os.path.join(os.path.dirname(__file__), '../../lib/stanfordCoreNLPModels.jar') self.proc = CoreNLP('pos', corenlp_jars=[coreNLPPath, coreNLPModelsPath]) def run(self, data): results = [] for corpus in data: results.append(self.proc.parse_doc(corpus.contents)) return results
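
The CoreNLP record above leans on one small idiom worth spelling out: resource paths are resolved relative to the module file via os.path.dirname(__file__), so the jars are found regardless of the caller's working directory. A stand-alone sketch of that idiom (the directory layout is invented for illustration):

import os

# Directory containing this script, independent of the caller's CWD.
HERE = os.path.dirname(os.path.abspath(__file__))

# Build resource paths relative to it, as the record does for the two jars.
core_jar = os.path.normpath(os.path.join(HERE, '..', 'lib', 'stanfordCoreNLP.jar'))
models_jar = os.path.normpath(os.path.join(HERE, '..', 'lib', 'stanfordCoreNLPModels.jar'))

print(core_jar)
print(models_jar)
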
fde1aa2a56af7c63866af98a249f8a5413981437
mr/tests/test_plot_traj_labeling.py
mr/tests/test_plot_traj_labeling.py
import unittest from numpy.testing.decorators import slow import os import pandas as pd from mr import ptraj path, _ = os.path.split(os.path.abspath(__file__)) class TestLabeling(unittest.TestCase): def setUp(self): self.sparse = pd.load(os.path.join(path, 'misc', 'sparse_trajectories.df')) @slow def test_labeling_sparse_trajectories(self): ptraj(self.sparse, label=True) # No errors?
import unittest from numpy.testing.decorators import slow import os import pandas as pd from mr import ptraj path, _ = os.path.split(os.path.abspath(__file__)) class TestLabeling(unittest.TestCase): def setUp(self): self.sparse = pd.read_pickle(os.path.join(path, 'misc', 'sparse_trajectories.df')) @slow def test_labeling_sparse_trajectories(self): ptraj(self.sparse, label=True) # No errors?
Replace deprecated pd.load with pd.read_pickle.
Replace deprecated pd.load with pd.read_pickle.
Python
bsd-3-clause
daniorerio/trackpy,daniorerio/trackpy
import unittest from numpy.testing.decorators import slow import os import pandas as pd from mr import ptraj path, _ = os.path.split(os.path.abspath(__file__)) class TestLabeling(unittest.TestCase): def setUp(self): self.sparse = pd.load(os.path.join(path, 'misc', 'sparse_trajectories.df')) @slow def test_labeling_sparse_trajectories(self): ptraj(self.sparse, label=True) # No errors? Replace deprecated pd.load with pd.read_pickle.
import unittest from numpy.testing.decorators import slow import os import pandas as pd from mr import ptraj path, _ = os.path.split(os.path.abspath(__file__)) class TestLabeling(unittest.TestCase): def setUp(self): self.sparse = pd.read_pickle(os.path.join(path, 'misc', 'sparse_trajectories.df')) @slow def test_labeling_sparse_trajectories(self): ptraj(self.sparse, label=True) # No errors?
<commit_before>import unittest from numpy.testing.decorators import slow import os import pandas as pd from mr import ptraj path, _ = os.path.split(os.path.abspath(__file__)) class TestLabeling(unittest.TestCase): def setUp(self): self.sparse = pd.load(os.path.join(path, 'misc', 'sparse_trajectories.df')) @slow def test_labeling_sparse_trajectories(self): ptraj(self.sparse, label=True) # No errors? <commit_msg>Replace deprecated pd.load with pd.read_pickle.<commit_after>
import unittest from numpy.testing.decorators import slow import os import pandas as pd from mr import ptraj path, _ = os.path.split(os.path.abspath(__file__)) class TestLabeling(unittest.TestCase): def setUp(self): self.sparse = pd.read_pickle(os.path.join(path, 'misc', 'sparse_trajectories.df')) @slow def test_labeling_sparse_trajectories(self): ptraj(self.sparse, label=True) # No errors?
import unittest from numpy.testing.decorators import slow import os import pandas as pd from mr import ptraj path, _ = os.path.split(os.path.abspath(__file__)) class TestLabeling(unittest.TestCase): def setUp(self): self.sparse = pd.load(os.path.join(path, 'misc', 'sparse_trajectories.df')) @slow def test_labeling_sparse_trajectories(self): ptraj(self.sparse, label=True) # No errors? Replace deprecated pd.load with pd.read_pickle.import unittest from numpy.testing.decorators import slow import os import pandas as pd from mr import ptraj path, _ = os.path.split(os.path.abspath(__file__)) class TestLabeling(unittest.TestCase): def setUp(self): self.sparse = pd.read_pickle(os.path.join(path, 'misc', 'sparse_trajectories.df')) @slow def test_labeling_sparse_trajectories(self): ptraj(self.sparse, label=True) # No errors?
<commit_before>import unittest from numpy.testing.decorators import slow import os import pandas as pd from mr import ptraj path, _ = os.path.split(os.path.abspath(__file__)) class TestLabeling(unittest.TestCase): def setUp(self): self.sparse = pd.load(os.path.join(path, 'misc', 'sparse_trajectories.df')) @slow def test_labeling_sparse_trajectories(self): ptraj(self.sparse, label=True) # No errors? <commit_msg>Replace deprecated pd.load with pd.read_pickle.<commit_after>import unittest from numpy.testing.decorators import slow import os import pandas as pd from mr import ptraj path, _ = os.path.split(os.path.abspath(__file__)) class TestLabeling(unittest.TestCase): def setUp(self): self.sparse = pd.read_pickle(os.path.join(path, 'misc', 'sparse_trajectories.df')) @slow def test_labeling_sparse_trajectories(self): ptraj(self.sparse, label=True) # No errors?
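
For the pandas record above, the deprecated pd.load call gives way to pd.read_pickle; the modern pickle round trip looks like this (the file name and frame contents are arbitrary examples):

import pandas as pd

df = pd.DataFrame({'frame': [0, 1, 2], 'x': [1.5, 1.6, 1.8]})
df.to_pickle('sparse_trajectories.df')                # writes the pickled DataFrame
restored = pd.read_pickle('sparse_trajectories.df')   # replacement for pd.load(...)
print(restored.equals(df))                            # True if the round trip preserved it
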
0ba0d7d1e0b19ef0523c66726cff637018703b4a
tests/test_requesthandler.py
tests/test_requesthandler.py
from unittest import TestCase from ppp_datamodel.communication import Request from ppp_datamodel import Triple, Resource, Missing, Sentence from ppp_libmodule.tests import PPPTestCase from ppp_spell_checker import app class RequestHandlerTest(PPPTestCase(app)): def testCorrectSentence(self): original = 'Who is the president of the United States' j = {'id': '1', 'language': 'en', 'measures': {}, 'trace': [], 'tree': {'type': 'sentence', 'value': original}} answer = self.request(j) self.assertEquals(len(answer), 0) def testWrongSentence(self): original = 'Who is the pesident of the United States' expected = 'Who is the president of the United States' j = {'id': '1', 'language': 'en', 'measures': {}, 'trace': [], 'tree': {'type': 'sentence', 'value': original}} answer = self.request(j) self.assertEquals(len(answer), 1) self.assertIsInstance(answer[0].tree, Sentence) result = answer[0].tree.__getattr__('value') self.assertEqual(result, expected) def testIrrelevantInput(self): original = 'Who is the president of the United States' j = {'id': '1', 'language': 'en', 'measures': {}, 'trace': [], 'tree': {'type': 'resource', 'value': original}} answer = self.request(j) self.assertEquals(len(answer), 0)
from unittest import TestCase from ppp_datamodel.communication import Request from ppp_datamodel import Triple, Resource, Missing, Sentence from ppp_libmodule.tests import PPPTestCase from ppp_spell_checker import app class RequestHandlerTest(PPPTestCase(app)): def testCorrectSentence(self): original = 'Who is the president of the United States' j = {'id': '1', 'language': 'en', 'measures': {}, 'trace': [], 'tree': {'type': 'sentence', 'value': original}} answer = self.request(j) self.assertEqual(len(answer), 0, answer) def testWrongSentence(self): original = 'Who is the pesident of the United States' expected = 'Who is the president of the United States' j = {'id': '1', 'language': 'en', 'measures': {}, 'trace': [], 'tree': {'type': 'sentence', 'value': original}} answer = self.request(j) self.assertEqual(len(answer), 1, answer) self.assertEqual(answer[0].tree, Sentence(value=expected)) def testIrrelevantInput(self): original = 'Who is the president of the United States' j = {'id': '1', 'language': 'en', 'measures': {}, 'trace': [], 'tree': {'type': 'resource', 'value': original}} answer = self.request(j) self.assertEqual(len(answer), 0, answer)
Write tests in a better way
Write tests in a better way
Python
mit
ProjetPP/PPP-Spell-Checker,ProjetPP/PPP-Spell-Checker
from unittest import TestCase from ppp_datamodel.communication import Request from ppp_datamodel import Triple, Resource, Missing, Sentence from ppp_libmodule.tests import PPPTestCase from ppp_spell_checker import app class RequestHandlerTest(PPPTestCase(app)): def testCorrectSentence(self): original = 'Who is the president of the United States' j = {'id': '1', 'language': 'en', 'measures': {}, 'trace': [], 'tree': {'type': 'sentence', 'value': original}} answer = self.request(j) self.assertEquals(len(answer), 0) def testWrongSentence(self): original = 'Who is the pesident of the United States' expected = 'Who is the president of the United States' j = {'id': '1', 'language': 'en', 'measures': {}, 'trace': [], 'tree': {'type': 'sentence', 'value': original}} answer = self.request(j) self.assertEquals(len(answer), 1) self.assertIsInstance(answer[0].tree, Sentence) result = answer[0].tree.__getattr__('value') self.assertEqual(result, expected) def testIrrelevantInput(self): original = 'Who is the president of the United States' j = {'id': '1', 'language': 'en', 'measures': {}, 'trace': [], 'tree': {'type': 'resource', 'value': original}} answer = self.request(j) self.assertEquals(len(answer), 0) Write tests in a better way
from unittest import TestCase from ppp_datamodel.communication import Request from ppp_datamodel import Triple, Resource, Missing, Sentence from ppp_libmodule.tests import PPPTestCase from ppp_spell_checker import app class RequestHandlerTest(PPPTestCase(app)): def testCorrectSentence(self): original = 'Who is the president of the United States' j = {'id': '1', 'language': 'en', 'measures': {}, 'trace': [], 'tree': {'type': 'sentence', 'value': original}} answer = self.request(j) self.assertEqual(len(answer), 0, answer) def testWrongSentence(self): original = 'Who is the pesident of the United States' expected = 'Who is the president of the United States' j = {'id': '1', 'language': 'en', 'measures': {}, 'trace': [], 'tree': {'type': 'sentence', 'value': original}} answer = self.request(j) self.assertEqual(len(answer), 1, answer) self.assertEqual(answer[0].tree, Sentence(value=expected)) def testIrrelevantInput(self): original = 'Who is the president of the United States' j = {'id': '1', 'language': 'en', 'measures': {}, 'trace': [], 'tree': {'type': 'resource', 'value': original}} answer = self.request(j) self.assertEqual(len(answer), 0, answer)
<commit_before>from unittest import TestCase from ppp_datamodel.communication import Request from ppp_datamodel import Triple, Resource, Missing, Sentence from ppp_libmodule.tests import PPPTestCase from ppp_spell_checker import app class RequestHandlerTest(PPPTestCase(app)): def testCorrectSentence(self): original = 'Who is the president of the United States' j = {'id': '1', 'language': 'en', 'measures': {}, 'trace': [], 'tree': {'type': 'sentence', 'value': original}} answer = self.request(j) self.assertEquals(len(answer), 0) def testWrongSentence(self): original = 'Who is the pesident of the United States' expected = 'Who is the president of the United States' j = {'id': '1', 'language': 'en', 'measures': {}, 'trace': [], 'tree': {'type': 'sentence', 'value': original}} answer = self.request(j) self.assertEquals(len(answer), 1) self.assertIsInstance(answer[0].tree, Sentence) result = answer[0].tree.__getattr__('value') self.assertEqual(result, expected) def testIrrelevantInput(self): original = 'Who is the president of the United States' j = {'id': '1', 'language': 'en', 'measures': {}, 'trace': [], 'tree': {'type': 'resource', 'value': original}} answer = self.request(j) self.assertEquals(len(answer), 0) <commit_msg>Write tests in a better way<commit_after>
from unittest import TestCase from ppp_datamodel.communication import Request from ppp_datamodel import Triple, Resource, Missing, Sentence from ppp_libmodule.tests import PPPTestCase from ppp_spell_checker import app class RequestHandlerTest(PPPTestCase(app)): def testCorrectSentence(self): original = 'Who is the president of the United States' j = {'id': '1', 'language': 'en', 'measures': {}, 'trace': [], 'tree': {'type': 'sentence', 'value': original}} answer = self.request(j) self.assertEqual(len(answer), 0, answer) def testWrongSentence(self): original = 'Who is the pesident of the United States' expected = 'Who is the president of the United States' j = {'id': '1', 'language': 'en', 'measures': {}, 'trace': [], 'tree': {'type': 'sentence', 'value': original}} answer = self.request(j) self.assertEqual(len(answer), 1, answer) self.assertEqual(answer[0].tree, Sentence(value=expected)) def testIrrelevantInput(self): original = 'Who is the president of the United States' j = {'id': '1', 'language': 'en', 'measures': {}, 'trace': [], 'tree': {'type': 'resource', 'value': original}} answer = self.request(j) self.assertEqual(len(answer), 0, answer)
from unittest import TestCase from ppp_datamodel.communication import Request from ppp_datamodel import Triple, Resource, Missing, Sentence from ppp_libmodule.tests import PPPTestCase from ppp_spell_checker import app class RequestHandlerTest(PPPTestCase(app)): def testCorrectSentence(self): original = 'Who is the president of the United States' j = {'id': '1', 'language': 'en', 'measures': {}, 'trace': [], 'tree': {'type': 'sentence', 'value': original}} answer = self.request(j) self.assertEquals(len(answer), 0) def testWrongSentence(self): original = 'Who is the pesident of the United States' expected = 'Who is the president of the United States' j = {'id': '1', 'language': 'en', 'measures': {}, 'trace': [], 'tree': {'type': 'sentence', 'value': original}} answer = self.request(j) self.assertEquals(len(answer), 1) self.assertIsInstance(answer[0].tree, Sentence) result = answer[0].tree.__getattr__('value') self.assertEqual(result, expected) def testIrrelevantInput(self): original = 'Who is the president of the United States' j = {'id': '1', 'language': 'en', 'measures': {}, 'trace': [], 'tree': {'type': 'resource', 'value': original}} answer = self.request(j) self.assertEquals(len(answer), 0) Write tests in a better wayfrom unittest import TestCase from ppp_datamodel.communication import Request from ppp_datamodel import Triple, Resource, Missing, Sentence from ppp_libmodule.tests import PPPTestCase from ppp_spell_checker import app class RequestHandlerTest(PPPTestCase(app)): def testCorrectSentence(self): original = 'Who is the president of the United States' j = {'id': '1', 'language': 'en', 'measures': {}, 'trace': [], 'tree': {'type': 'sentence', 'value': original}} answer = self.request(j) self.assertEqual(len(answer), 0, answer) def testWrongSentence(self): original = 'Who is the pesident of the United States' expected = 'Who is the president of the United States' j = {'id': '1', 'language': 'en', 'measures': {}, 'trace': [], 'tree': {'type': 'sentence', 'value': original}} answer = self.request(j) self.assertEqual(len(answer), 1, answer) self.assertEqual(answer[0].tree, Sentence(value=expected)) def testIrrelevantInput(self): original = 'Who is the president of the United States' j = {'id': '1', 'language': 'en', 'measures': {}, 'trace': [], 'tree': {'type': 'resource', 'value': original}} answer = self.request(j) self.assertEqual(len(answer), 0, answer)
<commit_before>from unittest import TestCase from ppp_datamodel.communication import Request from ppp_datamodel import Triple, Resource, Missing, Sentence from ppp_libmodule.tests import PPPTestCase from ppp_spell_checker import app class RequestHandlerTest(PPPTestCase(app)): def testCorrectSentence(self): original = 'Who is the president of the United States' j = {'id': '1', 'language': 'en', 'measures': {}, 'trace': [], 'tree': {'type': 'sentence', 'value': original}} answer = self.request(j) self.assertEquals(len(answer), 0) def testWrongSentence(self): original = 'Who is the pesident of the United States' expected = 'Who is the president of the United States' j = {'id': '1', 'language': 'en', 'measures': {}, 'trace': [], 'tree': {'type': 'sentence', 'value': original}} answer = self.request(j) self.assertEquals(len(answer), 1) self.assertIsInstance(answer[0].tree, Sentence) result = answer[0].tree.__getattr__('value') self.assertEqual(result, expected) def testIrrelevantInput(self): original = 'Who is the president of the United States' j = {'id': '1', 'language': 'en', 'measures': {}, 'trace': [], 'tree': {'type': 'resource', 'value': original}} answer = self.request(j) self.assertEquals(len(answer), 0) <commit_msg>Write tests in a better way<commit_after>from unittest import TestCase from ppp_datamodel.communication import Request from ppp_datamodel import Triple, Resource, Missing, Sentence from ppp_libmodule.tests import PPPTestCase from ppp_spell_checker import app class RequestHandlerTest(PPPTestCase(app)): def testCorrectSentence(self): original = 'Who is the president of the United States' j = {'id': '1', 'language': 'en', 'measures': {}, 'trace': [], 'tree': {'type': 'sentence', 'value': original}} answer = self.request(j) self.assertEqual(len(answer), 0, answer) def testWrongSentence(self): original = 'Who is the pesident of the United States' expected = 'Who is the president of the United States' j = {'id': '1', 'language': 'en', 'measures': {}, 'trace': [], 'tree': {'type': 'sentence', 'value': original}} answer = self.request(j) self.assertEqual(len(answer), 1, answer) self.assertEqual(answer[0].tree, Sentence(value=expected)) def testIrrelevantInput(self): original = 'Who is the president of the United States' j = {'id': '1', 'language': 'en', 'measures': {}, 'trace': [], 'tree': {'type': 'resource', 'value': original}} answer = self.request(j) self.assertEqual(len(answer), 0, answer)
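
The test-cleanup record above relies on two small unittest habits: assertEqual instead of the deprecated assertEquals, and the optional msg argument so a failure prints the offending value rather than just the mismatched lengths. A toy illustration (the test data is made up):

import unittest

class AnswerCountTest(unittest.TestCase):
    def test_no_answers(self):
        answer = []   # stand-in for a module's list of responses
        # Passing `answer` as the msg argument makes a failure show the
        # unexpected content, not only the differing lengths.
        self.assertEqual(len(answer), 0, answer)

if __name__ == '__main__':
    unittest.main()
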
b6ebb7936e19389ee132b6f0bfbeb1ba7441f95a
tmaps/extensions/__init__.py
tmaps/extensions/__init__.py
from .gc3pie import GC3PieEngine gc3pie_engine = GC3PieEngine() from flask.ext.redis import FlaskRedis redis_store = FlaskRedis() from flask.ext.sqlalchemy import SQLAlchemy db = SQLAlchemy() from auth import jwt from spark import Spark spark = Spark() from .gc3pie import GC3PieEngine gc3pie_engine = GC3PieEngine() from flask.ext.uwsgi_websocket import GeventWebSocket websocket = GeventWebSocket()
from flask.ext.redis import FlaskRedis redis_store = FlaskRedis() from flask.ext.sqlalchemy import SQLAlchemy db = SQLAlchemy() from auth import jwt from spark import Spark spark = Spark() from gc3pie import GC3Pie gc3pie = GC3Pie() from flask.ext.uwsgi_websocket import GeventWebSocket websocket = GeventWebSocket()
Fix import of tmaps extensions
Fix import of tmaps extensions
Python
agpl-3.0
TissueMAPS/TmServer
from .gc3pie import GC3PieEngine gc3pie_engine = GC3PieEngine() from flask.ext.redis import FlaskRedis redis_store = FlaskRedis() from flask.ext.sqlalchemy import SQLAlchemy db = SQLAlchemy() from auth import jwt from spark import Spark spark = Spark() from .gc3pie import GC3PieEngine gc3pie_engine = GC3PieEngine() from flask.ext.uwsgi_websocket import GeventWebSocket websocket = GeventWebSocket() Fix import of tmaps extensions
from flask.ext.redis import FlaskRedis redis_store = FlaskRedis() from flask.ext.sqlalchemy import SQLAlchemy db = SQLAlchemy() from auth import jwt from spark import Spark spark = Spark() from gc3pie import GC3Pie gc3pie = GC3Pie() from flask.ext.uwsgi_websocket import GeventWebSocket websocket = GeventWebSocket()
<commit_before>from .gc3pie import GC3PieEngine gc3pie_engine = GC3PieEngine() from flask.ext.redis import FlaskRedis redis_store = FlaskRedis() from flask.ext.sqlalchemy import SQLAlchemy db = SQLAlchemy() from auth import jwt from spark import Spark spark = Spark() from .gc3pie import GC3PieEngine gc3pie_engine = GC3PieEngine() from flask.ext.uwsgi_websocket import GeventWebSocket websocket = GeventWebSocket() <commit_msg>Fix import of tmaps extensions<commit_after>
from flask.ext.redis import FlaskRedis redis_store = FlaskRedis() from flask.ext.sqlalchemy import SQLAlchemy db = SQLAlchemy() from auth import jwt from spark import Spark spark = Spark() from gc3pie import GC3Pie gc3pie = GC3Pie() from flask.ext.uwsgi_websocket import GeventWebSocket websocket = GeventWebSocket()
from .gc3pie import GC3PieEngine gc3pie_engine = GC3PieEngine() from flask.ext.redis import FlaskRedis redis_store = FlaskRedis() from flask.ext.sqlalchemy import SQLAlchemy db = SQLAlchemy() from auth import jwt from spark import Spark spark = Spark() from .gc3pie import GC3PieEngine gc3pie_engine = GC3PieEngine() from flask.ext.uwsgi_websocket import GeventWebSocket websocket = GeventWebSocket() Fix import of tmaps extensionsfrom flask.ext.redis import FlaskRedis redis_store = FlaskRedis() from flask.ext.sqlalchemy import SQLAlchemy db = SQLAlchemy() from auth import jwt from spark import Spark spark = Spark() from gc3pie import GC3Pie gc3pie = GC3Pie() from flask.ext.uwsgi_websocket import GeventWebSocket websocket = GeventWebSocket()
<commit_before>from .gc3pie import GC3PieEngine gc3pie_engine = GC3PieEngine() from flask.ext.redis import FlaskRedis redis_store = FlaskRedis() from flask.ext.sqlalchemy import SQLAlchemy db = SQLAlchemy() from auth import jwt from spark import Spark spark = Spark() from .gc3pie import GC3PieEngine gc3pie_engine = GC3PieEngine() from flask.ext.uwsgi_websocket import GeventWebSocket websocket = GeventWebSocket() <commit_msg>Fix import of tmaps extensions<commit_after>from flask.ext.redis import FlaskRedis redis_store = FlaskRedis() from flask.ext.sqlalchemy import SQLAlchemy db = SQLAlchemy() from auth import jwt from spark import Spark spark = Spark() from gc3pie import GC3Pie gc3pie = GC3Pie() from flask.ext.uwsgi_websocket import GeventWebSocket websocket = GeventWebSocket()
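
The extensions record above follows the usual Flask pattern of creating unbound extension objects at import time and binding them to an application later. A minimal sketch of that pattern, using the modern flask_sqlalchemy import name rather than the old flask.ext alias (configuration values are placeholders):

from flask import Flask
from flask_sqlalchemy import SQLAlchemy

db = SQLAlchemy()   # created unbound at import time, as in the module above

def create_app():
    app = Flask(__name__)
    app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///:memory:'
    db.init_app(app)   # the extension attaches itself to a concrete app here
    return app

if __name__ == '__main__':
    create_app()
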
ac462d27b4242a9e2ee04c052da6b832ae3d0df7
plugins/Tools/TranslateTool/__init__.py
plugins/Tools/TranslateTool/__init__.py
# Copyright (c) 2015 Ultimaker B.V. # Uranium is released under the terms of the AGPLv3 or higher. from . import TranslateTool from UM.i18n import i18nCatalog i18n_catalog = i18nCatalog("uranium") def getMetaData(): return { "type": "tool", "plugin": { "name": i18n_catalog.i18nc("@label", "Translate Tool"), "author": "Ultimaker", "version": "1.0", "description": i18n_catalog.i18nc("@info:whatsthis", "Provides the Translate tool."), "api": 2 }, "tool": { "name": i18n_catalog.i18nc("@action:button", "Translate"), "description": i18n_catalog.i18nc("@info:tooltip", "Translate Object"), "icon": "translate", "tool_panel": "TranslateTool.qml", "weight": 3 } } def register(app): return { "tool": TranslateTool.TranslateTool() }
# Copyright (c) 2015 Ultimaker B.V. # Uranium is released under the terms of the AGPLv3 or higher. from . import TranslateTool from UM.i18n import i18nCatalog i18n_catalog = i18nCatalog("uranium") def getMetaData(): return { "type": "tool", "plugin": { "name": i18n_catalog.i18nc("@label", "Translate Tool"), "author": "Ultimaker", "version": "1.0", "description": i18n_catalog.i18nc("@info:whatsthis", "Provides the Translate tool."), "api": 2 }, "tool": { "name": i18n_catalog.i18nc("@action:button", "Translate"), "description": i18n_catalog.i18nc("@info:tooltip", "Translate Object"), "icon": "translate", "tool_panel": "TranslateTool.qml", "weight": -1 } } def register(app): return { "tool": TranslateTool.TranslateTool() }
Fix order of tools in the toolbar (translate tool on top)
Fix order of tools in the toolbar (translate tool on top) CURA-838
Python
agpl-3.0
onitake/Uranium,onitake/Uranium
# Copyright (c) 2015 Ultimaker B.V. # Uranium is released under the terms of the AGPLv3 or higher. from . import TranslateTool from UM.i18n import i18nCatalog i18n_catalog = i18nCatalog("uranium") def getMetaData(): return { "type": "tool", "plugin": { "name": i18n_catalog.i18nc("@label", "Translate Tool"), "author": "Ultimaker", "version": "1.0", "description": i18n_catalog.i18nc("@info:whatsthis", "Provides the Translate tool."), "api": 2 }, "tool": { "name": i18n_catalog.i18nc("@action:button", "Translate"), "description": i18n_catalog.i18nc("@info:tooltip", "Translate Object"), "icon": "translate", "tool_panel": "TranslateTool.qml", "weight": 3 } } def register(app): return { "tool": TranslateTool.TranslateTool() } Fix order of tools in the toolbar (translate tool on top) CURA-838
# Copyright (c) 2015 Ultimaker B.V. # Uranium is released under the terms of the AGPLv3 or higher. from . import TranslateTool from UM.i18n import i18nCatalog i18n_catalog = i18nCatalog("uranium") def getMetaData(): return { "type": "tool", "plugin": { "name": i18n_catalog.i18nc("@label", "Translate Tool"), "author": "Ultimaker", "version": "1.0", "description": i18n_catalog.i18nc("@info:whatsthis", "Provides the Translate tool."), "api": 2 }, "tool": { "name": i18n_catalog.i18nc("@action:button", "Translate"), "description": i18n_catalog.i18nc("@info:tooltip", "Translate Object"), "icon": "translate", "tool_panel": "TranslateTool.qml", "weight": -1 } } def register(app): return { "tool": TranslateTool.TranslateTool() }
<commit_before># Copyright (c) 2015 Ultimaker B.V. # Uranium is released under the terms of the AGPLv3 or higher. from . import TranslateTool from UM.i18n import i18nCatalog i18n_catalog = i18nCatalog("uranium") def getMetaData(): return { "type": "tool", "plugin": { "name": i18n_catalog.i18nc("@label", "Translate Tool"), "author": "Ultimaker", "version": "1.0", "description": i18n_catalog.i18nc("@info:whatsthis", "Provides the Translate tool."), "api": 2 }, "tool": { "name": i18n_catalog.i18nc("@action:button", "Translate"), "description": i18n_catalog.i18nc("@info:tooltip", "Translate Object"), "icon": "translate", "tool_panel": "TranslateTool.qml", "weight": 3 } } def register(app): return { "tool": TranslateTool.TranslateTool() } <commit_msg>Fix order of tools in the toolbar (translate tool on top) CURA-838<commit_after>
# Copyright (c) 2015 Ultimaker B.V. # Uranium is released under the terms of the AGPLv3 or higher. from . import TranslateTool from UM.i18n import i18nCatalog i18n_catalog = i18nCatalog("uranium") def getMetaData(): return { "type": "tool", "plugin": { "name": i18n_catalog.i18nc("@label", "Translate Tool"), "author": "Ultimaker", "version": "1.0", "description": i18n_catalog.i18nc("@info:whatsthis", "Provides the Translate tool."), "api": 2 }, "tool": { "name": i18n_catalog.i18nc("@action:button", "Translate"), "description": i18n_catalog.i18nc("@info:tooltip", "Translate Object"), "icon": "translate", "tool_panel": "TranslateTool.qml", "weight": -1 } } def register(app): return { "tool": TranslateTool.TranslateTool() }
# Copyright (c) 2015 Ultimaker B.V. # Uranium is released under the terms of the AGPLv3 or higher. from . import TranslateTool from UM.i18n import i18nCatalog i18n_catalog = i18nCatalog("uranium") def getMetaData(): return { "type": "tool", "plugin": { "name": i18n_catalog.i18nc("@label", "Translate Tool"), "author": "Ultimaker", "version": "1.0", "description": i18n_catalog.i18nc("@info:whatsthis", "Provides the Translate tool."), "api": 2 }, "tool": { "name": i18n_catalog.i18nc("@action:button", "Translate"), "description": i18n_catalog.i18nc("@info:tooltip", "Translate Object"), "icon": "translate", "tool_panel": "TranslateTool.qml", "weight": 3 } } def register(app): return { "tool": TranslateTool.TranslateTool() } Fix order of tools in the toolbar (translate tool on top) CURA-838# Copyright (c) 2015 Ultimaker B.V. # Uranium is released under the terms of the AGPLv3 or higher. from . import TranslateTool from UM.i18n import i18nCatalog i18n_catalog = i18nCatalog("uranium") def getMetaData(): return { "type": "tool", "plugin": { "name": i18n_catalog.i18nc("@label", "Translate Tool"), "author": "Ultimaker", "version": "1.0", "description": i18n_catalog.i18nc("@info:whatsthis", "Provides the Translate tool."), "api": 2 }, "tool": { "name": i18n_catalog.i18nc("@action:button", "Translate"), "description": i18n_catalog.i18nc("@info:tooltip", "Translate Object"), "icon": "translate", "tool_panel": "TranslateTool.qml", "weight": -1 } } def register(app): return { "tool": TranslateTool.TranslateTool() }
<commit_before># Copyright (c) 2015 Ultimaker B.V. # Uranium is released under the terms of the AGPLv3 or higher. from . import TranslateTool from UM.i18n import i18nCatalog i18n_catalog = i18nCatalog("uranium") def getMetaData(): return { "type": "tool", "plugin": { "name": i18n_catalog.i18nc("@label", "Translate Tool"), "author": "Ultimaker", "version": "1.0", "description": i18n_catalog.i18nc("@info:whatsthis", "Provides the Translate tool."), "api": 2 }, "tool": { "name": i18n_catalog.i18nc("@action:button", "Translate"), "description": i18n_catalog.i18nc("@info:tooltip", "Translate Object"), "icon": "translate", "tool_panel": "TranslateTool.qml", "weight": 3 } } def register(app): return { "tool": TranslateTool.TranslateTool() } <commit_msg>Fix order of tools in the toolbar (translate tool on top) CURA-838<commit_after># Copyright (c) 2015 Ultimaker B.V. # Uranium is released under the terms of the AGPLv3 or higher. from . import TranslateTool from UM.i18n import i18nCatalog i18n_catalog = i18nCatalog("uranium") def getMetaData(): return { "type": "tool", "plugin": { "name": i18n_catalog.i18nc("@label", "Translate Tool"), "author": "Ultimaker", "version": "1.0", "description": i18n_catalog.i18nc("@info:whatsthis", "Provides the Translate tool."), "api": 2 }, "tool": { "name": i18n_catalog.i18nc("@action:button", "Translate"), "description": i18n_catalog.i18nc("@info:tooltip", "Translate Object"), "icon": "translate", "tool_panel": "TranslateTool.qml", "weight": -1 } } def register(app): return { "tool": TranslateTool.TranslateTool() }
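
The toolbar record above changes nothing but the weight value; assuming the toolbar sorts tools by ascending weight (which is what moving the value from 3 to -1 implies), the effect can be pictured with a few lines of plain Python (the other tool names are invented):

tools = [
    {"name": "Rotate",    "weight": 1},
    {"name": "Scale",     "weight": 2},
    {"name": "Mirror",    "weight": 4},
    {"name": "Translate", "weight": -1},   # the value changed in the record
]

toolbar = sorted(tools, key=lambda tool: tool["weight"])
print([tool["name"] for tool in toolbar])   # 'Translate' now comes first
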
69b3911b6aa13a6420a1b9dc3117164f6bf8330f
PyFVCOM/__init__.py
PyFVCOM/__init__.py
""" The FVCOM Python toolbox (PyFVCOM) """ __version__ = '1.3.4' __author__ = 'Pierre Cazenave' __credits__ = ['Pierre Cazenave'] __license__ = 'MIT' __maintainer__ = 'Pierre Cazenave' __email__ = 'pica@pml.ac.uk' import inspect from warnings import warn # Import everything! from PyFVCOM import buoy_tools from PyFVCOM import cst_tools from PyFVCOM import ctd_tools from PyFVCOM import grid_tools from PyFVCOM import ll2utm from PyFVCOM import ocean_tools from PyFVCOM import stats_tools from PyFVCOM import tide_tools from PyFVCOM import tidal_ellipse from PyFVCOM import process_results from PyFVCOM import read_results # External TAPPY now instead of my bundled version. Requires my forked version # of TAPPY from https://github.com/pwcazenave/tappy or # http://gitlab.em.pml.ac.uk/pica/tappy. try: from tappy import tappy except ImportError: raise ImportError('TAPPY not found. Please install it from http://gitlab.em.pml.ac.uk/pica/tappy or https://github.com/pwcazenave/tappy.') # For backwards-compatibility. process_FVCOM_results = process_results read_FVCOM_results = read_results
""" The FVCOM Python toolbox (PyFVCOM) """ __version__ = '1.3.4' __author__ = 'Pierre Cazenave' __credits__ = ['Pierre Cazenave'] __license__ = 'MIT' __maintainer__ = 'Pierre Cazenave' __email__ = 'pica@pml.ac.uk' import inspect from warnings import warn # Import everything! from PyFVCOM import buoy_tools from PyFVCOM import cst_tools from PyFVCOM import ctd_tools from PyFVCOM import grid_tools from PyFVCOM import ll2utm from PyFVCOM import ocean_tools from PyFVCOM import stats_tools from PyFVCOM import tide_tools from PyFVCOM import tidal_ellipse from PyFVCOM import process_results from PyFVCOM import read_results # External TAPPY now instead of my bundled version. Requires my forked version # of TAPPY from https://github.com/pwcazenave/tappy or # http://gitlab.em.pml.ac.uk/pica/tappy. try: from tappy import tappy except ImportError: raise ImportError('TAPPY not found. Please install it from http://gitlab.em.pml.ac.uk/pica/tappy or https://github.com/pwcazenave/tappy.')
Remove the backwards compatibility as it wasn't working for some reason.
Remove the backwards compatibility as it wasn't working for some reason.
Python
mit
pwcazenave/PyFVCOM
""" The FVCOM Python toolbox (PyFVCOM) """ __version__ = '1.3.4' __author__ = 'Pierre Cazenave' __credits__ = ['Pierre Cazenave'] __license__ = 'MIT' __maintainer__ = 'Pierre Cazenave' __email__ = 'pica@pml.ac.uk' import inspect from warnings import warn # Import everything! from PyFVCOM import buoy_tools from PyFVCOM import cst_tools from PyFVCOM import ctd_tools from PyFVCOM import grid_tools from PyFVCOM import ll2utm from PyFVCOM import ocean_tools from PyFVCOM import stats_tools from PyFVCOM import tide_tools from PyFVCOM import tidal_ellipse from PyFVCOM import process_results from PyFVCOM import read_results # External TAPPY now instead of my bundled version. Requires my forked version # of TAPPY from https://github.com/pwcazenave/tappy or # http://gitlab.em.pml.ac.uk/pica/tappy. try: from tappy import tappy except ImportError: raise ImportError('TAPPY not found. Please install it from http://gitlab.em.pml.ac.uk/pica/tappy or https://github.com/pwcazenave/tappy.') # For backwards-compatibility. process_FVCOM_results = process_results read_FVCOM_results = read_results Remove the backwards compatibility as it wasn't working for some reason.
""" The FVCOM Python toolbox (PyFVCOM) """ __version__ = '1.3.4' __author__ = 'Pierre Cazenave' __credits__ = ['Pierre Cazenave'] __license__ = 'MIT' __maintainer__ = 'Pierre Cazenave' __email__ = 'pica@pml.ac.uk' import inspect from warnings import warn # Import everything! from PyFVCOM import buoy_tools from PyFVCOM import cst_tools from PyFVCOM import ctd_tools from PyFVCOM import grid_tools from PyFVCOM import ll2utm from PyFVCOM import ocean_tools from PyFVCOM import stats_tools from PyFVCOM import tide_tools from PyFVCOM import tidal_ellipse from PyFVCOM import process_results from PyFVCOM import read_results # External TAPPY now instead of my bundled version. Requires my forked version # of TAPPY from https://github.com/pwcazenave/tappy or # http://gitlab.em.pml.ac.uk/pica/tappy. try: from tappy import tappy except ImportError: raise ImportError('TAPPY not found. Please install it from http://gitlab.em.pml.ac.uk/pica/tappy or https://github.com/pwcazenave/tappy.')
<commit_before>""" The FVCOM Python toolbox (PyFVCOM) """ __version__ = '1.3.4' __author__ = 'Pierre Cazenave' __credits__ = ['Pierre Cazenave'] __license__ = 'MIT' __maintainer__ = 'Pierre Cazenave' __email__ = 'pica@pml.ac.uk' import inspect from warnings import warn # Import everything! from PyFVCOM import buoy_tools from PyFVCOM import cst_tools from PyFVCOM import ctd_tools from PyFVCOM import grid_tools from PyFVCOM import ll2utm from PyFVCOM import ocean_tools from PyFVCOM import stats_tools from PyFVCOM import tide_tools from PyFVCOM import tidal_ellipse from PyFVCOM import process_results from PyFVCOM import read_results # External TAPPY now instead of my bundled version. Requires my forked version # of TAPPY from https://github.com/pwcazenave/tappy or # http://gitlab.em.pml.ac.uk/pica/tappy. try: from tappy import tappy except ImportError: raise ImportError('TAPPY not found. Please install it from http://gitlab.em.pml.ac.uk/pica/tappy or https://github.com/pwcazenave/tappy.') # For backwards-compatibility. process_FVCOM_results = process_results read_FVCOM_results = read_results <commit_msg>Remove the backwards compatibility as it wasn't working for some reason.<commit_after>
""" The FVCOM Python toolbox (PyFVCOM) """ __version__ = '1.3.4' __author__ = 'Pierre Cazenave' __credits__ = ['Pierre Cazenave'] __license__ = 'MIT' __maintainer__ = 'Pierre Cazenave' __email__ = 'pica@pml.ac.uk' import inspect from warnings import warn # Import everything! from PyFVCOM import buoy_tools from PyFVCOM import cst_tools from PyFVCOM import ctd_tools from PyFVCOM import grid_tools from PyFVCOM import ll2utm from PyFVCOM import ocean_tools from PyFVCOM import stats_tools from PyFVCOM import tide_tools from PyFVCOM import tidal_ellipse from PyFVCOM import process_results from PyFVCOM import read_results # External TAPPY now instead of my bundled version. Requires my forked version # of TAPPY from https://github.com/pwcazenave/tappy or # http://gitlab.em.pml.ac.uk/pica/tappy. try: from tappy import tappy except ImportError: raise ImportError('TAPPY not found. Please install it from http://gitlab.em.pml.ac.uk/pica/tappy or https://github.com/pwcazenave/tappy.')
""" The FVCOM Python toolbox (PyFVCOM) """ __version__ = '1.3.4' __author__ = 'Pierre Cazenave' __credits__ = ['Pierre Cazenave'] __license__ = 'MIT' __maintainer__ = 'Pierre Cazenave' __email__ = 'pica@pml.ac.uk' import inspect from warnings import warn # Import everything! from PyFVCOM import buoy_tools from PyFVCOM import cst_tools from PyFVCOM import ctd_tools from PyFVCOM import grid_tools from PyFVCOM import ll2utm from PyFVCOM import ocean_tools from PyFVCOM import stats_tools from PyFVCOM import tide_tools from PyFVCOM import tidal_ellipse from PyFVCOM import process_results from PyFVCOM import read_results # External TAPPY now instead of my bundled version. Requires my forked version # of TAPPY from https://github.com/pwcazenave/tappy or # http://gitlab.em.pml.ac.uk/pica/tappy. try: from tappy import tappy except ImportError: raise ImportError('TAPPY not found. Please install it from http://gitlab.em.pml.ac.uk/pica/tappy or https://github.com/pwcazenave/tappy.') # For backwards-compatibility. process_FVCOM_results = process_results read_FVCOM_results = read_results Remove the backwards compatibility as it wasn't working for some reason.""" The FVCOM Python toolbox (PyFVCOM) """ __version__ = '1.3.4' __author__ = 'Pierre Cazenave' __credits__ = ['Pierre Cazenave'] __license__ = 'MIT' __maintainer__ = 'Pierre Cazenave' __email__ = 'pica@pml.ac.uk' import inspect from warnings import warn # Import everything! from PyFVCOM import buoy_tools from PyFVCOM import cst_tools from PyFVCOM import ctd_tools from PyFVCOM import grid_tools from PyFVCOM import ll2utm from PyFVCOM import ocean_tools from PyFVCOM import stats_tools from PyFVCOM import tide_tools from PyFVCOM import tidal_ellipse from PyFVCOM import process_results from PyFVCOM import read_results # External TAPPY now instead of my bundled version. Requires my forked version # of TAPPY from https://github.com/pwcazenave/tappy or # http://gitlab.em.pml.ac.uk/pica/tappy. try: from tappy import tappy except ImportError: raise ImportError('TAPPY not found. Please install it from http://gitlab.em.pml.ac.uk/pica/tappy or https://github.com/pwcazenave/tappy.')
<commit_before>""" The FVCOM Python toolbox (PyFVCOM) """ __version__ = '1.3.4' __author__ = 'Pierre Cazenave' __credits__ = ['Pierre Cazenave'] __license__ = 'MIT' __maintainer__ = 'Pierre Cazenave' __email__ = 'pica@pml.ac.uk' import inspect from warnings import warn # Import everything! from PyFVCOM import buoy_tools from PyFVCOM import cst_tools from PyFVCOM import ctd_tools from PyFVCOM import grid_tools from PyFVCOM import ll2utm from PyFVCOM import ocean_tools from PyFVCOM import stats_tools from PyFVCOM import tide_tools from PyFVCOM import tidal_ellipse from PyFVCOM import process_results from PyFVCOM import read_results # External TAPPY now instead of my bundled version. Requires my forked version # of TAPPY from https://github.com/pwcazenave/tappy or # http://gitlab.em.pml.ac.uk/pica/tappy. try: from tappy import tappy except ImportError: raise ImportError('TAPPY not found. Please install it from http://gitlab.em.pml.ac.uk/pica/tappy or https://github.com/pwcazenave/tappy.') # For backwards-compatibility. process_FVCOM_results = process_results read_FVCOM_results = read_results <commit_msg>Remove the backwards compatibility as it wasn't working for some reason.<commit_after>""" The FVCOM Python toolbox (PyFVCOM) """ __version__ = '1.3.4' __author__ = 'Pierre Cazenave' __credits__ = ['Pierre Cazenave'] __license__ = 'MIT' __maintainer__ = 'Pierre Cazenave' __email__ = 'pica@pml.ac.uk' import inspect from warnings import warn # Import everything! from PyFVCOM import buoy_tools from PyFVCOM import cst_tools from PyFVCOM import ctd_tools from PyFVCOM import grid_tools from PyFVCOM import ll2utm from PyFVCOM import ocean_tools from PyFVCOM import stats_tools from PyFVCOM import tide_tools from PyFVCOM import tidal_ellipse from PyFVCOM import process_results from PyFVCOM import read_results # External TAPPY now instead of my bundled version. Requires my forked version # of TAPPY from https://github.com/pwcazenave/tappy or # http://gitlab.em.pml.ac.uk/pica/tappy. try: from tappy import tappy except ImportError: raise ImportError('TAPPY not found. Please install it from http://gitlab.em.pml.ac.uk/pica/tappy or https://github.com/pwcazenave/tappy.')
f967ba433284c573cbce47d84ae55c209801ad6e
ash/PRESUBMIT.py
ash/PRESUBMIT.py
# Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Chromium presubmit script for src/ash See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for more details on the presubmit API built into gcl. """ def GetPreferredTrySlaves(): return ['linux_chromeos']
# Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Chromium presubmit script for src/ash See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for more details on the presubmit API built into gcl. """ def GetPreferredTrySlaves(): return ['linux_chromeos', 'linux_chromeos_clang']
Add linux_chromeos_clang to the list of automatic trybots.
Add linux_chromeos_clang to the list of automatic trybots. BUG=none TEST=none Review URL: https://chromiumcodereview.appspot.com/10833037 git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@148600 0039d316-1c4b-4281-b951-d872f2087c98
Python
bsd-3-clause
krieger-od/nwjs_chromium.src,anirudhSK/chromium,crosswalk-project/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,M4sse/chromium.src,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,M4sse/chromium.src,hgl888/chromium-crosswalk-efl,patrickm/chromium.src,crosswalk-project/chromium-crosswalk-efl,mogoweb/chromium-crosswalk,anirudhSK/chromium,M4sse/chromium.src,axinging/chromium-crosswalk,pozdnyakov/chromium-crosswalk,Chilledheart/chromium,ondra-novak/chromium.src,ondra-novak/chromium.src,zcbenz/cefode-chromium,bright-sparks/chromium-spacewalk,anirudhSK/chromium,pozdnyakov/chromium-crosswalk,nacl-webkit/chrome_deps,nacl-webkit/chrome_deps,chuan9/chromium-crosswalk,patrickm/chromium.src,nacl-webkit/chrome_deps,littlstar/chromium.src,zcbenz/cefode-chromium,PeterWangIntel/chromium-crosswalk,hujiajie/pa-chromium,crosswalk-project/chromium-crosswalk-efl,mogoweb/chromium-crosswalk,anirudhSK/chromium,mogoweb/chromium-crosswalk,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-crosswalk,pozdnyakov/chromium-crosswalk,Just-D/chromium-1,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,junmin-zhu/chromium-rivertrail,mohamed--abdel-maksoud/chromium.src,ChromiumWebApps/chromium,timopulkkinen/BubbleFish,zcbenz/cefode-chromium,ltilve/chromium,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,dednal/chromium.src,krieger-od/nwjs_chromium.src,Fireblend/chromium-crosswalk,markYoungH/chromium.src,ltilve/chromium,PeterWangIntel/chromium-crosswalk,Jonekee/chromium.src,krieger-od/nwjs_chromium.src,Fireblend/chromium-crosswalk,Jonekee/chromium.src,hujiajie/pa-chromium,mohamed--abdel-maksoud/chromium.src,timopulkkinen/BubbleFish,M4sse/chromium.src,crosswalk-project/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium-crosswalk,fujunwei/chromium-crosswalk,Jonekee/chromium.src,PeterWangIntel/chromium-crosswalk,markYoungH/chromium.src,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,junmin-zhu/chromium-rivertrail,jaruba/chromium.src,anirudhSK/chromium,Fireblend/chromium-crosswalk,Chilledheart/chromium,pozdnyakov/chromium-crosswalk,dednal/chromium.src,junmin-zhu/chromium-rivertrail,Jonekee/chromium.src,zcbenz/cefode-chromium,Pluto-tv/chromium-crosswalk,junmin-zhu/chromium-rivertrail,M4sse/chromium.src,ChromiumWebApps/chromium,nacl-webkit/chrome_deps,fujunwei/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk,M4sse/chromium.src,bright-sparks/chromium-spacewalk,jaruba/chromium.src,mogoweb/chromium-crosswalk,hujiajie/pa-chromium,hujiajie/pa-chromium,M4sse/chromium.src,mogoweb/chromium-crosswalk,markYoungH/chromium.src,Just-D/chromium-1,ondra-novak/chromium.src,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,M4sse/chromium.src,pozdnyakov/chromium-crosswalk,pozdnyakov/chromium-crosswalk,Chilledheart/chromium,ondra-novak/chromium.src,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,zcbenz/cefode-chromium,hujiajie/pa-chromium,dushu1203/chromium.src,anirudhSK/chromium,ltilve/chromium,hgl888/chromium-crosswalk,bright-sparks/chromium-spacewalk,PeterWangIntel/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,ondra-novak/chromium.src,anirudhSK/chromium,jaruba/chromium.src,dushu1203/chromium.src,hujiajie/pa-chromium,mogoweb/chromium-crosswalk,chuan9/chromium-crosswalk,patrickm/chromium.src,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,Jonekee/chromium.src,fujunwei/chromium-crosswalk,zcbenz/cefode-chromium,Chilledheart/chromium,dushu1203/chromium.src,anirudhS
K/chromium,axinging/chromium-crosswalk,nacl-webkit/chrome_deps,dednal/chromium.src,hgl888/chromium-crosswalk,patrickm/chromium.src,mogoweb/chromium-crosswalk,hgl888/chromium-crosswalk-efl,littlstar/chromium.src,junmin-zhu/chromium-rivertrail,Chilledheart/chromium,patrickm/chromium.src,mohamed--abdel-maksoud/chromium.src,hujiajie/pa-chromium,fujunwei/chromium-crosswalk,littlstar/chromium.src,patrickm/chromium.src,TheTypoMaster/chromium-crosswalk,dednal/chromium.src,anirudhSK/chromium,Just-D/chromium-1,ondra-novak/chromium.src,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,anirudhSK/chromium,Just-D/chromium-1,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk-efl,junmin-zhu/chromium-rivertrail,mohamed--abdel-maksoud/chromium.src,ChromiumWebApps/chromium,hgl888/chromium-crosswalk,bright-sparks/chromium-spacewalk,crosswalk-project/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,TheTypoMaster/chromium-crosswalk,nacl-webkit/chrome_deps,hgl888/chromium-crosswalk-efl,zcbenz/cefode-chromium,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,jaruba/chromium.src,markYoungH/chromium.src,markYoungH/chromium.src,timopulkkinen/BubbleFish,markYoungH/chromium.src,junmin-zhu/chromium-rivertrail,hgl888/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,dushu1203/chromium.src,timopulkkinen/BubbleFish,pozdnyakov/chromium-crosswalk,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,TheTypoMaster/chromium-crosswalk,ondra-novak/chromium.src,zcbenz/cefode-chromium,timopulkkinen/BubbleFish,pozdnyakov/chromium-crosswalk,patrickm/chromium.src,chuan9/chromium-crosswalk,dednal/chromium.src,hgl888/chromium-crosswalk,M4sse/chromium.src,jaruba/chromium.src,krieger-od/nwjs_chromium.src,bright-sparks/chromium-spacewalk,Fireblend/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.src,chuan9/chromium-crosswalk,pozdnyakov/chromium-crosswalk,ChromiumWebApps/chromium,PeterWangIntel/chromium-crosswalk,timopulkkinen/BubbleFish,jaruba/chromium.src,ChromiumWebApps/chromium,ChromiumWebApps/chromium,ChromiumWebApps/chromium,crosswalk-project/chromium-crosswalk-efl,dednal/chromium.src,Jonekee/chromium.src,nacl-webkit/chrome_deps,ChromiumWebApps/chromium,Chilledheart/chromium,bright-sparks/chromium-spacewalk,ChromiumWebApps/chromium,Fireblend/chromium-crosswalk,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,axinging/chromium-crosswalk,Chilledheart/chromium,littlstar/chromium.src,littlstar/chromium.src,Jonekee/chromium.src,ondra-novak/chromium.src,mohamed--abdel-maksoud/chromium.src,zcbenz/cefode-chromium,bright-sparks/chromium-spacewalk,hgl888/chromium-crosswalk,mogoweb/chromium-crosswalk,Just-D/chromium-1,dushu1203/chromium.src,ChromiumWebApps/chromium,patrickm/chromium.src,mohamed--abdel-maksoud/chromium.src,nacl-webkit/chrome_deps,krieger-od/nwjs_chromium.src,jaruba/chromium.src,nacl-webkit/chrome_deps,PeterWangIntel/chromium-crosswalk,zcbenz/cefode-chromium,Pluto-tv/chromium-crosswalk,markYoungH/chromium.src,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,timopulkkinen/BubbleFish,zcbenz/cefode-chromium,fujunwei/chromium-crosswalk,Just-D/chromium-1,dednal/chromium.src,markYoungH/chromium.src,dushu1203/chromium.src,Fireblend/chromium-crosswalk,ChromiumWebApps/chromium,junmin-zhu/chromium-rivertrail,Jonekee/chromium.src,PeterWangIntel/chromium-crosswalk,nacl-webkit/chrome_deps,k
rieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,Just-D/chromium-1,dednal/chromium.src,jaruba/chromium.src,pozdnyakov/chromium-crosswalk,littlstar/chromium.src,markYoungH/chromium.src,junmin-zhu/chromium-rivertrail,pozdnyakov/chromium-crosswalk,ltilve/chromium,krieger-od/nwjs_chromium.src,ltilve/chromium,anirudhSK/chromium,ltilve/chromium,Fireblend/chromium-crosswalk,ltilve/chromium,patrickm/chromium.src,mogoweb/chromium-crosswalk,junmin-zhu/chromium-rivertrail,timopulkkinen/BubbleFish,ChromiumWebApps/chromium,chuan9/chromium-crosswalk,dednal/chromium.src,ltilve/chromium,markYoungH/chromium.src,anirudhSK/chromium,crosswalk-project/chromium-crosswalk-efl,junmin-zhu/chromium-rivertrail,timopulkkinen/BubbleFish,chuan9/chromium-crosswalk,dushu1203/chromium.src,jaruba/chromium.src,axinging/chromium-crosswalk,dednal/chromium.src,Pluto-tv/chromium-crosswalk,timopulkkinen/BubbleFish,dushu1203/chromium.src,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk-efl,nacl-webkit/chrome_deps,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,littlstar/chromium.src,Jonekee/chromium.src,bright-sparks/chromium-spacewalk,Just-D/chromium-1,bright-sparks/chromium-spacewalk,dushu1203/chromium.src,hujiajie/pa-chromium,hujiajie/pa-chromium,mogoweb/chromium-crosswalk,dushu1203/chromium.src,crosswalk-project/chromium-crosswalk-efl,littlstar/chromium.src,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk-efl,markYoungH/chromium.src,Jonekee/chromium.src,ondra-novak/chromium.src,Chilledheart/chromium,timopulkkinen/BubbleFish,hujiajie/pa-chromium,Just-D/chromium-1,ltilve/chromium,jaruba/chromium.src,dednal/chromium.src,hujiajie/pa-chromium,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk
# Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Chromium presubmit script for src/ash See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for more details on the presubmit API built into gcl. """ def GetPreferredTrySlaves(): return ['linux_chromeos'] Add linux_chromeos_clang to the list of automatic trybots. BUG=none TEST=none Review URL: https://chromiumcodereview.appspot.com/10833037 git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@148600 0039d316-1c4b-4281-b951-d872f2087c98
# Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Chromium presubmit script for src/ash See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for more details on the presubmit API built into gcl. """ def GetPreferredTrySlaves(): return ['linux_chromeos', 'linux_chromeos_clang']
<commit_before># Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Chromium presubmit script for src/ash See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for more details on the presubmit API built into gcl. """ def GetPreferredTrySlaves(): return ['linux_chromeos'] <commit_msg>Add linux_chromeos_clang to the list of automatic trybots. BUG=none TEST=none Review URL: https://chromiumcodereview.appspot.com/10833037 git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@148600 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
# Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Chromium presubmit script for src/ash See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for more details on the presubmit API built into gcl. """ def GetPreferredTrySlaves(): return ['linux_chromeos', 'linux_chromeos_clang']
# Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Chromium presubmit script for src/ash See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for more details on the presubmit API built into gcl. """ def GetPreferredTrySlaves(): return ['linux_chromeos'] Add linux_chromeos_clang to the list of automatic trybots. BUG=none TEST=none Review URL: https://chromiumcodereview.appspot.com/10833037 git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@148600 0039d316-1c4b-4281-b951-d872f2087c98# Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Chromium presubmit script for src/ash See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for more details on the presubmit API built into gcl. """ def GetPreferredTrySlaves(): return ['linux_chromeos', 'linux_chromeos_clang']
<commit_before># Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Chromium presubmit script for src/ash See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for more details on the presubmit API built into gcl. """ def GetPreferredTrySlaves(): return ['linux_chromeos'] <commit_msg>Add linux_chromeos_clang to the list of automatic trybots. BUG=none TEST=none Review URL: https://chromiumcodereview.appspot.com/10833037 git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@148600 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Chromium presubmit script for src/ash See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts for more details on the presubmit API built into gcl. """ def GetPreferredTrySlaves(): return ['linux_chromeos', 'linux_chromeos_clang']
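A quick, hypothetical way to sanity-check a presubmit hook like the one in this record is to load the file by path and assert on the returned bot list. The file path, module name and standalone invocation below are assumptions for illustration only; Chromium normally executes PRESUBMIT.py through depot_tools rather than directly.

# check_trybots.py: illustrative sanity check, not part of the Chromium presubmit framework.
import importlib.util

# PRESUBMIT.py is not importable by module name, so load it from its path (path assumed).
spec = importlib.util.spec_from_file_location("ash_presubmit", "ash/PRESUBMIT.py")
presubmit = importlib.util.module_from_spec(spec)
spec.loader.exec_module(presubmit)

bots = presubmit.GetPreferredTrySlaves()
# After this commit both ChromeOS bots should be requested automatically.
assert "linux_chromeos" in bots
assert "linux_chromeos_clang" in bots
print("preferred try slaves:", bots)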
fac9a9a461049b164d478c58435725979b46d400
usingnamespace/forms/csrf.py
usingnamespace/forms/csrf.py
# File: csrf.py # Author: Bert JW Regeer <bertjw@regeer.org> # Created: 2013-01-26 import colander import deform @colander.deferred def deferred_csrf_default(node, kw): request = kw.get('request') csrf_token = request.session.get_csrf_token() return csrf_token @colander.deferred def deferred_csrf_validator(node, kw): def validate_csrf(node, value): request = kw.get('request') csrf_token = request.session.get_csrf_token() if value != csrf_token: raise colander.Invalid(node, _('Invalid cross-site scripting token')) return validate_csrf class CSRFSchema(colander.Schema): csrf_token = colander.SchemaNode( colander.String(), default=deferred_csrf_default, validator=deferred_csrf_validator, widget=deform.widget.HiddenWidget() )
# File: csrf.py # Author: Bert JW Regeer <bertjw@regeer.org> # Created: 2013-01-26 import colander import deform @colander.deferred def deferred_csrf_default(node, kw): request = kw.get('request') if request is None: raise KeyError('Require bind: request') csrf_token = request.session.get_csrf_token() return csrf_token @colander.deferred def deferred_csrf_validator(node, kw): request = kw.get('request') if request is None: raise KeyError('Require bind: request') def validate_csrf(node, value): csrf_token = request.session.get_csrf_token() if value != csrf_token: raise colander.Invalid(node, _('Invalid cross-site scripting token')) return validate_csrf class CSRFSchema(colander.Schema): csrf_token = colander.SchemaNode( colander.String(), default=deferred_csrf_default, validator=deferred_csrf_validator, widget=deform.widget.HiddenWidget() )
Add some checking that stuff is bound
Add some checking that stuff is bound Specifically we require that request is bound to the form.
Python
isc
usingnamespace/usingnamespace
# File: csrf.py # Author: Bert JW Regeer <bertjw@regeer.org> # Created: 2013-01-26 import colander import deform @colander.deferred def deferred_csrf_default(node, kw): request = kw.get('request') csrf_token = request.session.get_csrf_token() return csrf_token @colander.deferred def deferred_csrf_validator(node, kw): def validate_csrf(node, value): request = kw.get('request') csrf_token = request.session.get_csrf_token() if value != csrf_token: raise colander.Invalid(node, _('Invalid cross-site scripting token')) return validate_csrf class CSRFSchema(colander.Schema): csrf_token = colander.SchemaNode( colander.String(), default=deferred_csrf_default, validator=deferred_csrf_validator, widget=deform.widget.HiddenWidget() ) Add some checking that stuff is bound Specifically we require that request is bound to the form.
# File: csrf.py # Author: Bert JW Regeer <bertjw@regeer.org> # Created: 2013-01-26 import colander import deform @colander.deferred def deferred_csrf_default(node, kw): request = kw.get('request') if request is None: raise KeyError('Require bind: request') csrf_token = request.session.get_csrf_token() return csrf_token @colander.deferred def deferred_csrf_validator(node, kw): request = kw.get('request') if request is None: raise KeyError('Require bind: request') def validate_csrf(node, value): csrf_token = request.session.get_csrf_token() if value != csrf_token: raise colander.Invalid(node, _('Invalid cross-site scripting token')) return validate_csrf class CSRFSchema(colander.Schema): csrf_token = colander.SchemaNode( colander.String(), default=deferred_csrf_default, validator=deferred_csrf_validator, widget=deform.widget.HiddenWidget() )
<commit_before># File: csrf.py # Author: Bert JW Regeer <bertjw@regeer.org> # Created: 2013-01-26 import colander import deform @colander.deferred def deferred_csrf_default(node, kw): request = kw.get('request') csrf_token = request.session.get_csrf_token() return csrf_token @colander.deferred def deferred_csrf_validator(node, kw): def validate_csrf(node, value): request = kw.get('request') csrf_token = request.session.get_csrf_token() if value != csrf_token: raise colander.Invalid(node, _('Invalid cross-site scripting token')) return validate_csrf class CSRFSchema(colander.Schema): csrf_token = colander.SchemaNode( colander.String(), default=deferred_csrf_default, validator=deferred_csrf_validator, widget=deform.widget.HiddenWidget() ) <commit_msg>Add some checking that stuff is bound Specifically we require that request is bound to the form.<commit_after>
# File: csrf.py # Author: Bert JW Regeer <bertjw@regeer.org> # Created: 2013-01-26 import colander import deform @colander.deferred def deferred_csrf_default(node, kw): request = kw.get('request') if request is None: raise KeyError('Require bind: request') csrf_token = request.session.get_csrf_token() return csrf_token @colander.deferred def deferred_csrf_validator(node, kw): request = kw.get('request') if request is None: raise KeyError('Require bind: request') def validate_csrf(node, value): csrf_token = request.session.get_csrf_token() if value != csrf_token: raise colander.Invalid(node, _('Invalid cross-site scripting token')) return validate_csrf class CSRFSchema(colander.Schema): csrf_token = colander.SchemaNode( colander.String(), default=deferred_csrf_default, validator=deferred_csrf_validator, widget=deform.widget.HiddenWidget() )
# File: csrf.py # Author: Bert JW Regeer <bertjw@regeer.org> # Created: 2013-01-26 import colander import deform @colander.deferred def deferred_csrf_default(node, kw): request = kw.get('request') csrf_token = request.session.get_csrf_token() return csrf_token @colander.deferred def deferred_csrf_validator(node, kw): def validate_csrf(node, value): request = kw.get('request') csrf_token = request.session.get_csrf_token() if value != csrf_token: raise colander.Invalid(node, _('Invalid cross-site scripting token')) return validate_csrf class CSRFSchema(colander.Schema): csrf_token = colander.SchemaNode( colander.String(), default=deferred_csrf_default, validator=deferred_csrf_validator, widget=deform.widget.HiddenWidget() ) Add some checking that stuff is bound Specifically we require that request is bound to the form.# File: csrf.py # Author: Bert JW Regeer <bertjw@regeer.org> # Created: 2013-01-26 import colander import deform @colander.deferred def deferred_csrf_default(node, kw): request = kw.get('request') if request is None: raise KeyError('Require bind: request') csrf_token = request.session.get_csrf_token() return csrf_token @colander.deferred def deferred_csrf_validator(node, kw): request = kw.get('request') if request is None: raise KeyError('Require bind: request') def validate_csrf(node, value): csrf_token = request.session.get_csrf_token() if value != csrf_token: raise colander.Invalid(node, _('Invalid cross-site scripting token')) return validate_csrf class CSRFSchema(colander.Schema): csrf_token = colander.SchemaNode( colander.String(), default=deferred_csrf_default, validator=deferred_csrf_validator, widget=deform.widget.HiddenWidget() )
<commit_before># File: csrf.py # Author: Bert JW Regeer <bertjw@regeer.org> # Created: 2013-01-26 import colander import deform @colander.deferred def deferred_csrf_default(node, kw): request = kw.get('request') csrf_token = request.session.get_csrf_token() return csrf_token @colander.deferred def deferred_csrf_validator(node, kw): def validate_csrf(node, value): request = kw.get('request') csrf_token = request.session.get_csrf_token() if value != csrf_token: raise colander.Invalid(node, _('Invalid cross-site scripting token')) return validate_csrf class CSRFSchema(colander.Schema): csrf_token = colander.SchemaNode( colander.String(), default=deferred_csrf_default, validator=deferred_csrf_validator, widget=deform.widget.HiddenWidget() ) <commit_msg>Add some checking that stuff is bound Specifically we require that request is bound to the form.<commit_after># File: csrf.py # Author: Bert JW Regeer <bertjw@regeer.org> # Created: 2013-01-26 import colander import deform @colander.deferred def deferred_csrf_default(node, kw): request = kw.get('request') if request is None: raise KeyError('Require bind: request') csrf_token = request.session.get_csrf_token() return csrf_token @colander.deferred def deferred_csrf_validator(node, kw): request = kw.get('request') if request is None: raise KeyError('Require bind: request') def validate_csrf(node, value): csrf_token = request.session.get_csrf_token() if value != csrf_token: raise colander.Invalid(node, _('Invalid cross-site scripting token')) return validate_csrf class CSRFSchema(colander.Schema): csrf_token = colander.SchemaNode( colander.String(), default=deferred_csrf_default, validator=deferred_csrf_validator, widget=deform.widget.HiddenWidget() )
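colander resolves colander.deferred callables only when a schema is bound, so the CSRFSchema in this record has to be bound to the current request before the form is rendered or validated; with this commit, forgetting that binding now fails fast with an explicit KeyError('Require bind: request') at bind time. A minimal usage sketch, assuming a Pyramid-style request whose session exposes get_csrf_token() (exactly what the record's code relies on); the import path is taken from the record's file location and the view helper name is illustrative.

import deform
from usingnamespace.forms.csrf import CSRFSchema  # import path assumed from the record

def make_login_form(request):
    # bind() evaluates the deferred default and validator with kw={'request': request}.
    schema = CSRFSchema().bind(request=request)
    return deform.Form(schema, buttons=("submit",))

Note that the validator message is wrapped in _(), presumably a translation function imported elsewhere in the project.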
ee7d663f3c7e5c52581527167938d81ca2a07a3d
bisnode/models.py
bisnode/models.py
from datetime import datetime from django.db import models from .constants import COMPANY_RATING_REPORT, RATING_CHOICES from .bisnode import get_bisnode_company_report def bisnode_date_to_date(bisnode_date): formatted_datetime = datetime.strptime(bisnode_date, "%Y%m%d") return formatted_datetime.date() class BisnodeRatingReport(models.Model): organization_number = models.CharField(max_length=10) rating = models.CharField(max_length=3, choices=RATING_CHOICES, null=True, blank=True) date_of_rating = models.DateField(blank=True, null=True) registration_date = models.DateField(blank=True, null=True) last_updated = models.DateTimeField(auto_now=True) def get(self): rating_report = get_bisnode_company_report( report_type=COMPANY_RATING_REPORT, organization_number=self.organization_number) company_data = rating_report.generalCompanyData[0] self.rating = company_data['ratingCode'] self.date_of_rating = bisnode_date_to_date( company_data['dateOfRating']) self.registration_date = bisnode_date_to_date( company_data['dateReg']) self.save()
from datetime import datetime from django.db import models from .constants import COMPANY_RATING_REPORT, RATING_CHOICES from .bisnode import get_bisnode_company_report def bisnode_date_to_date(bisnode_date): formatted_datetime = datetime.strptime(bisnode_date, "%Y%m%d") return formatted_datetime.date() class BisnodeRatingReport(models.Model): organization_number = models.CharField(max_length=10, unique=True) rating = models.CharField(max_length=3, choices=RATING_CHOICES, null=True, blank=True) date_of_rating = models.DateField(blank=True, null=True) registration_date = models.DateField(blank=True, null=True) last_updated = models.DateTimeField(auto_now=True) def get(self): rating_report = get_bisnode_company_report( report_type=COMPANY_RATING_REPORT, organization_number=self.organization_number) company_data = rating_report.generalCompanyData[0] self.rating = company_data['ratingCode'] self.date_of_rating = bisnode_date_to_date( company_data['dateOfRating']) self.registration_date = bisnode_date_to_date( company_data['dateReg']) self.save()
Make Organization Number a unique field
Make Organization Number a unique field
Python
mit
FundedByMe/django-bisnode
from datetime import datetime from django.db import models from .constants import COMPANY_RATING_REPORT, RATING_CHOICES from .bisnode import get_bisnode_company_report def bisnode_date_to_date(bisnode_date): formatted_datetime = datetime.strptime(bisnode_date, "%Y%m%d") return formatted_datetime.date() class BisnodeRatingReport(models.Model): organization_number = models.CharField(max_length=10) rating = models.CharField(max_length=3, choices=RATING_CHOICES, null=True, blank=True) date_of_rating = models.DateField(blank=True, null=True) registration_date = models.DateField(blank=True, null=True) last_updated = models.DateTimeField(auto_now=True) def get(self): rating_report = get_bisnode_company_report( report_type=COMPANY_RATING_REPORT, organization_number=self.organization_number) company_data = rating_report.generalCompanyData[0] self.rating = company_data['ratingCode'] self.date_of_rating = bisnode_date_to_date( company_data['dateOfRating']) self.registration_date = bisnode_date_to_date( company_data['dateReg']) self.save() Make Organization Number a unique field
from datetime import datetime from django.db import models from .constants import COMPANY_RATING_REPORT, RATING_CHOICES from .bisnode import get_bisnode_company_report def bisnode_date_to_date(bisnode_date): formatted_datetime = datetime.strptime(bisnode_date, "%Y%m%d") return formatted_datetime.date() class BisnodeRatingReport(models.Model): organization_number = models.CharField(max_length=10, unique=True) rating = models.CharField(max_length=3, choices=RATING_CHOICES, null=True, blank=True) date_of_rating = models.DateField(blank=True, null=True) registration_date = models.DateField(blank=True, null=True) last_updated = models.DateTimeField(auto_now=True) def get(self): rating_report = get_bisnode_company_report( report_type=COMPANY_RATING_REPORT, organization_number=self.organization_number) company_data = rating_report.generalCompanyData[0] self.rating = company_data['ratingCode'] self.date_of_rating = bisnode_date_to_date( company_data['dateOfRating']) self.registration_date = bisnode_date_to_date( company_data['dateReg']) self.save()
<commit_before>from datetime import datetime from django.db import models from .constants import COMPANY_RATING_REPORT, RATING_CHOICES from .bisnode import get_bisnode_company_report def bisnode_date_to_date(bisnode_date): formatted_datetime = datetime.strptime(bisnode_date, "%Y%m%d") return formatted_datetime.date() class BisnodeRatingReport(models.Model): organization_number = models.CharField(max_length=10) rating = models.CharField(max_length=3, choices=RATING_CHOICES, null=True, blank=True) date_of_rating = models.DateField(blank=True, null=True) registration_date = models.DateField(blank=True, null=True) last_updated = models.DateTimeField(auto_now=True) def get(self): rating_report = get_bisnode_company_report( report_type=COMPANY_RATING_REPORT, organization_number=self.organization_number) company_data = rating_report.generalCompanyData[0] self.rating = company_data['ratingCode'] self.date_of_rating = bisnode_date_to_date( company_data['dateOfRating']) self.registration_date = bisnode_date_to_date( company_data['dateReg']) self.save() <commit_msg>Make Organization Number a unique field<commit_after>
from datetime import datetime from django.db import models from .constants import COMPANY_RATING_REPORT, RATING_CHOICES from .bisnode import get_bisnode_company_report def bisnode_date_to_date(bisnode_date): formatted_datetime = datetime.strptime(bisnode_date, "%Y%m%d") return formatted_datetime.date() class BisnodeRatingReport(models.Model): organization_number = models.CharField(max_length=10, unique=True) rating = models.CharField(max_length=3, choices=RATING_CHOICES, null=True, blank=True) date_of_rating = models.DateField(blank=True, null=True) registration_date = models.DateField(blank=True, null=True) last_updated = models.DateTimeField(auto_now=True) def get(self): rating_report = get_bisnode_company_report( report_type=COMPANY_RATING_REPORT, organization_number=self.organization_number) company_data = rating_report.generalCompanyData[0] self.rating = company_data['ratingCode'] self.date_of_rating = bisnode_date_to_date( company_data['dateOfRating']) self.registration_date = bisnode_date_to_date( company_data['dateReg']) self.save()
from datetime import datetime from django.db import models from .constants import COMPANY_RATING_REPORT, RATING_CHOICES from .bisnode import get_bisnode_company_report def bisnode_date_to_date(bisnode_date): formatted_datetime = datetime.strptime(bisnode_date, "%Y%m%d") return formatted_datetime.date() class BisnodeRatingReport(models.Model): organization_number = models.CharField(max_length=10) rating = models.CharField(max_length=3, choices=RATING_CHOICES, null=True, blank=True) date_of_rating = models.DateField(blank=True, null=True) registration_date = models.DateField(blank=True, null=True) last_updated = models.DateTimeField(auto_now=True) def get(self): rating_report = get_bisnode_company_report( report_type=COMPANY_RATING_REPORT, organization_number=self.organization_number) company_data = rating_report.generalCompanyData[0] self.rating = company_data['ratingCode'] self.date_of_rating = bisnode_date_to_date( company_data['dateOfRating']) self.registration_date = bisnode_date_to_date( company_data['dateReg']) self.save() Make Organization Number a unique fieldfrom datetime import datetime from django.db import models from .constants import COMPANY_RATING_REPORT, RATING_CHOICES from .bisnode import get_bisnode_company_report def bisnode_date_to_date(bisnode_date): formatted_datetime = datetime.strptime(bisnode_date, "%Y%m%d") return formatted_datetime.date() class BisnodeRatingReport(models.Model): organization_number = models.CharField(max_length=10, unique=True) rating = models.CharField(max_length=3, choices=RATING_CHOICES, null=True, blank=True) date_of_rating = models.DateField(blank=True, null=True) registration_date = models.DateField(blank=True, null=True) last_updated = models.DateTimeField(auto_now=True) def get(self): rating_report = get_bisnode_company_report( report_type=COMPANY_RATING_REPORT, organization_number=self.organization_number) company_data = rating_report.generalCompanyData[0] self.rating = company_data['ratingCode'] self.date_of_rating = bisnode_date_to_date( company_data['dateOfRating']) self.registration_date = bisnode_date_to_date( company_data['dateReg']) self.save()
<commit_before>from datetime import datetime from django.db import models from .constants import COMPANY_RATING_REPORT, RATING_CHOICES from .bisnode import get_bisnode_company_report def bisnode_date_to_date(bisnode_date): formatted_datetime = datetime.strptime(bisnode_date, "%Y%m%d") return formatted_datetime.date() class BisnodeRatingReport(models.Model): organization_number = models.CharField(max_length=10) rating = models.CharField(max_length=3, choices=RATING_CHOICES, null=True, blank=True) date_of_rating = models.DateField(blank=True, null=True) registration_date = models.DateField(blank=True, null=True) last_updated = models.DateTimeField(auto_now=True) def get(self): rating_report = get_bisnode_company_report( report_type=COMPANY_RATING_REPORT, organization_number=self.organization_number) company_data = rating_report.generalCompanyData[0] self.rating = company_data['ratingCode'] self.date_of_rating = bisnode_date_to_date( company_data['dateOfRating']) self.registration_date = bisnode_date_to_date( company_data['dateReg']) self.save() <commit_msg>Make Organization Number a unique field<commit_after>from datetime import datetime from django.db import models from .constants import COMPANY_RATING_REPORT, RATING_CHOICES from .bisnode import get_bisnode_company_report def bisnode_date_to_date(bisnode_date): formatted_datetime = datetime.strptime(bisnode_date, "%Y%m%d") return formatted_datetime.date() class BisnodeRatingReport(models.Model): organization_number = models.CharField(max_length=10, unique=True) rating = models.CharField(max_length=3, choices=RATING_CHOICES, null=True, blank=True) date_of_rating = models.DateField(blank=True, null=True) registration_date = models.DateField(blank=True, null=True) last_updated = models.DateTimeField(auto_now=True) def get(self): rating_report = get_bisnode_company_report( report_type=COMPANY_RATING_REPORT, organization_number=self.organization_number) company_data = rating_report.generalCompanyData[0] self.rating = company_data['ratingCode'] self.date_of_rating = bisnode_date_to_date( company_data['dateOfRating']) self.registration_date = bisnode_date_to_date( company_data['dateReg']) self.save()
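Adding unique=True to an existing column is a schema change, so alongside the model edit it normally needs a database migration, and a second rating report for the same organisation now raises an integrity error rather than silently creating a duplicate row. The sketch below is illustrative only: it assumes the project uses Django's built-in migrations (the record does not say which migration tool is in use), and the app label, file name and dependency are placeholders.

# bisnode/migrations/00XX_unique_organization_number.py: placeholder name for illustration.
from django.db import migrations, models

class Migration(migrations.Migration):

    dependencies = [
        ("bisnode", "0001_initial"),  # assumed previous migration
    ]

    operations = [
        migrations.AlterField(
            model_name="bisnoderatingreport",
            name="organization_number",
            field=models.CharField(max_length=10, unique=True),
        ),
    ]

Callers that can legitimately see the same organisation number twice would typically switch to get_or_create() or catch django.db.IntegrityError once this constraint is in place.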
4078dcd4a35dd09c610bb5e9298a87828a0acf8e
apps/core/models.py
apps/core/models.py
from django.db import models # Create your models here.
from django.db import models from django.utils.timezone import now class DateTimeCreatedField(models.DateTimeField): """ DateTimeField that by default, sets editable=False, blank=True, default=now. """ def __init__(self, *args, **kwargs): kwargs.setdefault('editable', False) kwargs.setdefault('blank', True) kwargs.setdefault('default', now) super(DateTimeCreatedField, self).__init__(*args, **kwargs) def get_internal_type(self): return "DateTimeField" def south_field_triple(self): "Returns a suitable description of this field for South." from south.modelsinspector import introspector field_class = "django.db.models.fields.DateTimeField" args, kwargs = introspector(self) return (field_class, args, kwargs) class DateTimeModifiedField(DateTimeCreatedField): """ DateTimeField that by default, sets editable=False, blank=True, default=datetime.now. Sets value to now() on each save of the model. """ def pre_save(self, model, add): value = now() setattr(model, self.attname, value) return value class BaseModel(models.Model): """ An abstract base class model that provides: - date_created - date_modified """ date_created = DateTimeCreatedField() date_modified = DateTimeModifiedField() class Meta: get_latest_by = 'date_modified' ordering = ('-date_modified', '-date_created',) abstract = True
Implement an abstract base class model
Implement an abstract base class model
Python
mit
SoPR/horas,SoPR/horas,SoPR/horas,SoPR/horas
from django.db import models # Create your models here. Implement an abstract base class model
from django.db import models from django.utils.timezone import now class DateTimeCreatedField(models.DateTimeField): """ DateTimeField that by default, sets editable=False, blank=True, default=now. """ def __init__(self, *args, **kwargs): kwargs.setdefault('editable', False) kwargs.setdefault('blank', True) kwargs.setdefault('default', now) super(DateTimeCreatedField, self).__init__(*args, **kwargs) def get_internal_type(self): return "DateTimeField" def south_field_triple(self): "Returns a suitable description of this field for South." from south.modelsinspector import introspector field_class = "django.db.models.fields.DateTimeField" args, kwargs = introspector(self) return (field_class, args, kwargs) class DateTimeModifiedField(DateTimeCreatedField): """ DateTimeField that by default, sets editable=False, blank=True, default=datetime.now. Sets value to now() on each save of the model. """ def pre_save(self, model, add): value = now() setattr(model, self.attname, value) return value class BaseModel(models.Model): """ An abstract base class model that provides: - date_created - date_modified """ date_created = DateTimeCreatedField() date_modified = DateTimeModifiedField() class Meta: get_latest_by = 'date_modified' ordering = ('-date_modified', '-date_created',) abstract = True
<commit_before>from django.db import models # Create your models here. <commit_msg>Implement an abstract base class model<commit_after>
from django.db import models from django.utils.timezone import now class DateTimeCreatedField(models.DateTimeField): """ DateTimeField that by default, sets editable=False, blank=True, default=now. """ def __init__(self, *args, **kwargs): kwargs.setdefault('editable', False) kwargs.setdefault('blank', True) kwargs.setdefault('default', now) super(DateTimeCreatedField, self).__init__(*args, **kwargs) def get_internal_type(self): return "DateTimeField" def south_field_triple(self): "Returns a suitable description of this field for South." from south.modelsinspector import introspector field_class = "django.db.models.fields.DateTimeField" args, kwargs = introspector(self) return (field_class, args, kwargs) class DateTimeModifiedField(DateTimeCreatedField): """ DateTimeField that by default, sets editable=False, blank=True, default=datetime.now. Sets value to now() on each save of the model. """ def pre_save(self, model, add): value = now() setattr(model, self.attname, value) return value class BaseModel(models.Model): """ An abstract base class model that provides: - date_created - date_modified """ date_created = DateTimeCreatedField() date_modified = DateTimeModifiedField() class Meta: get_latest_by = 'date_modified' ordering = ('-date_modified', '-date_created',) abstract = True
from django.db import models # Create your models here. Implement an abstract base class modelfrom django.db import models from django.utils.timezone import now class DateTimeCreatedField(models.DateTimeField): """ DateTimeField that by default, sets editable=False, blank=True, default=now. """ def __init__(self, *args, **kwargs): kwargs.setdefault('editable', False) kwargs.setdefault('blank', True) kwargs.setdefault('default', now) super(DateTimeCreatedField, self).__init__(*args, **kwargs) def get_internal_type(self): return "DateTimeField" def south_field_triple(self): "Returns a suitable description of this field for South." from south.modelsinspector import introspector field_class = "django.db.models.fields.DateTimeField" args, kwargs = introspector(self) return (field_class, args, kwargs) class DateTimeModifiedField(DateTimeCreatedField): """ DateTimeField that by default, sets editable=False, blank=True, default=datetime.now. Sets value to now() on each save of the model. """ def pre_save(self, model, add): value = now() setattr(model, self.attname, value) return value class BaseModel(models.Model): """ An abstract base class model that provides: - date_created - date_modified """ date_created = DateTimeCreatedField() date_modified = DateTimeModifiedField() class Meta: get_latest_by = 'date_modified' ordering = ('-date_modified', '-date_created',) abstract = True
<commit_before>from django.db import models # Create your models here. <commit_msg>Implement an abstract base class model<commit_after>from django.db import models from django.utils.timezone import now class DateTimeCreatedField(models.DateTimeField): """ DateTimeField that by default, sets editable=False, blank=True, default=now. """ def __init__(self, *args, **kwargs): kwargs.setdefault('editable', False) kwargs.setdefault('blank', True) kwargs.setdefault('default', now) super(DateTimeCreatedField, self).__init__(*args, **kwargs) def get_internal_type(self): return "DateTimeField" def south_field_triple(self): "Returns a suitable description of this field for South." from south.modelsinspector import introspector field_class = "django.db.models.fields.DateTimeField" args, kwargs = introspector(self) return (field_class, args, kwargs) class DateTimeModifiedField(DateTimeCreatedField): """ DateTimeField that by default, sets editable=False, blank=True, default=datetime.now. Sets value to now() on each save of the model. """ def pre_save(self, model, add): value = now() setattr(model, self.attname, value) return value class BaseModel(models.Model): """ An abstract base class model that provides: - date_created - date_modified """ date_created = DateTimeCreatedField() date_modified = DateTimeModifiedField() class Meta: get_latest_by = 'date_modified' ordering = ('-date_modified', '-date_created',) abstract = True
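The point of the abstract BaseModel in this record is that concrete models inherit the two timestamp columns without repeating them, and DateTimeModifiedField.pre_save() refreshes date_modified on every save. A small usage sketch; the model name Post is illustrative and the import path simply follows the record's file location.

from django.db import models
from apps.core.models import BaseModel  # path taken from the record; adjust to the project layout

class Post(BaseModel):
    # Inherits date_created and date_modified; because abstract=True is not
    # inherited, Post is a normal concrete model with its own table.
    title = models.CharField(max_length=200)

# post = Post.objects.create(title="hello")  # both timestamps set to now()
# post.title = "hello again"
# post.save()                                # pre_save() bumps date_modified automatically

The Meta options ordering and get_latest_by carry over to Post as well, so Post.objects.latest() works out of the box.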
66cc06698d062a679bb0130b435c7d344116c664
test.py
test.py
import sys import django from django.conf import settings settings.configure( DEBUG=True, DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', } }, INSTALLED_APPS=( 'django.contrib.auth', 'django.contrib.contenttypes', 'shopify_auth', ), AUTHENTICATION_BACKENDS=( 'shopify_auth.backends.ShopUserBackend', ), MIDDLEWARE_CLASSES=( 'django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', ), ROOT_URLCONF = 'shopify_auth.urls', SHOPIFY_APP_NAME='Test App', SHOPIFY_APP_API_KEY='test-api-key', SHOPIFY_APP_API_SECRET='test-api-secret', SHOPIFY_APP_API_SCOPE='read_products', SHOPIFY_APP_IS_EMBEDDED=True, SHOPIFY_APP_DEV_MODE=False, ) django.setup() from django.test.runner import DiscoverRunner test_runner = DiscoverRunner() failures = test_runner.run_tests(['shopify_auth']) if failures: sys.exit(failures)
import sys import django from django.conf import settings settings.configure( DEBUG=True, DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', } }, INSTALLED_APPS=( 'django.contrib.auth', 'django.contrib.contenttypes', 'shopify_auth', ), AUTHENTICATION_BACKENDS=( 'shopify_auth.backends.ShopUserBackend', ), MIDDLEWARE_CLASSES=( 'django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', ), TEMPLATES=[ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'APP_DIRS': True } ], ROOT_URLCONF = 'shopify_auth.urls', SHOPIFY_APP_NAME='Test App', SHOPIFY_APP_API_KEY='test-api-key', SHOPIFY_APP_API_SECRET='test-api-secret', SHOPIFY_APP_API_SCOPE='read_products', SHOPIFY_APP_IS_EMBEDDED=True, SHOPIFY_APP_DEV_MODE=False, ) django.setup() from django.test.runner import DiscoverRunner test_runner = DiscoverRunner() failures = test_runner.run_tests(['shopify_auth']) if failures: sys.exit(failures)
Add TEMPLATES configuration for Django v1.10 purposes.
Add TEMPLATES configuration for Django v1.10 purposes.
Python
mit
discolabs/django-shopify-auth,discolabs/django-shopify-auth
import sys import django from django.conf import settings settings.configure( DEBUG=True, DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', } }, INSTALLED_APPS=( 'django.contrib.auth', 'django.contrib.contenttypes', 'shopify_auth', ), AUTHENTICATION_BACKENDS=( 'shopify_auth.backends.ShopUserBackend', ), MIDDLEWARE_CLASSES=( 'django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', ), ROOT_URLCONF = 'shopify_auth.urls', SHOPIFY_APP_NAME='Test App', SHOPIFY_APP_API_KEY='test-api-key', SHOPIFY_APP_API_SECRET='test-api-secret', SHOPIFY_APP_API_SCOPE='read_products', SHOPIFY_APP_IS_EMBEDDED=True, SHOPIFY_APP_DEV_MODE=False, ) django.setup() from django.test.runner import DiscoverRunner test_runner = DiscoverRunner() failures = test_runner.run_tests(['shopify_auth']) if failures: sys.exit(failures) Add TEMPLATES configuration for Django v1.10 purposes.
import sys import django from django.conf import settings settings.configure( DEBUG=True, DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', } }, INSTALLED_APPS=( 'django.contrib.auth', 'django.contrib.contenttypes', 'shopify_auth', ), AUTHENTICATION_BACKENDS=( 'shopify_auth.backends.ShopUserBackend', ), MIDDLEWARE_CLASSES=( 'django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', ), TEMPLATES=[ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'APP_DIRS': True } ], ROOT_URLCONF = 'shopify_auth.urls', SHOPIFY_APP_NAME='Test App', SHOPIFY_APP_API_KEY='test-api-key', SHOPIFY_APP_API_SECRET='test-api-secret', SHOPIFY_APP_API_SCOPE='read_products', SHOPIFY_APP_IS_EMBEDDED=True, SHOPIFY_APP_DEV_MODE=False, ) django.setup() from django.test.runner import DiscoverRunner test_runner = DiscoverRunner() failures = test_runner.run_tests(['shopify_auth']) if failures: sys.exit(failures)
<commit_before>import sys import django from django.conf import settings settings.configure( DEBUG=True, DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', } }, INSTALLED_APPS=( 'django.contrib.auth', 'django.contrib.contenttypes', 'shopify_auth', ), AUTHENTICATION_BACKENDS=( 'shopify_auth.backends.ShopUserBackend', ), MIDDLEWARE_CLASSES=( 'django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', ), ROOT_URLCONF = 'shopify_auth.urls', SHOPIFY_APP_NAME='Test App', SHOPIFY_APP_API_KEY='test-api-key', SHOPIFY_APP_API_SECRET='test-api-secret', SHOPIFY_APP_API_SCOPE='read_products', SHOPIFY_APP_IS_EMBEDDED=True, SHOPIFY_APP_DEV_MODE=False, ) django.setup() from django.test.runner import DiscoverRunner test_runner = DiscoverRunner() failures = test_runner.run_tests(['shopify_auth']) if failures: sys.exit(failures) <commit_msg>Add TEMPLATES configuration for Django v1.10 purposes.<commit_after>
import sys import django from django.conf import settings settings.configure( DEBUG=True, DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', } }, INSTALLED_APPS=( 'django.contrib.auth', 'django.contrib.contenttypes', 'shopify_auth', ), AUTHENTICATION_BACKENDS=( 'shopify_auth.backends.ShopUserBackend', ), MIDDLEWARE_CLASSES=( 'django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', ), TEMPLATES=[ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'APP_DIRS': True } ], ROOT_URLCONF = 'shopify_auth.urls', SHOPIFY_APP_NAME='Test App', SHOPIFY_APP_API_KEY='test-api-key', SHOPIFY_APP_API_SECRET='test-api-secret', SHOPIFY_APP_API_SCOPE='read_products', SHOPIFY_APP_IS_EMBEDDED=True, SHOPIFY_APP_DEV_MODE=False, ) django.setup() from django.test.runner import DiscoverRunner test_runner = DiscoverRunner() failures = test_runner.run_tests(['shopify_auth']) if failures: sys.exit(failures)
import sys import django from django.conf import settings settings.configure( DEBUG=True, DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', } }, INSTALLED_APPS=( 'django.contrib.auth', 'django.contrib.contenttypes', 'shopify_auth', ), AUTHENTICATION_BACKENDS=( 'shopify_auth.backends.ShopUserBackend', ), MIDDLEWARE_CLASSES=( 'django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', ), ROOT_URLCONF = 'shopify_auth.urls', SHOPIFY_APP_NAME='Test App', SHOPIFY_APP_API_KEY='test-api-key', SHOPIFY_APP_API_SECRET='test-api-secret', SHOPIFY_APP_API_SCOPE='read_products', SHOPIFY_APP_IS_EMBEDDED=True, SHOPIFY_APP_DEV_MODE=False, ) django.setup() from django.test.runner import DiscoverRunner test_runner = DiscoverRunner() failures = test_runner.run_tests(['shopify_auth']) if failures: sys.exit(failures) Add TEMPLATES configuration for Django v1.10 purposes.import sys import django from django.conf import settings settings.configure( DEBUG=True, DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', } }, INSTALLED_APPS=( 'django.contrib.auth', 'django.contrib.contenttypes', 'shopify_auth', ), AUTHENTICATION_BACKENDS=( 'shopify_auth.backends.ShopUserBackend', ), MIDDLEWARE_CLASSES=( 'django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', ), TEMPLATES=[ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'APP_DIRS': True } ], ROOT_URLCONF = 'shopify_auth.urls', SHOPIFY_APP_NAME='Test App', SHOPIFY_APP_API_KEY='test-api-key', SHOPIFY_APP_API_SECRET='test-api-secret', SHOPIFY_APP_API_SCOPE='read_products', SHOPIFY_APP_IS_EMBEDDED=True, SHOPIFY_APP_DEV_MODE=False, ) django.setup() from django.test.runner import DiscoverRunner test_runner = DiscoverRunner() failures = test_runner.run_tests(['shopify_auth']) if failures: sys.exit(failures)
<commit_before>import sys import django from django.conf import settings settings.configure( DEBUG=True, DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', } }, INSTALLED_APPS=( 'django.contrib.auth', 'django.contrib.contenttypes', 'shopify_auth', ), AUTHENTICATION_BACKENDS=( 'shopify_auth.backends.ShopUserBackend', ), MIDDLEWARE_CLASSES=( 'django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', ), ROOT_URLCONF = 'shopify_auth.urls', SHOPIFY_APP_NAME='Test App', SHOPIFY_APP_API_KEY='test-api-key', SHOPIFY_APP_API_SECRET='test-api-secret', SHOPIFY_APP_API_SCOPE='read_products', SHOPIFY_APP_IS_EMBEDDED=True, SHOPIFY_APP_DEV_MODE=False, ) django.setup() from django.test.runner import DiscoverRunner test_runner = DiscoverRunner() failures = test_runner.run_tests(['shopify_auth']) if failures: sys.exit(failures) <commit_msg>Add TEMPLATES configuration for Django v1.10 purposes.<commit_after>import sys import django from django.conf import settings settings.configure( DEBUG=True, DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', } }, INSTALLED_APPS=( 'django.contrib.auth', 'django.contrib.contenttypes', 'shopify_auth', ), AUTHENTICATION_BACKENDS=( 'shopify_auth.backends.ShopUserBackend', ), MIDDLEWARE_CLASSES=( 'django.middleware.common.CommonMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', ), TEMPLATES=[ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'APP_DIRS': True } ], ROOT_URLCONF = 'shopify_auth.urls', SHOPIFY_APP_NAME='Test App', SHOPIFY_APP_API_KEY='test-api-key', SHOPIFY_APP_API_SECRET='test-api-secret', SHOPIFY_APP_API_SCOPE='read_products', SHOPIFY_APP_IS_EMBEDDED=True, SHOPIFY_APP_DEV_MODE=False, ) django.setup() from django.test.runner import DiscoverRunner test_runner = DiscoverRunner() failures = test_runner.run_tests(['shopify_auth']) if failures: sys.exit(failures)
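The reason this standalone test harness needs a TEMPLATES entry is that Django 1.8 moved template configuration into that setting and 1.10 removed the old TEMPLATE_* fallbacks, so anything in the test run that renders a template fails unless at least one backend is configured; 'APP_DIRS': True is what lets that backend find templates shipped inside shopify_auth/templates/. For comparison, a fuller entry of the kind a project would use when its templates also need context processors; this is standard Django configuration, not something this app is known to require.

TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": [],        # extra project-level template directories, if any
        "APP_DIRS": True,  # search each installed app's templates/ directory
        "OPTIONS": {
            "context_processors": [
                "django.template.context_processors.request",
                "django.contrib.auth.context_processors.auth",
                "django.contrib.messages.context_processors.messages",
            ],
        },
    },
]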
9230d01bb88aff393d7c0f71a2417396d6c7a968
timpani/settings.py
timpani/settings.py
from . import database def getAllSettings(): databaseConnection = database.ConnectionManager.getConnection("main") query = databaseConnection.session.query(database.tables.Setting) settings = query.all() return {setting.name: setting.value for setting in settings} def getSettingValue(name): databaseConnection = database.ConnectionManager.getConnection("main") query = databaseConnection.session.query(database.tables.Setting).filter(database.tables.Setting.name == name) if query.count() > 0: return query.first().value return None def setSettingValue(name, value): databaseConnection = database.ConnectionManager.getConnection("main") settingObj = database.tables.Setting(name = name, value = value) databaseConnection.session.add(settingObj) databaseConnection.session.commit()
from . import database def getAllSettings(): databaseConnection = database.ConnectionManager.getConnection("main") query = databaseConnection.session.query(database.tables.Setting) settings = query.all() return {setting.name: setting.value for setting in settings} def getSettingValue(name): databaseConnection = database.ConnectionManager.getConnection("main") query = databaseConnection.session.query(database.tables.Setting).filter(database.tables.Setting.name == name) if query.count() > 0: return query.first().value return None def setSettingValue(name, value): databaseConnection = database.ConnectionManager.getConnection("main") settingObj = database.tables.Setting(name = name, value = value) databaseConnection.session.merge(settingObj) databaseConnection.session.commit()
Use merge instead of add in setSettingValue
Use merge instead of add in setSettingValue
Python
mit
ollien/Timpani,ollien/Timpani,ollien/Timpani
from . import database def getAllSettings(): databaseConnection = database.ConnectionManager.getConnection("main") query = databaseConnection.session.query(database.tables.Setting) settings = query.all() return {setting.name: setting.value for setting in settings} def getSettingValue(name): databaseConnection = database.ConnectionManager.getConnection("main") query = databaseConnection.session.query(database.tables.Setting).filter(database.tables.Setting.name == name) if query.count() > 0: return query.first().value return None def setSettingValue(name, value): databaseConnection = database.ConnectionManager.getConnection("main") settingObj = database.tables.Setting(name = name, value = value) databaseConnection.session.add(settingObj) databaseConnection.session.commit() Use merge instead of add in setSettingValue
from . import database def getAllSettings(): databaseConnection = database.ConnectionManager.getConnection("main") query = databaseConnection.session.query(database.tables.Setting) settings = query.all() return {setting.name: setting.value for setting in settings} def getSettingValue(name): databaseConnection = database.ConnectionManager.getConnection("main") query = databaseConnection.session.query(database.tables.Setting).filter(database.tables.Setting.name == name) if query.count() > 0: return query.first().value return None def setSettingValue(name, value): databaseConnection = database.ConnectionManager.getConnection("main") settingObj = database.tables.Setting(name = name, value = value) databaseConnection.session.merge(settingObj) databaseConnection.session.commit()
<commit_before>from . import database def getAllSettings(): databaseConnection = database.ConnectionManager.getConnection("main") query = databaseConnection.session.query(database.tables.Setting) settings = query.all() return {setting.name: setting.value for setting in settings} def getSettingValue(name): databaseConnection = database.ConnectionManager.getConnection("main") query = databaseConnection.session.query(database.tables.Setting).filter(database.tables.Setting.name == name) if query.count() > 0: return query.first().value return None def setSettingValue(name, value): databaseConnection = database.ConnectionManager.getConnection("main") settingObj = database.tables.Setting(name = name, value = value) databaseConnection.session.add(settingObj) databaseConnection.session.commit() <commit_msg>Use merge instead of add in setSettingValue<commit_after>
from . import database def getAllSettings(): databaseConnection = database.ConnectionManager.getConnection("main") query = databaseConnection.session.query(database.tables.Setting) settings = query.all() return {setting.name: setting.value for setting in settings} def getSettingValue(name): databaseConnection = database.ConnectionManager.getConnection("main") query = databaseConnection.session.query(database.tables.Setting).filter(database.tables.Setting.name == name) if query.count() > 0: return query.first().value return None def setSettingValue(name, value): databaseConnection = database.ConnectionManager.getConnection("main") settingObj = database.tables.Setting(name = name, value = value) databaseConnection.session.merge(settingObj) databaseConnection.session.commit()
from . import database def getAllSettings(): databaseConnection = database.ConnectionManager.getConnection("main") query = databaseConnection.session.query(database.tables.Setting) settings = query.all() return {setting.name: setting.value for setting in settings} def getSettingValue(name): databaseConnection = database.ConnectionManager.getConnection("main") query = databaseConnection.session.query(database.tables.Setting).filter(database.tables.Setting.name == name) if query.count() > 0: return query.first().value return None def setSettingValue(name, value): databaseConnection = database.ConnectionManager.getConnection("main") settingObj = database.tables.Setting(name = name, value = value) databaseConnection.session.add(settingObj) databaseConnection.session.commit() Use merge instead of add in setSettingValuefrom . import database def getAllSettings(): databaseConnection = database.ConnectionManager.getConnection("main") query = databaseConnection.session.query(database.tables.Setting) settings = query.all() return {setting.name: setting.value for setting in settings} def getSettingValue(name): databaseConnection = database.ConnectionManager.getConnection("main") query = databaseConnection.session.query(database.tables.Setting).filter(database.tables.Setting.name == name) if query.count() > 0: return query.first().value return None def setSettingValue(name, value): databaseConnection = database.ConnectionManager.getConnection("main") settingObj = database.tables.Setting(name = name, value = value) databaseConnection.session.merge(settingObj) databaseConnection.session.commit()
<commit_before>from . import database def getAllSettings(): databaseConnection = database.ConnectionManager.getConnection("main") query = databaseConnection.session.query(database.tables.Setting) settings = query.all() return {setting.name: setting.value for setting in settings} def getSettingValue(name): databaseConnection = database.ConnectionManager.getConnection("main") query = databaseConnection.session.query(database.tables.Setting).filter(database.tables.Setting.name == name) if query.count() > 0: return query.first().value return None def setSettingValue(name, value): databaseConnection = database.ConnectionManager.getConnection("main") settingObj = database.tables.Setting(name = name, value = value) databaseConnection.session.add(settingObj) databaseConnection.session.commit() <commit_msg>Use merge instead of add in setSettingValue<commit_after>from . import database def getAllSettings(): databaseConnection = database.ConnectionManager.getConnection("main") query = databaseConnection.session.query(database.tables.Setting) settings = query.all() return {setting.name: setting.value for setting in settings} def getSettingValue(name): databaseConnection = database.ConnectionManager.getConnection("main") query = databaseConnection.session.query(database.tables.Setting).filter(database.tables.Setting.name == name) if query.count() > 0: return query.first().value return None def setSettingValue(name, value): databaseConnection = database.ConnectionManager.getConnection("main") settingObj = database.tables.Setting(name = name, value = value) databaseConnection.session.merge(settingObj) databaseConnection.session.commit()
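A note on the change captured in this record: SQLAlchemy's session.add() always schedules an INSERT, so calling setSettingValue() twice for the same setting name would collide on the primary key, while session.merge() reconciles the incoming object with any row that already has that key and updates it. The sketch below is not from the Timpani code base — the Setting model, table name, and the SQLAlchemy 1.4+ imports are stand-ins chosen for illustration only.

from sqlalchemy import Column, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class Setting(Base):
    # Hypothetical stand-in for database.tables.Setting.
    __tablename__ = "settings"
    name = Column(String, primary_key=True)
    value = Column(String)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)

with Session(engine) as session:
    session.add(Setting(name="blog_title", value="Old title"))
    session.commit()

    # A second add() with the same primary key would fail on commit;
    # merge() copies the new state onto the existing row instead, which is
    # the upsert-style behaviour the commit switches to.
    session.merge(Setting(name="blog_title", value="New title"))
    session.commit()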
99bd465550fd8c085cb0317b53647de75cf4eee7
urls.py
urls.py
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, include, url
from django.contrib import admin

admin.autodiscover()

urlpatterns = patterns(
    '',
    url(r'^', include('api.urls')),
    url('r^accounts/', include('accounts.urls')),
    url('', include('social.apps.django_app.urls', namespace='social')),
    url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
    url(r'^admin/', include(admin.site.urls)),
)
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, include, url
from django.contrib import admin

admin.autodiscover()

urlpatterns = patterns(
    '',
    url(r'^', include('api.urls')),
    url(r'^accounts/', include('accounts.urls')),
    url('', include('social.apps.django_app.urls', namespace='social')),
    url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
    url(r'^admin/', include(admin.site.urls)),
)
Fix wrong url regxp for accoutns
Fix wrong url regxp for accoutns
Python
agpl-3.0
datea/datea-api,lafactura/datea-api,lafactura/datea-api,datea/datea-api,datea/datea-api,lafactura/datea-api
# -*- coding: utf-8 -*- from django.conf.urls import patterns, include, url from django.contrib import admin admin.autodiscover() urlpatterns = patterns( '', url(r'^', include('api.urls')), url('r^accounts/', include('accounts.urls')), url('', include('social.apps.django_app.urls', namespace='social')), url(r'^admin/doc/', include('django.contrib.admindocs.urls')), url(r'^admin/', include(admin.site.urls)), ) Fix wrong url regxp for accoutns
# -*- coding: utf-8 -*- from django.conf.urls import patterns, include, url from django.contrib import admin admin.autodiscover() urlpatterns = patterns( '', url(r'^', include('api.urls')), url(r'^accounts/', include('accounts.urls')), url('', include('social.apps.django_app.urls', namespace='social')), url(r'^admin/doc/', include('django.contrib.admindocs.urls')), url(r'^admin/', include(admin.site.urls)), )
<commit_before># -*- coding: utf-8 -*- from django.conf.urls import patterns, include, url from django.contrib import admin admin.autodiscover() urlpatterns = patterns( '', url(r'^', include('api.urls')), url('r^accounts/', include('accounts.urls')), url('', include('social.apps.django_app.urls', namespace='social')), url(r'^admin/doc/', include('django.contrib.admindocs.urls')), url(r'^admin/', include(admin.site.urls)), ) <commit_msg>Fix wrong url regxp for accoutns<commit_after>
# -*- coding: utf-8 -*- from django.conf.urls import patterns, include, url from django.contrib import admin admin.autodiscover() urlpatterns = patterns( '', url(r'^', include('api.urls')), url(r'^accounts/', include('accounts.urls')), url('', include('social.apps.django_app.urls', namespace='social')), url(r'^admin/doc/', include('django.contrib.admindocs.urls')), url(r'^admin/', include(admin.site.urls)), )
# -*- coding: utf-8 -*- from django.conf.urls import patterns, include, url from django.contrib import admin admin.autodiscover() urlpatterns = patterns( '', url(r'^', include('api.urls')), url('r^accounts/', include('accounts.urls')), url('', include('social.apps.django_app.urls', namespace='social')), url(r'^admin/doc/', include('django.contrib.admindocs.urls')), url(r'^admin/', include(admin.site.urls)), ) Fix wrong url regxp for accoutns# -*- coding: utf-8 -*- from django.conf.urls import patterns, include, url from django.contrib import admin admin.autodiscover() urlpatterns = patterns( '', url(r'^', include('api.urls')), url(r'^accounts/', include('accounts.urls')), url('', include('social.apps.django_app.urls', namespace='social')), url(r'^admin/doc/', include('django.contrib.admindocs.urls')), url(r'^admin/', include(admin.site.urls)), )
<commit_before># -*- coding: utf-8 -*- from django.conf.urls import patterns, include, url from django.contrib import admin admin.autodiscover() urlpatterns = patterns( '', url(r'^', include('api.urls')), url('r^accounts/', include('accounts.urls')), url('', include('social.apps.django_app.urls', namespace='social')), url(r'^admin/doc/', include('django.contrib.admindocs.urls')), url(r'^admin/', include(admin.site.urls)), ) <commit_msg>Fix wrong url regxp for accoutns<commit_after># -*- coding: utf-8 -*- from django.conf.urls import patterns, include, url from django.contrib import admin admin.autodiscover() urlpatterns = patterns( '', url(r'^', include('api.urls')), url(r'^accounts/', include('accounts.urls')), url('', include('social.apps.django_app.urls', namespace='social')), url(r'^admin/doc/', include('django.contrib.admindocs.urls')), url(r'^admin/', include(admin.site.urls)), )
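The bug fixed in this record is easy to miss in review: the r ended up inside the string literal instead of acting as a raw-string prefix, so the pattern demanded a literal "r" at the start of the path and the accounts URLs could never resolve. A quick standalone check with plain re (not Django's routing machinery), using a made-up path:

import re

# Stray 'r' inside the pattern: the path would have to begin with a literal "r".
print(re.match('r^accounts/', 'accounts/login/'))   # None

# Raw-string prefix outside the quotes: anchors at the start as intended.
print(re.match(r'^accounts/', 'accounts/login/'))   # a match object covering 'accounts/'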
16b69b7a70d0f5f2dc198e09ad8b5d0e9997aba3
src/bindings/python/__init__.py
src/bindings/python/__init__.py
import os
import platform
import sys

if platform.system() == "Windows":
    os.add_dll_directory(os.path.join(sys.prefix, "Library", "bin"))
import os
import platform
import sys

if platform.system() == "Windows":
    version_info = sys.version_info
    if sys.version_info.major == 3 && sys.version_info.minor < 8:
        sys.path.append(os.path.join(sys.prefix, "Library", "bin"))
    else:
        os.add_dll_directory(os.path.join(sys.prefix, "Library", "bin"))
Make aether package initialisation work for both Python 3.7 and 3.8.
Make aether package initialisation work for both Python 3.7 and 3.8.
Python
apache-2.0
LungNoodle/lungsim,LungNoodle/lungsim,LungNoodle/lungsim
import os import platform import sys if platform.system() == "Windows": os.add_dll_directory(os.path.join(sys.prefix, "Library", "bin")) Make aether package initialisation work for both Python 3.7 and 3.8.
import os import platform import sys if platform.system() == "Windows": version_info = sys.version_info if sys.version_info.major == 3 && sys.version_info.minor < 8: sys.path.append(os.path.join(sys.prefix, "Library", "bin")) else: os.add_dll_directory(os.path.join(sys.prefix, "Library", "bin"))
<commit_before>import os import platform import sys if platform.system() == "Windows": os.add_dll_directory(os.path.join(sys.prefix, "Library", "bin")) <commit_msg>Make aether package initialisation work for both Python 3.7 and 3.8.<commit_after>
import os import platform import sys if platform.system() == "Windows": version_info = sys.version_info if sys.version_info.major == 3 && sys.version_info.minor < 8: sys.path.append(os.path.join(sys.prefix, "Library", "bin")) else: os.add_dll_directory(os.path.join(sys.prefix, "Library", "bin"))
import os import platform import sys if platform.system() == "Windows": os.add_dll_directory(os.path.join(sys.prefix, "Library", "bin")) Make aether package initialisation work for both Python 3.7 and 3.8.import os import platform import sys if platform.system() == "Windows": version_info = sys.version_info if sys.version_info.major == 3 && sys.version_info.minor < 8: sys.path.append(os.path.join(sys.prefix, "Library", "bin")) else: os.add_dll_directory(os.path.join(sys.prefix, "Library", "bin"))
<commit_before>import os import platform import sys if platform.system() == "Windows": os.add_dll_directory(os.path.join(sys.prefix, "Library", "bin")) <commit_msg>Make aether package initialisation work for both Python 3.7 and 3.8.<commit_after>import os import platform import sys if platform.system() == "Windows": version_info = sys.version_info if sys.version_info.major == 3 && sys.version_info.minor < 8: sys.path.append(os.path.join(sys.prefix, "Library", "bin")) else: os.add_dll_directory(os.path.join(sys.prefix, "Library", "bin"))
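Two observations on the new_contents recorded here, as committed: os.add_dll_directory() indeed only exists on Python 3.8+, which is what the version check works around, but && is not a Python operator (the language uses and), so the module as written would raise a SyntaxError on import. A sketch of the same branch in valid Python — an illustration only, not the project's actual follow-up code:

import os
import platform
import sys

if platform.system() == "Windows":
    dll_dir = os.path.join(sys.prefix, "Library", "bin")
    if sys.version_info >= (3, 8):
        # Python 3.8+ no longer consults PATH when resolving extension-module DLLs.
        os.add_dll_directory(dll_dir)
    else:
        # Older interpreters: fall back to extending the module search path.
        sys.path.append(dll_dir)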
c73bae428fa6ecf4c280a991ed8482250e4d8747
jarn/mkrelease/process.py
jarn/mkrelease/process.py
import os
import tee


class Process(object):
    """Process related functions using the tee module."""

    def __init__(self, quiet=False, env=None):
        self.quiet = quiet
        self.env = env

    def popen(self, cmd, echo=True, echo2=True):
        # env *replaces* os.environ
        if self.quiet:
            echo = echo2 = False
        return tee.popen(cmd, echo, echo2, env=self.env)

    def pipe(self, cmd):
        rc, lines = self.popen(cmd, echo=False)
        if rc == 0 and lines:
            return lines[0]
        return ''

    def system(self, cmd):
        rc, lines = self.popen(cmd)
        return rc

    def os_system(self, cmd):
        # env *updates* os.environ
        if self.quiet:
            cmd = cmd + ' >%s 2>&1' % os.devnull
        if self.env:
            cmd = ''.join('export %s="%s"\n' % (k, v) for k, v in self.env.items()) + cmd
        return os.system(cmd)
import os
import tee


class Process(object):
    """Process related functions using the tee module."""

    def __init__(self, quiet=False, env=None):
        self.quiet = quiet
        self.env = env

    def popen(self, cmd, echo=True, echo2=True):
        # env *replaces* os.environ
        if self.quiet:
            echo = echo2 = False
        return tee.popen(cmd, echo, echo2, env=self.env)

    def pipe(self, cmd):
        rc, lines = self.popen(cmd, echo=False)
        if rc == 0 and lines:
            return lines[0]
        return ''

    def system(self, cmd):
        rc, lines = self.popen(cmd)
        return rc

    def os_system(self, cmd):
        # env *updates* os.environ
        if self.quiet:
            cmd = cmd + ' >%s 2>&1' % os.devnull
        if self.env:
            cmd = ''.join('export %s="%s"\n' % (k, v) for k, v in self.env.iteritems()) + cmd
        return os.system(cmd)
Use iteritems in Python 2.
Use iteritems in Python 2.
Python
bsd-2-clause
Jarn/jarn.mkrelease
import os import tee class Process(object): """Process related functions using the tee module.""" def __init__(self, quiet=False, env=None): self.quiet = quiet self.env = env def popen(self, cmd, echo=True, echo2=True): # env *replaces* os.environ if self.quiet: echo = echo2 = False return tee.popen(cmd, echo, echo2, env=self.env) def pipe(self, cmd): rc, lines = self.popen(cmd, echo=False) if rc == 0 and lines: return lines[0] return '' def system(self, cmd): rc, lines = self.popen(cmd) return rc def os_system(self, cmd): # env *updates* os.environ if self.quiet: cmd = cmd + ' >%s 2>&1' % os.devnull if self.env: cmd = ''.join('export %s="%s"\n' % (k, v) for k, v in self.env.items()) + cmd return os.system(cmd) Use iteritems in Python 2.
import os import tee class Process(object): """Process related functions using the tee module.""" def __init__(self, quiet=False, env=None): self.quiet = quiet self.env = env def popen(self, cmd, echo=True, echo2=True): # env *replaces* os.environ if self.quiet: echo = echo2 = False return tee.popen(cmd, echo, echo2, env=self.env) def pipe(self, cmd): rc, lines = self.popen(cmd, echo=False) if rc == 0 and lines: return lines[0] return '' def system(self, cmd): rc, lines = self.popen(cmd) return rc def os_system(self, cmd): # env *updates* os.environ if self.quiet: cmd = cmd + ' >%s 2>&1' % os.devnull if self.env: cmd = ''.join('export %s="%s"\n' % (k, v) for k, v in self.env.iteritems()) + cmd return os.system(cmd)
<commit_before>import os import tee class Process(object): """Process related functions using the tee module.""" def __init__(self, quiet=False, env=None): self.quiet = quiet self.env = env def popen(self, cmd, echo=True, echo2=True): # env *replaces* os.environ if self.quiet: echo = echo2 = False return tee.popen(cmd, echo, echo2, env=self.env) def pipe(self, cmd): rc, lines = self.popen(cmd, echo=False) if rc == 0 and lines: return lines[0] return '' def system(self, cmd): rc, lines = self.popen(cmd) return rc def os_system(self, cmd): # env *updates* os.environ if self.quiet: cmd = cmd + ' >%s 2>&1' % os.devnull if self.env: cmd = ''.join('export %s="%s"\n' % (k, v) for k, v in self.env.items()) + cmd return os.system(cmd) <commit_msg>Use iteritems in Python 2.<commit_after>
import os import tee class Process(object): """Process related functions using the tee module.""" def __init__(self, quiet=False, env=None): self.quiet = quiet self.env = env def popen(self, cmd, echo=True, echo2=True): # env *replaces* os.environ if self.quiet: echo = echo2 = False return tee.popen(cmd, echo, echo2, env=self.env) def pipe(self, cmd): rc, lines = self.popen(cmd, echo=False) if rc == 0 and lines: return lines[0] return '' def system(self, cmd): rc, lines = self.popen(cmd) return rc def os_system(self, cmd): # env *updates* os.environ if self.quiet: cmd = cmd + ' >%s 2>&1' % os.devnull if self.env: cmd = ''.join('export %s="%s"\n' % (k, v) for k, v in self.env.iteritems()) + cmd return os.system(cmd)
import os import tee class Process(object): """Process related functions using the tee module.""" def __init__(self, quiet=False, env=None): self.quiet = quiet self.env = env def popen(self, cmd, echo=True, echo2=True): # env *replaces* os.environ if self.quiet: echo = echo2 = False return tee.popen(cmd, echo, echo2, env=self.env) def pipe(self, cmd): rc, lines = self.popen(cmd, echo=False) if rc == 0 and lines: return lines[0] return '' def system(self, cmd): rc, lines = self.popen(cmd) return rc def os_system(self, cmd): # env *updates* os.environ if self.quiet: cmd = cmd + ' >%s 2>&1' % os.devnull if self.env: cmd = ''.join('export %s="%s"\n' % (k, v) for k, v in self.env.items()) + cmd return os.system(cmd) Use iteritems in Python 2.import os import tee class Process(object): """Process related functions using the tee module.""" def __init__(self, quiet=False, env=None): self.quiet = quiet self.env = env def popen(self, cmd, echo=True, echo2=True): # env *replaces* os.environ if self.quiet: echo = echo2 = False return tee.popen(cmd, echo, echo2, env=self.env) def pipe(self, cmd): rc, lines = self.popen(cmd, echo=False) if rc == 0 and lines: return lines[0] return '' def system(self, cmd): rc, lines = self.popen(cmd) return rc def os_system(self, cmd): # env *updates* os.environ if self.quiet: cmd = cmd + ' >%s 2>&1' % os.devnull if self.env: cmd = ''.join('export %s="%s"\n' % (k, v) for k, v in self.env.iteritems()) + cmd return os.system(cmd)
<commit_before>import os import tee class Process(object): """Process related functions using the tee module.""" def __init__(self, quiet=False, env=None): self.quiet = quiet self.env = env def popen(self, cmd, echo=True, echo2=True): # env *replaces* os.environ if self.quiet: echo = echo2 = False return tee.popen(cmd, echo, echo2, env=self.env) def pipe(self, cmd): rc, lines = self.popen(cmd, echo=False) if rc == 0 and lines: return lines[0] return '' def system(self, cmd): rc, lines = self.popen(cmd) return rc def os_system(self, cmd): # env *updates* os.environ if self.quiet: cmd = cmd + ' >%s 2>&1' % os.devnull if self.env: cmd = ''.join('export %s="%s"\n' % (k, v) for k, v in self.env.items()) + cmd return os.system(cmd) <commit_msg>Use iteritems in Python 2.<commit_after>import os import tee class Process(object): """Process related functions using the tee module.""" def __init__(self, quiet=False, env=None): self.quiet = quiet self.env = env def popen(self, cmd, echo=True, echo2=True): # env *replaces* os.environ if self.quiet: echo = echo2 = False return tee.popen(cmd, echo, echo2, env=self.env) def pipe(self, cmd): rc, lines = self.popen(cmd, echo=False) if rc == 0 and lines: return lines[0] return '' def system(self, cmd): rc, lines = self.popen(cmd) return rc def os_system(self, cmd): # env *updates* os.environ if self.quiet: cmd = cmd + ' >%s 2>&1' % os.devnull if self.env: cmd = ''.join('export %s="%s"\n' % (k, v) for k, v in self.env.iteritems()) + cmd return os.system(cmd)
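Context for this record: dict.iteritems() exists only on Python 2, where it returns a lazy iterator while items() builds a list; Python 3 dropped it, so this revision effectively ties os_system() to Python 2. A minimal illustration of the split — the env dictionary here is made up and nothing below comes from jarn.mkrelease:

import sys

env = {"PATH": "/usr/bin", "LANG": "C"}

if sys.version_info[0] == 2:
    pairs = env.iteritems()   # lazy iterator; the attribute exists on Python 2 only
else:
    pairs = env.items()       # dict view; the Python 3 spelling

exports = ''.join('export %s="%s"\n' % (k, v) for k, v in pairs)
print(exports)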
01d49c43594829c5ba10c4dcb6db00bfd5e5fb61
lcp/settings/base.py
lcp/settings/base.py
import os

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

SECRET_KEY = os.environ['SECRET_KEY']
import os

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

SECRET_KEY = os.environ['SECRET_KEY']

# Other settings must explicitly opt-in for debug mode.
DEBUG = False
Make the default for DEBUG be False.
Make the default for DEBUG be False.
Python
bsd-2-clause
mblayman/lcp,mblayman/lcp,mblayman/lcp
import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) SECRET_KEY = os.environ['SECRET_KEY'] Make the default for DEBUG be False.
import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) SECRET_KEY = os.environ['SECRET_KEY'] # Other settings must explicitly opt-in for debug mode. DEBUG = False
<commit_before>import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) SECRET_KEY = os.environ['SECRET_KEY'] <commit_msg>Make the default for DEBUG be False.<commit_after>
import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) SECRET_KEY = os.environ['SECRET_KEY'] # Other settings must explicitly opt-in for debug mode. DEBUG = False
import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) SECRET_KEY = os.environ['SECRET_KEY'] Make the default for DEBUG be False.import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) SECRET_KEY = os.environ['SECRET_KEY'] # Other settings must explicitly opt-in for debug mode. DEBUG = False
<commit_before>import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) SECRET_KEY = os.environ['SECRET_KEY'] <commit_msg>Make the default for DEBUG be False.<commit_after>import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) SECRET_KEY = os.environ['SECRET_KEY'] # Other settings must explicitly opt-in for debug mode. DEBUG = False
1f8b37789fd656d80c0424bbd56c3d0b40aa07de
lexicon/discovery.py
lexicon/discovery.py
""" This module takes care of finding information about the runtime of Lexicon: * what are the providers installed, and available * what is the version of Lexicon """ import pkgutil import pkg_resources from lexicon import providers def find_providers(): """Find all providers registered in Lexicon, and their availability""" providers_list = sorted({modname for (_, modname, _) in pkgutil.iter_modules(providers.__path__) if modname != 'base'}) try: distribution = pkg_resources.get_distribution('dns-lexicon') except pkg_resources.DistributionNotFound: return {provider: True for provider in providers_list} else: return {provider: _resolve_requirements(provider, distribution) for provider in providers_list} def lexicon_version(): """Retrieve current Lexicon version""" try: return pkg_resources.get_distribution('dns-lexicon').version except pkg_resources.DistributionNotFound: return 'unknown' def _resolve_requirements(provider, distribution): try: requirements = distribution.requires([provider]) except pkg_resources.UnknownExtra: # No extra for this provider return True else: # Extra is defined try: for requirement in requirements: pkg_resources.get_distribution(requirement) except (pkg_resources.DistributionNotFound, pkg_resources.VersionConflict): # At least one extra requirement is not fulfilled return False return True
""" This module takes care of finding information about the runtime of Lexicon: * what are the providers installed, and available * what is the version of Lexicon """ import pkgutil import pkg_resources from lexicon import providers def find_providers(): """Find all providers registered in Lexicon, and their availability""" providers_list = sorted({modname for (_, modname, _) in pkgutil.iter_modules(providers.__path__) if modname != 'base'}) try: distribution = pkg_resources.get_distribution('dns-lexicon') except pkg_resources.DistributionNotFound: return {provider: True for provider in providers_list} else: return {provider: _resolve_requirements(provider, distribution) for provider in providers_list} def lexicon_version(): """Retrieve current Lexicon version""" try: return pkg_resources.get_distribution('dns-lexicon').version except pkg_resources.DistributionNotFound: return 'unknown' def _resolve_requirements(provider, distribution): try: requirements = distribution.requires([provider]) except pkg_resources.UnknownExtra: # No extra for this provider return True else: # Extra is defined try: for requirement in requirements: pkg_resources.get_distribution(requirement.name) except (pkg_resources.DistributionNotFound, pkg_resources.VersionConflict): # At least one extra requirement is not fulfilled return False return True
Fix resolution of dependencies in a regular install of lexicon distribution
Fix resolution of dependencies in a regular install of lexicon distribution
Python
mit
AnalogJ/lexicon,AnalogJ/lexicon
""" This module takes care of finding information about the runtime of Lexicon: * what are the providers installed, and available * what is the version of Lexicon """ import pkgutil import pkg_resources from lexicon import providers def find_providers(): """Find all providers registered in Lexicon, and their availability""" providers_list = sorted({modname for (_, modname, _) in pkgutil.iter_modules(providers.__path__) if modname != 'base'}) try: distribution = pkg_resources.get_distribution('dns-lexicon') except pkg_resources.DistributionNotFound: return {provider: True for provider in providers_list} else: return {provider: _resolve_requirements(provider, distribution) for provider in providers_list} def lexicon_version(): """Retrieve current Lexicon version""" try: return pkg_resources.get_distribution('dns-lexicon').version except pkg_resources.DistributionNotFound: return 'unknown' def _resolve_requirements(provider, distribution): try: requirements = distribution.requires([provider]) except pkg_resources.UnknownExtra: # No extra for this provider return True else: # Extra is defined try: for requirement in requirements: pkg_resources.get_distribution(requirement) except (pkg_resources.DistributionNotFound, pkg_resources.VersionConflict): # At least one extra requirement is not fulfilled return False return True Fix resolution of dependencies in a regular install of lexicon distribution
""" This module takes care of finding information about the runtime of Lexicon: * what are the providers installed, and available * what is the version of Lexicon """ import pkgutil import pkg_resources from lexicon import providers def find_providers(): """Find all providers registered in Lexicon, and their availability""" providers_list = sorted({modname for (_, modname, _) in pkgutil.iter_modules(providers.__path__) if modname != 'base'}) try: distribution = pkg_resources.get_distribution('dns-lexicon') except pkg_resources.DistributionNotFound: return {provider: True for provider in providers_list} else: return {provider: _resolve_requirements(provider, distribution) for provider in providers_list} def lexicon_version(): """Retrieve current Lexicon version""" try: return pkg_resources.get_distribution('dns-lexicon').version except pkg_resources.DistributionNotFound: return 'unknown' def _resolve_requirements(provider, distribution): try: requirements = distribution.requires([provider]) except pkg_resources.UnknownExtra: # No extra for this provider return True else: # Extra is defined try: for requirement in requirements: pkg_resources.get_distribution(requirement.name) except (pkg_resources.DistributionNotFound, pkg_resources.VersionConflict): # At least one extra requirement is not fulfilled return False return True
<commit_before>""" This module takes care of finding information about the runtime of Lexicon: * what are the providers installed, and available * what is the version of Lexicon """ import pkgutil import pkg_resources from lexicon import providers def find_providers(): """Find all providers registered in Lexicon, and their availability""" providers_list = sorted({modname for (_, modname, _) in pkgutil.iter_modules(providers.__path__) if modname != 'base'}) try: distribution = pkg_resources.get_distribution('dns-lexicon') except pkg_resources.DistributionNotFound: return {provider: True for provider in providers_list} else: return {provider: _resolve_requirements(provider, distribution) for provider in providers_list} def lexicon_version(): """Retrieve current Lexicon version""" try: return pkg_resources.get_distribution('dns-lexicon').version except pkg_resources.DistributionNotFound: return 'unknown' def _resolve_requirements(provider, distribution): try: requirements = distribution.requires([provider]) except pkg_resources.UnknownExtra: # No extra for this provider return True else: # Extra is defined try: for requirement in requirements: pkg_resources.get_distribution(requirement) except (pkg_resources.DistributionNotFound, pkg_resources.VersionConflict): # At least one extra requirement is not fulfilled return False return True <commit_msg>Fix resolution of dependencies in a regular install of lexicon distribution<commit_after>
""" This module takes care of finding information about the runtime of Lexicon: * what are the providers installed, and available * what is the version of Lexicon """ import pkgutil import pkg_resources from lexicon import providers def find_providers(): """Find all providers registered in Lexicon, and their availability""" providers_list = sorted({modname for (_, modname, _) in pkgutil.iter_modules(providers.__path__) if modname != 'base'}) try: distribution = pkg_resources.get_distribution('dns-lexicon') except pkg_resources.DistributionNotFound: return {provider: True for provider in providers_list} else: return {provider: _resolve_requirements(provider, distribution) for provider in providers_list} def lexicon_version(): """Retrieve current Lexicon version""" try: return pkg_resources.get_distribution('dns-lexicon').version except pkg_resources.DistributionNotFound: return 'unknown' def _resolve_requirements(provider, distribution): try: requirements = distribution.requires([provider]) except pkg_resources.UnknownExtra: # No extra for this provider return True else: # Extra is defined try: for requirement in requirements: pkg_resources.get_distribution(requirement.name) except (pkg_resources.DistributionNotFound, pkg_resources.VersionConflict): # At least one extra requirement is not fulfilled return False return True
""" This module takes care of finding information about the runtime of Lexicon: * what are the providers installed, and available * what is the version of Lexicon """ import pkgutil import pkg_resources from lexicon import providers def find_providers(): """Find all providers registered in Lexicon, and their availability""" providers_list = sorted({modname for (_, modname, _) in pkgutil.iter_modules(providers.__path__) if modname != 'base'}) try: distribution = pkg_resources.get_distribution('dns-lexicon') except pkg_resources.DistributionNotFound: return {provider: True for provider in providers_list} else: return {provider: _resolve_requirements(provider, distribution) for provider in providers_list} def lexicon_version(): """Retrieve current Lexicon version""" try: return pkg_resources.get_distribution('dns-lexicon').version except pkg_resources.DistributionNotFound: return 'unknown' def _resolve_requirements(provider, distribution): try: requirements = distribution.requires([provider]) except pkg_resources.UnknownExtra: # No extra for this provider return True else: # Extra is defined try: for requirement in requirements: pkg_resources.get_distribution(requirement) except (pkg_resources.DistributionNotFound, pkg_resources.VersionConflict): # At least one extra requirement is not fulfilled return False return True Fix resolution of dependencies in a regular install of lexicon distribution""" This module takes care of finding information about the runtime of Lexicon: * what are the providers installed, and available * what is the version of Lexicon """ import pkgutil import pkg_resources from lexicon import providers def find_providers(): """Find all providers registered in Lexicon, and their availability""" providers_list = sorted({modname for (_, modname, _) in pkgutil.iter_modules(providers.__path__) if modname != 'base'}) try: distribution = pkg_resources.get_distribution('dns-lexicon') except pkg_resources.DistributionNotFound: return {provider: True for provider in providers_list} else: return {provider: _resolve_requirements(provider, distribution) for provider in providers_list} def lexicon_version(): """Retrieve current Lexicon version""" try: return pkg_resources.get_distribution('dns-lexicon').version except pkg_resources.DistributionNotFound: return 'unknown' def _resolve_requirements(provider, distribution): try: requirements = distribution.requires([provider]) except pkg_resources.UnknownExtra: # No extra for this provider return True else: # Extra is defined try: for requirement in requirements: pkg_resources.get_distribution(requirement.name) except (pkg_resources.DistributionNotFound, pkg_resources.VersionConflict): # At least one extra requirement is not fulfilled return False return True
<commit_before>""" This module takes care of finding information about the runtime of Lexicon: * what are the providers installed, and available * what is the version of Lexicon """ import pkgutil import pkg_resources from lexicon import providers def find_providers(): """Find all providers registered in Lexicon, and their availability""" providers_list = sorted({modname for (_, modname, _) in pkgutil.iter_modules(providers.__path__) if modname != 'base'}) try: distribution = pkg_resources.get_distribution('dns-lexicon') except pkg_resources.DistributionNotFound: return {provider: True for provider in providers_list} else: return {provider: _resolve_requirements(provider, distribution) for provider in providers_list} def lexicon_version(): """Retrieve current Lexicon version""" try: return pkg_resources.get_distribution('dns-lexicon').version except pkg_resources.DistributionNotFound: return 'unknown' def _resolve_requirements(provider, distribution): try: requirements = distribution.requires([provider]) except pkg_resources.UnknownExtra: # No extra for this provider return True else: # Extra is defined try: for requirement in requirements: pkg_resources.get_distribution(requirement) except (pkg_resources.DistributionNotFound, pkg_resources.VersionConflict): # At least one extra requirement is not fulfilled return False return True <commit_msg>Fix resolution of dependencies in a regular install of lexicon distribution<commit_after>""" This module takes care of finding information about the runtime of Lexicon: * what are the providers installed, and available * what is the version of Lexicon """ import pkgutil import pkg_resources from lexicon import providers def find_providers(): """Find all providers registered in Lexicon, and their availability""" providers_list = sorted({modname for (_, modname, _) in pkgutil.iter_modules(providers.__path__) if modname != 'base'}) try: distribution = pkg_resources.get_distribution('dns-lexicon') except pkg_resources.DistributionNotFound: return {provider: True for provider in providers_list} else: return {provider: _resolve_requirements(provider, distribution) for provider in providers_list} def lexicon_version(): """Retrieve current Lexicon version""" try: return pkg_resources.get_distribution('dns-lexicon').version except pkg_resources.DistributionNotFound: return 'unknown' def _resolve_requirements(provider, distribution): try: requirements = distribution.requires([provider]) except pkg_resources.UnknownExtra: # No extra for this provider return True else: # Extra is defined try: for requirement in requirements: pkg_resources.get_distribution(requirement.name) except (pkg_resources.DistributionNotFound, pkg_resources.VersionConflict): # At least one extra requirement is not fulfilled return False return True
9c4aefb8ea88fd5505602c95f4762fdeb3aea183
oslo_versionedobjects/_utils.py
oslo_versionedobjects/_utils.py
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Utilities and helper functions."""

# ISO 8601 extended time format without microseconds
_ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S'


def isotime(at):
    """Stringify time in ISO 8601 format."""
    st = at.strftime(_ISO8601_TIME_FORMAT)
    tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC'
    st += ('Z' if tz == 'UTC' else tz)
    return st
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Utilities and helper functions."""

# ISO 8601 extended time format without microseconds
_ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S'


def isotime(at):
    """Stringify time in ISO 8601 format."""
    st = at.strftime(_ISO8601_TIME_FORMAT)
    tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC'
    # Need to handle either iso8601 or python UTC format
    st += ('Z' if tz in ['UTC', 'UTC+00:00'] else tz)
    return st
Handle TZ change in iso8601 >=0.1.12
Handle TZ change in iso8601 >=0.1.12 The iso8601 lib introduced a change such that if running on python 3.2 or later it internally uses the python timezone information instead of its own implementation. This does not change direct date handling, but when converting this value there is a slight difference where now python 2.x will show UTC times as "UTC", but on python 3 they will end up with "UTC+00:00". The to_primitive call for DateTime fields was doing an exact match on "UTC" to determine whether to include "Z" in the resulting string. This updates that handling to recognize either of the new values. Change-Id: I71b58e8fd8fee8a57ee275ff3e0b77f165eca836 Closes-bug: #1744160
Python
apache-2.0
openstack/oslo.versionedobjects
# Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # Copyright 2011 Justin Santa Barbara # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Utilities and helper functions.""" # ISO 8601 extended time format without microseconds _ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S' def isotime(at): """Stringify time in ISO 8601 format.""" st = at.strftime(_ISO8601_TIME_FORMAT) tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC' st += ('Z' if tz == 'UTC' else tz) return st Handle TZ change in iso8601 >=0.1.12 The iso8601 lib introduced a change such that if running on python 3.2 or later it internally uses the python timezone information instead of its own implementation. This does not change direct date handling, but when converting this value there is a slight difference where now python 2.x will show UTC times as "UTC", but on python 3 they will end up with "UTC+00:00". The to_primitive call for DateTime fields was doing an exact match on "UTC" to determine whether to include "Z" in the resulting string. This updates that handling to recognize either of the new values. Change-Id: I71b58e8fd8fee8a57ee275ff3e0b77f165eca836 Closes-bug: #1744160
# Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # Copyright 2011 Justin Santa Barbara # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Utilities and helper functions.""" # ISO 8601 extended time format without microseconds _ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S' def isotime(at): """Stringify time in ISO 8601 format.""" st = at.strftime(_ISO8601_TIME_FORMAT) tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC' # Need to handle either iso8601 or python UTC format st += ('Z' if tz in ['UTC', 'UTC+00:00'] else tz) return st
<commit_before># Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # Copyright 2011 Justin Santa Barbara # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Utilities and helper functions.""" # ISO 8601 extended time format without microseconds _ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S' def isotime(at): """Stringify time in ISO 8601 format.""" st = at.strftime(_ISO8601_TIME_FORMAT) tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC' st += ('Z' if tz == 'UTC' else tz) return st <commit_msg>Handle TZ change in iso8601 >=0.1.12 The iso8601 lib introduced a change such that if running on python 3.2 or later it internally uses the python timezone information instead of its own implementation. This does not change direct date handling, but when converting this value there is a slight difference where now python 2.x will show UTC times as "UTC", but on python 3 they will end up with "UTC+00:00". The to_primitive call for DateTime fields was doing an exact match on "UTC" to determine whether to include "Z" in the resulting string. This updates that handling to recognize either of the new values. Change-Id: I71b58e8fd8fee8a57ee275ff3e0b77f165eca836 Closes-bug: #1744160<commit_after>
# Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # Copyright 2011 Justin Santa Barbara # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Utilities and helper functions.""" # ISO 8601 extended time format without microseconds _ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S' def isotime(at): """Stringify time in ISO 8601 format.""" st = at.strftime(_ISO8601_TIME_FORMAT) tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC' # Need to handle either iso8601 or python UTC format st += ('Z' if tz in ['UTC', 'UTC+00:00'] else tz) return st
# Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # Copyright 2011 Justin Santa Barbara # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Utilities and helper functions.""" # ISO 8601 extended time format without microseconds _ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S' def isotime(at): """Stringify time in ISO 8601 format.""" st = at.strftime(_ISO8601_TIME_FORMAT) tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC' st += ('Z' if tz == 'UTC' else tz) return st Handle TZ change in iso8601 >=0.1.12 The iso8601 lib introduced a change such that if running on python 3.2 or later it internally uses the python timezone information instead of its own implementation. This does not change direct date handling, but when converting this value there is a slight difference where now python 2.x will show UTC times as "UTC", but on python 3 they will end up with "UTC+00:00". The to_primitive call for DateTime fields was doing an exact match on "UTC" to determine whether to include "Z" in the resulting string. This updates that handling to recognize either of the new values. Change-Id: I71b58e8fd8fee8a57ee275ff3e0b77f165eca836 Closes-bug: #1744160# Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # Copyright 2011 Justin Santa Barbara # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Utilities and helper functions.""" # ISO 8601 extended time format without microseconds _ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S' def isotime(at): """Stringify time in ISO 8601 format.""" st = at.strftime(_ISO8601_TIME_FORMAT) tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC' # Need to handle either iso8601 or python UTC format st += ('Z' if tz in ['UTC', 'UTC+00:00'] else tz) return st
<commit_before># Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # Copyright 2011 Justin Santa Barbara # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Utilities and helper functions.""" # ISO 8601 extended time format without microseconds _ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S' def isotime(at): """Stringify time in ISO 8601 format.""" st = at.strftime(_ISO8601_TIME_FORMAT) tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC' st += ('Z' if tz == 'UTC' else tz) return st <commit_msg>Handle TZ change in iso8601 >=0.1.12 The iso8601 lib introduced a change such that if running on python 3.2 or later it internally uses the python timezone information instead of its own implementation. This does not change direct date handling, but when converting this value there is a slight difference where now python 2.x will show UTC times as "UTC", but on python 3 they will end up with "UTC+00:00". The to_primitive call for DateTime fields was doing an exact match on "UTC" to determine whether to include "Z" in the resulting string. This updates that handling to recognize either of the new values. Change-Id: I71b58e8fd8fee8a57ee275ff3e0b77f165eca836 Closes-bug: #1744160<commit_after># Copyright 2010 United States Government as represented by the # Administrator of the National Aeronautics and Space Administration. # Copyright 2011 Justin Santa Barbara # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """Utilities and helper functions.""" # ISO 8601 extended time format without microseconds _ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S' def isotime(at): """Stringify time in ISO 8601 format.""" st = at.strftime(_ISO8601_TIME_FORMAT) tz = at.tzinfo.tzname(None) if at.tzinfo else 'UTC' # Need to handle either iso8601 or python UTC format st += ('Z' if tz in ['UTC', 'UTC+00:00'] else tz) return st
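For context on the guard this record introduces: the name a tzinfo object reports for a zero UTC offset is not stable across implementations and interpreter versions, which is why the patched isotime() accepts either spelling before emitting the 'Z' suffix. A rough probe — the second output is version-dependent, and the 'UTC+00:00' form seen on some interpreters is the situation the commit message describes:

from datetime import timedelta, timezone

print(timezone.utc.tzname(None))             # 'UTC'
print(timezone(timedelta(0)).tzname(None))   # 'UTC' on recent CPython; 'UTC+00:00' on some older 3.x releases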
de9873e9a4d99458ae31bd1f2c68777302e18ffd
test/skills/scheduled_skills.py
test/skills/scheduled_skills.py
from datetime import datetime, timedelta
import unittest

from mycroft.skills.scheduled_skills import ScheduledSkill
from mycroft.util.log import getLogger

__author__ = 'eward'

logger = getLogger(__name__)


class ScheduledSkillTest(unittest.TestCase):
    skill = ScheduledSkill(name='ScheduledSkillTest')

    def test_formatted_time_today_hours(self):
        date = datetime.now() + timedelta(hours=2)
        self.assertEquals(self.skill.
                          get_formatted_time(float(date.strftime('%s'))),
                          "1 hours and 59 minutes from now")

    def test_formatted_time_today_min(self):
        date = datetime.now() + timedelta(minutes=2)
        self.assertEquals(self.skill.
                          get_formatted_time(float(date.strftime('%s'))),
                          "1 minutes and 59 seconds from now")

    def test_formatted_time_days(self):
        date = datetime.now() + timedelta(days=2)
        self.assertEquals(self.skill.
                          get_formatted_time(float(date.strftime('%s'))),
                          date.strftime("%A, %B %d, %Y at %H:%M"))
from datetime import datetime, timedelta
import unittest

from mycroft.skills.scheduled_skills import ScheduledSkill
from mycroft.util.log import getLogger

__author__ = 'eward'

logger = getLogger(__name__)


class ScheduledSkillTest(unittest.TestCase):
    skill = ScheduledSkill(name='ScheduledSkillTest')

    def test_formatted_time_today_hours(self):
        date = datetime.now() + timedelta(hours=2)
        self.assertEquals(self.skill.
                          get_formatted_time(float(date.strftime('%s'))),
                          "1 hours and 59 minutes from now")

    def test_formatted_time_today_min(self):
        date = datetime.now() + timedelta(minutes=2)
        self.assertEquals(self.skill.
                          get_formatted_time(float(date.strftime('%s'))),
                          "1 minutes and 59 seconds from now")

    def test_formatted_time_days(self):
        date = datetime.now() + timedelta(days=2)
        self.assertEquals(self.skill.
                          get_formatted_time(float(date.strftime('%s'))),
                          date.strftime("%d %B, %Y at %H:%M"))
Correct test criteria for time format for scheduled skill.
Correct test criteria for time format for scheduled skill. Now matches current behaviour, previous behaviour is not a good idea since it depended on Locale.
Python
apache-2.0
aatchison/mycroft-core,Dark5ide/mycroft-core,linuxipho/mycroft-core,MycroftAI/mycroft-core,forslund/mycroft-core,aatchison/mycroft-core,forslund/mycroft-core,linuxipho/mycroft-core,MycroftAI/mycroft-core,Dark5ide/mycroft-core
from datetime import datetime, timedelta import unittest from mycroft.skills.scheduled_skills import ScheduledSkill from mycroft.util.log import getLogger __author__ = 'eward' logger = getLogger(__name__) class ScheduledSkillTest(unittest.TestCase): skill = ScheduledSkill(name='ScheduledSkillTest') def test_formatted_time_today_hours(self): date = datetime.now() + timedelta(hours=2) self.assertEquals(self.skill. get_formatted_time(float(date.strftime('%s'))), "1 hours and 59 minutes from now") def test_formatted_time_today_min(self): date = datetime.now() + timedelta(minutes=2) self.assertEquals(self.skill. get_formatted_time(float(date.strftime('%s'))), "1 minutes and 59 seconds from now") def test_formatted_time_days(self): date = datetime.now() + timedelta(days=2) self.assertEquals(self.skill. get_formatted_time(float(date.strftime('%s'))), date.strftime("%A, %B %d, %Y at %H:%M")) Correct test criteria for time format for scheduled skill. Now matches current behaviour, previous behaviour is not a good idea since it depended on Locale.
from datetime import datetime, timedelta import unittest from mycroft.skills.scheduled_skills import ScheduledSkill from mycroft.util.log import getLogger __author__ = 'eward' logger = getLogger(__name__) class ScheduledSkillTest(unittest.TestCase): skill = ScheduledSkill(name='ScheduledSkillTest') def test_formatted_time_today_hours(self): date = datetime.now() + timedelta(hours=2) self.assertEquals(self.skill. get_formatted_time(float(date.strftime('%s'))), "1 hours and 59 minutes from now") def test_formatted_time_today_min(self): date = datetime.now() + timedelta(minutes=2) self.assertEquals(self.skill. get_formatted_time(float(date.strftime('%s'))), "1 minutes and 59 seconds from now") def test_formatted_time_days(self): date = datetime.now() + timedelta(days=2) self.assertEquals(self.skill. get_formatted_time(float(date.strftime('%s'))), date.strftime("%d %B, %Y at %H:%M"))
<commit_before>from datetime import datetime, timedelta import unittest from mycroft.skills.scheduled_skills import ScheduledSkill from mycroft.util.log import getLogger __author__ = 'eward' logger = getLogger(__name__) class ScheduledSkillTest(unittest.TestCase): skill = ScheduledSkill(name='ScheduledSkillTest') def test_formatted_time_today_hours(self): date = datetime.now() + timedelta(hours=2) self.assertEquals(self.skill. get_formatted_time(float(date.strftime('%s'))), "1 hours and 59 minutes from now") def test_formatted_time_today_min(self): date = datetime.now() + timedelta(minutes=2) self.assertEquals(self.skill. get_formatted_time(float(date.strftime('%s'))), "1 minutes and 59 seconds from now") def test_formatted_time_days(self): date = datetime.now() + timedelta(days=2) self.assertEquals(self.skill. get_formatted_time(float(date.strftime('%s'))), date.strftime("%A, %B %d, %Y at %H:%M")) <commit_msg>Correct test criteria for time format for scheduled skill. Now matches current behaviour, previous behaviour is not a good idea since it depended on Locale.<commit_after>
from datetime import datetime, timedelta import unittest from mycroft.skills.scheduled_skills import ScheduledSkill from mycroft.util.log import getLogger __author__ = 'eward' logger = getLogger(__name__) class ScheduledSkillTest(unittest.TestCase): skill = ScheduledSkill(name='ScheduledSkillTest') def test_formatted_time_today_hours(self): date = datetime.now() + timedelta(hours=2) self.assertEquals(self.skill. get_formatted_time(float(date.strftime('%s'))), "1 hours and 59 minutes from now") def test_formatted_time_today_min(self): date = datetime.now() + timedelta(minutes=2) self.assertEquals(self.skill. get_formatted_time(float(date.strftime('%s'))), "1 minutes and 59 seconds from now") def test_formatted_time_days(self): date = datetime.now() + timedelta(days=2) self.assertEquals(self.skill. get_formatted_time(float(date.strftime('%s'))), date.strftime("%d %B, %Y at %H:%M"))
from datetime import datetime, timedelta import unittest from mycroft.skills.scheduled_skills import ScheduledSkill from mycroft.util.log import getLogger __author__ = 'eward' logger = getLogger(__name__) class ScheduledSkillTest(unittest.TestCase): skill = ScheduledSkill(name='ScheduledSkillTest') def test_formatted_time_today_hours(self): date = datetime.now() + timedelta(hours=2) self.assertEquals(self.skill. get_formatted_time(float(date.strftime('%s'))), "1 hours and 59 minutes from now") def test_formatted_time_today_min(self): date = datetime.now() + timedelta(minutes=2) self.assertEquals(self.skill. get_formatted_time(float(date.strftime('%s'))), "1 minutes and 59 seconds from now") def test_formatted_time_days(self): date = datetime.now() + timedelta(days=2) self.assertEquals(self.skill. get_formatted_time(float(date.strftime('%s'))), date.strftime("%A, %B %d, %Y at %H:%M")) Correct test criteria for time format for scheduled skill. Now matches current behaviour, previous behaviour is not a good idea since it depended on Locale.from datetime import datetime, timedelta import unittest from mycroft.skills.scheduled_skills import ScheduledSkill from mycroft.util.log import getLogger __author__ = 'eward' logger = getLogger(__name__) class ScheduledSkillTest(unittest.TestCase): skill = ScheduledSkill(name='ScheduledSkillTest') def test_formatted_time_today_hours(self): date = datetime.now() + timedelta(hours=2) self.assertEquals(self.skill. get_formatted_time(float(date.strftime('%s'))), "1 hours and 59 minutes from now") def test_formatted_time_today_min(self): date = datetime.now() + timedelta(minutes=2) self.assertEquals(self.skill. get_formatted_time(float(date.strftime('%s'))), "1 minutes and 59 seconds from now") def test_formatted_time_days(self): date = datetime.now() + timedelta(days=2) self.assertEquals(self.skill. get_formatted_time(float(date.strftime('%s'))), date.strftime("%d %B, %Y at %H:%M"))
<commit_before>from datetime import datetime, timedelta import unittest from mycroft.skills.scheduled_skills import ScheduledSkill from mycroft.util.log import getLogger __author__ = 'eward' logger = getLogger(__name__) class ScheduledSkillTest(unittest.TestCase): skill = ScheduledSkill(name='ScheduledSkillTest') def test_formatted_time_today_hours(self): date = datetime.now() + timedelta(hours=2) self.assertEquals(self.skill. get_formatted_time(float(date.strftime('%s'))), "1 hours and 59 minutes from now") def test_formatted_time_today_min(self): date = datetime.now() + timedelta(minutes=2) self.assertEquals(self.skill. get_formatted_time(float(date.strftime('%s'))), "1 minutes and 59 seconds from now") def test_formatted_time_days(self): date = datetime.now() + timedelta(days=2) self.assertEquals(self.skill. get_formatted_time(float(date.strftime('%s'))), date.strftime("%A, %B %d, %Y at %H:%M")) <commit_msg>Correct test criteria for time format for scheduled skill. Now matches current behaviour, previous behaviour is not a good idea since it depended on Locale.<commit_after>from datetime import datetime, timedelta import unittest from mycroft.skills.scheduled_skills import ScheduledSkill from mycroft.util.log import getLogger __author__ = 'eward' logger = getLogger(__name__) class ScheduledSkillTest(unittest.TestCase): skill = ScheduledSkill(name='ScheduledSkillTest') def test_formatted_time_today_hours(self): date = datetime.now() + timedelta(hours=2) self.assertEquals(self.skill. get_formatted_time(float(date.strftime('%s'))), "1 hours and 59 minutes from now") def test_formatted_time_today_min(self): date = datetime.now() + timedelta(minutes=2) self.assertEquals(self.skill. get_formatted_time(float(date.strftime('%s'))), "1 minutes and 59 seconds from now") def test_formatted_time_days(self): date = datetime.now() + timedelta(days=2) self.assertEquals(self.skill. get_formatted_time(float(date.strftime('%s'))), date.strftime("%d %B, %Y at %H:%M"))
e83df69d675eec01bf3253a2c7911cedb0c081af
tests/test_queryable.py
tests/test_queryable.py
from busbus.queryable import Queryable def test_queryable(): q = Queryable(xrange(10)).where(lambda x: x % 5 == 0) assert next(q) == 0 assert next(q) == 5
from busbus.queryable import Queryable from six.moves import range def test_queryable(): q = Queryable(range(10)).where(lambda x: x % 5 == 0) assert next(q) == 0 assert next(q) == 5
Fix basic test case for Queryable class in Python 3
Fix basic test case for Queryable class in Python 3
Python
mit
spaceboats/busbus
from busbus.queryable import Queryable def test_queryable(): q = Queryable(xrange(10)).where(lambda x: x % 5 == 0) assert next(q) == 0 assert next(q) == 5 Fix basic test case for Queryable class in Python 3
from busbus.queryable import Queryable from six.moves import range def test_queryable(): q = Queryable(range(10)).where(lambda x: x % 5 == 0) assert next(q) == 0 assert next(q) == 5
<commit_before>from busbus.queryable import Queryable def test_queryable(): q = Queryable(xrange(10)).where(lambda x: x % 5 == 0) assert next(q) == 0 assert next(q) == 5 <commit_msg>Fix basic test case for Queryable class in Python 3<commit_after>
from busbus.queryable import Queryable from six.moves import range def test_queryable(): q = Queryable(range(10)).where(lambda x: x % 5 == 0) assert next(q) == 0 assert next(q) == 5
from busbus.queryable import Queryable def test_queryable(): q = Queryable(xrange(10)).where(lambda x: x % 5 == 0) assert next(q) == 0 assert next(q) == 5 Fix basic test case for Queryable class in Python 3from busbus.queryable import Queryable from six.moves import range def test_queryable(): q = Queryable(range(10)).where(lambda x: x % 5 == 0) assert next(q) == 0 assert next(q) == 5
<commit_before>from busbus.queryable import Queryable def test_queryable(): q = Queryable(xrange(10)).where(lambda x: x % 5 == 0) assert next(q) == 0 assert next(q) == 5 <commit_msg>Fix basic test case for Queryable class in Python 3<commit_after>from busbus.queryable import Queryable from six.moves import range def test_queryable(): q = Queryable(range(10)).where(lambda x: x % 5 == 0) assert next(q) == 0 assert next(q) == 5
c107f80ba57847b8d195a9abeffd3d14d3048fe6
numscons/__init__.py
numscons/__init__.py
# XXX those are needed by the scons command only... from core.misc import get_scons_path, get_scons_build_dir, \ get_scons_configres_dir, get_scons_configres_filename # XXX those should not be needed by the scons command only... from core.extension import get_python_inc, get_pythonlib_dir # Those functions really belong to the public API from core.numpyenv import GetNumpyEnvironment from core.libinfo_scons import NumpyCheckLibAndHeader from checkers import CheckF77BLAS, CheckCBLAS, CheckCLAPACK, CheckF77LAPACK, CheckFFT from fortran_scons import CheckF77Mangling #import tools
# XXX those are needed by the scons command only... from core.misc import get_scons_path, get_scons_build_dir, \ get_scons_configres_dir, get_scons_configres_filename from core.libinfo import get_paths as scons_get_paths # XXX those should not be needed by the scons command only... from core.extension import get_python_inc, get_pythonlib_dir # Those functions really belong to the public API from core.numpyenv import GetNumpyEnvironment from core.libinfo_scons import NumpyCheckLibAndHeader from checkers import CheckF77BLAS, CheckCBLAS, CheckCLAPACK, CheckF77LAPACK, CheckFFT from fortran_scons import CheckF77Mangling #import tools # XXX: this is ugly, better find the mathlibs with a checker # XXX: this had nothing to do here, too... def scons_get_mathlib(env): from numpy.distutils.misc_util import get_mathlibs path_list = scons_get_paths(env['include_bootstrap']) + [None] for i in path_list: try: mlib = get_mathlibs(i) return mlib except IOError: pass raise RuntimeError("FIXME: no mlib found ?")
Add more missing functions in numscons namespace
Add more missing functions in numscons namespace
Python
bsd-3-clause
cournape/numscons,cournape/numscons,cournape/numscons
# XXX those are needed by the scons command only... from core.misc import get_scons_path, get_scons_build_dir, \ get_scons_configres_dir, get_scons_configres_filename # XXX those should not be needed by the scons command only... from core.extension import get_python_inc, get_pythonlib_dir # Those functions really belong to the public API from core.numpyenv import GetNumpyEnvironment from core.libinfo_scons import NumpyCheckLibAndHeader from checkers import CheckF77BLAS, CheckCBLAS, CheckCLAPACK, CheckF77LAPACK, CheckFFT from fortran_scons import CheckF77Mangling #import tools Add more missing functions in numscons namespace
# XXX those are needed by the scons command only... from core.misc import get_scons_path, get_scons_build_dir, \ get_scons_configres_dir, get_scons_configres_filename from core.libinfo import get_paths as scons_get_paths # XXX those should not be needed by the scons command only... from core.extension import get_python_inc, get_pythonlib_dir # Those functions really belong to the public API from core.numpyenv import GetNumpyEnvironment from core.libinfo_scons import NumpyCheckLibAndHeader from checkers import CheckF77BLAS, CheckCBLAS, CheckCLAPACK, CheckF77LAPACK, CheckFFT from fortran_scons import CheckF77Mangling #import tools # XXX: this is ugly, better find the mathlibs with a checker # XXX: this had nothing to do here, too... def scons_get_mathlib(env): from numpy.distutils.misc_util import get_mathlibs path_list = scons_get_paths(env['include_bootstrap']) + [None] for i in path_list: try: mlib = get_mathlibs(i) return mlib except IOError: pass raise RuntimeError("FIXME: no mlib found ?")
<commit_before># XXX those are needed by the scons command only... from core.misc import get_scons_path, get_scons_build_dir, \ get_scons_configres_dir, get_scons_configres_filename # XXX those should not be needed by the scons command only... from core.extension import get_python_inc, get_pythonlib_dir # Those functions really belong to the public API from core.numpyenv import GetNumpyEnvironment from core.libinfo_scons import NumpyCheckLibAndHeader from checkers import CheckF77BLAS, CheckCBLAS, CheckCLAPACK, CheckF77LAPACK, CheckFFT from fortran_scons import CheckF77Mangling #import tools <commit_msg>Add more missing functions in numscons namespace<commit_after>
# XXX those are needed by the scons command only... from core.misc import get_scons_path, get_scons_build_dir, \ get_scons_configres_dir, get_scons_configres_filename from core.libinfo import get_paths as scons_get_paths # XXX those should not be needed by the scons command only... from core.extension import get_python_inc, get_pythonlib_dir # Those functions really belong to the public API from core.numpyenv import GetNumpyEnvironment from core.libinfo_scons import NumpyCheckLibAndHeader from checkers import CheckF77BLAS, CheckCBLAS, CheckCLAPACK, CheckF77LAPACK, CheckFFT from fortran_scons import CheckF77Mangling #import tools # XXX: this is ugly, better find the mathlibs with a checker # XXX: this had nothing to do here, too... def scons_get_mathlib(env): from numpy.distutils.misc_util import get_mathlibs path_list = scons_get_paths(env['include_bootstrap']) + [None] for i in path_list: try: mlib = get_mathlibs(i) return mlib except IOError: pass raise RuntimeError("FIXME: no mlib found ?")
# XXX those are needed by the scons command only... from core.misc import get_scons_path, get_scons_build_dir, \ get_scons_configres_dir, get_scons_configres_filename # XXX those should not be needed by the scons command only... from core.extension import get_python_inc, get_pythonlib_dir # Those functions really belong to the public API from core.numpyenv import GetNumpyEnvironment from core.libinfo_scons import NumpyCheckLibAndHeader from checkers import CheckF77BLAS, CheckCBLAS, CheckCLAPACK, CheckF77LAPACK, CheckFFT from fortran_scons import CheckF77Mangling #import tools Add more missing functions in numscons namespace# XXX those are needed by the scons command only... from core.misc import get_scons_path, get_scons_build_dir, \ get_scons_configres_dir, get_scons_configres_filename from core.libinfo import get_paths as scons_get_paths # XXX those should not be needed by the scons command only... from core.extension import get_python_inc, get_pythonlib_dir # Those functions really belong to the public API from core.numpyenv import GetNumpyEnvironment from core.libinfo_scons import NumpyCheckLibAndHeader from checkers import CheckF77BLAS, CheckCBLAS, CheckCLAPACK, CheckF77LAPACK, CheckFFT from fortran_scons import CheckF77Mangling #import tools # XXX: this is ugly, better find the mathlibs with a checker # XXX: this had nothing to do here, too... def scons_get_mathlib(env): from numpy.distutils.misc_util import get_mathlibs path_list = scons_get_paths(env['include_bootstrap']) + [None] for i in path_list: try: mlib = get_mathlibs(i) return mlib except IOError: pass raise RuntimeError("FIXME: no mlib found ?")
<commit_before># XXX those are needed by the scons command only... from core.misc import get_scons_path, get_scons_build_dir, \ get_scons_configres_dir, get_scons_configres_filename # XXX those should not be needed by the scons command only... from core.extension import get_python_inc, get_pythonlib_dir # Those functions really belong to the public API from core.numpyenv import GetNumpyEnvironment from core.libinfo_scons import NumpyCheckLibAndHeader from checkers import CheckF77BLAS, CheckCBLAS, CheckCLAPACK, CheckF77LAPACK, CheckFFT from fortran_scons import CheckF77Mangling #import tools <commit_msg>Add more missing functions in numscons namespace<commit_after># XXX those are needed by the scons command only... from core.misc import get_scons_path, get_scons_build_dir, \ get_scons_configres_dir, get_scons_configres_filename from core.libinfo import get_paths as scons_get_paths # XXX those should not be needed by the scons command only... from core.extension import get_python_inc, get_pythonlib_dir # Those functions really belong to the public API from core.numpyenv import GetNumpyEnvironment from core.libinfo_scons import NumpyCheckLibAndHeader from checkers import CheckF77BLAS, CheckCBLAS, CheckCLAPACK, CheckF77LAPACK, CheckFFT from fortran_scons import CheckF77Mangling #import tools # XXX: this is ugly, better find the mathlibs with a checker # XXX: this had nothing to do here, too... def scons_get_mathlib(env): from numpy.distutils.misc_util import get_mathlibs path_list = scons_get_paths(env['include_bootstrap']) + [None] for i in path_list: try: mlib = get_mathlibs(i) return mlib except IOError: pass raise RuntimeError("FIXME: no mlib found ?")
de21f7802cf9124fc2bb15936d35710946deeb18
examples/asyncio/await.py
examples/asyncio/await.py
import asyncio from rx import Observable async def hello_world(): stream = Observable.just("Hello, world!") n = await stream print(n) loop = asyncio.get_event_loop() # Blocking call which returns when the hello_world() coroutine is done loop.run_until_complete(hello_world()) loop.close()
import asyncio from rx import Observable stream = Observable.just("Hello, world!") async def hello_world(): n = await stream print(n) loop = asyncio.get_event_loop() # Blocking call which returns when the hello_world() coroutine is done loop.run_until_complete(hello_world()) loop.close()
Move stream out of function
Move stream out of function
Python
mit
ReactiveX/RxPY,ReactiveX/RxPY
import asyncio from rx import Observable async def hello_world(): stream = Observable.just("Hello, world!") n = await stream print(n) loop = asyncio.get_event_loop() # Blocking call which returns when the hello_world() coroutine is done loop.run_until_complete(hello_world()) loop.close() Move stream out of function
import asyncio from rx import Observable stream = Observable.just("Hello, world!") async def hello_world(): n = await stream print(n) loop = asyncio.get_event_loop() # Blocking call which returns when the hello_world() coroutine is done loop.run_until_complete(hello_world()) loop.close()
<commit_before>import asyncio from rx import Observable async def hello_world(): stream = Observable.just("Hello, world!") n = await stream print(n) loop = asyncio.get_event_loop() # Blocking call which returns when the hello_world() coroutine is done loop.run_until_complete(hello_world()) loop.close() <commit_msg>Move stream out of function<commit_after>
import asyncio from rx import Observable stream = Observable.just("Hello, world!") async def hello_world(): n = await stream print(n) loop = asyncio.get_event_loop() # Blocking call which returns when the hello_world() coroutine is done loop.run_until_complete(hello_world()) loop.close()
import asyncio from rx import Observable async def hello_world(): stream = Observable.just("Hello, world!") n = await stream print(n) loop = asyncio.get_event_loop() # Blocking call which returns when the hello_world() coroutine is done loop.run_until_complete(hello_world()) loop.close() Move stream out of functionimport asyncio from rx import Observable stream = Observable.just("Hello, world!") async def hello_world(): n = await stream print(n) loop = asyncio.get_event_loop() # Blocking call which returns when the hello_world() coroutine is done loop.run_until_complete(hello_world()) loop.close()
<commit_before>import asyncio from rx import Observable async def hello_world(): stream = Observable.just("Hello, world!") n = await stream print(n) loop = asyncio.get_event_loop() # Blocking call which returns when the hello_world() coroutine is done loop.run_until_complete(hello_world()) loop.close() <commit_msg>Move stream out of function<commit_after>import asyncio from rx import Observable stream = Observable.just("Hello, world!") async def hello_world(): n = await stream print(n) loop = asyncio.get_event_loop() # Blocking call which returns when the hello_world() coroutine is done loop.run_until_complete(hello_world()) loop.close()
0232afac110e2cf9f841e861bd9622bcaf79616a
tensorbayes/distributions.py
tensorbayes/distributions.py
""" Assumes softplus activations for gaussian """ import tensorflow as tf import numpy as np def log_bernoulli(x, logits, eps=0.0, axis=-1): return log_bernoulli_with_logits(x, logits, eps, axis) def log_bernoulli_with_logits(x, logits, eps=0.0, axis=-1): if eps > 0.0: max_val = np.log(1.0 - eps) - np.log(eps) logits = tf.clip_by_value(logits, -max_val, max_val, name='clipped_logit') return -tf.reduce_sum( tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=x), axis) def log_normal(x, mu, var, eps=0.0, axis=-1): if eps > 0.0: var = tf.add(var, eps, name='clipped_var') return -0.5 * tf.reduce_sum( tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, axis)
""" Assumes softplus activations for gaussian """ import tensorflow as tf import numpy as np def log_bernoulli(x, logits, eps=0.0, axis=-1): return log_bernoulli_with_logits(x, logits, eps, axis) def log_bernoulli_with_logits(x, logits, eps=0.0, axis=-1): if eps > 0.0: max_val = np.log(1.0 - eps) - np.log(eps) logits = tf.clip_by_value(logits, -max_val, max_val, name='clipped_logit') return -tf.reduce_sum( tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=x), axis) def log_normal(x, mu, var, eps=0.0, axis=-1): if eps > 0.0: var = tf.add(var, eps, name='clipped_var') return -0.5 * tf.reduce_sum( tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, axis) def kl_normal(qm, qv, pm, pv, eps=0.0, axis=-1): if eps > 0.0: qv = tf.add(qv, eps, name='clipped_var1') pv = tf.add(qv, eps, name='clipped_var2') return 0.5 * tf.reduce_sum(tf.log(pv) - tf.log(qv) + qv / pv + tf.square(qm - pm) / pv - 1, axis=-1)
Add tf implementation of KL between normals
Add tf implementation of KL between normals
Python
mit
RuiShu/tensorbayes
""" Assumes softplus activations for gaussian """ import tensorflow as tf import numpy as np def log_bernoulli(x, logits, eps=0.0, axis=-1): return log_bernoulli_with_logits(x, logits, eps, axis) def log_bernoulli_with_logits(x, logits, eps=0.0, axis=-1): if eps > 0.0: max_val = np.log(1.0 - eps) - np.log(eps) logits = tf.clip_by_value(logits, -max_val, max_val, name='clipped_logit') return -tf.reduce_sum( tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=x), axis) def log_normal(x, mu, var, eps=0.0, axis=-1): if eps > 0.0: var = tf.add(var, eps, name='clipped_var') return -0.5 * tf.reduce_sum( tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, axis) Add tf implementation of KL between normals
""" Assumes softplus activations for gaussian """ import tensorflow as tf import numpy as np def log_bernoulli(x, logits, eps=0.0, axis=-1): return log_bernoulli_with_logits(x, logits, eps, axis) def log_bernoulli_with_logits(x, logits, eps=0.0, axis=-1): if eps > 0.0: max_val = np.log(1.0 - eps) - np.log(eps) logits = tf.clip_by_value(logits, -max_val, max_val, name='clipped_logit') return -tf.reduce_sum( tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=x), axis) def log_normal(x, mu, var, eps=0.0, axis=-1): if eps > 0.0: var = tf.add(var, eps, name='clipped_var') return -0.5 * tf.reduce_sum( tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, axis) def kl_normal(qm, qv, pm, pv, eps=0.0, axis=-1): if eps > 0.0: qv = tf.add(qv, eps, name='clipped_var1') pv = tf.add(qv, eps, name='clipped_var2') return 0.5 * tf.reduce_sum(tf.log(pv) - tf.log(qv) + qv / pv + tf.square(qm - pm) / pv - 1, axis=-1)
<commit_before>""" Assumes softplus activations for gaussian """ import tensorflow as tf import numpy as np def log_bernoulli(x, logits, eps=0.0, axis=-1): return log_bernoulli_with_logits(x, logits, eps, axis) def log_bernoulli_with_logits(x, logits, eps=0.0, axis=-1): if eps > 0.0: max_val = np.log(1.0 - eps) - np.log(eps) logits = tf.clip_by_value(logits, -max_val, max_val, name='clipped_logit') return -tf.reduce_sum( tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=x), axis) def log_normal(x, mu, var, eps=0.0, axis=-1): if eps > 0.0: var = tf.add(var, eps, name='clipped_var') return -0.5 * tf.reduce_sum( tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, axis) <commit_msg>Add tf implementation of KL between normals<commit_after>
""" Assumes softplus activations for gaussian """ import tensorflow as tf import numpy as np def log_bernoulli(x, logits, eps=0.0, axis=-1): return log_bernoulli_with_logits(x, logits, eps, axis) def log_bernoulli_with_logits(x, logits, eps=0.0, axis=-1): if eps > 0.0: max_val = np.log(1.0 - eps) - np.log(eps) logits = tf.clip_by_value(logits, -max_val, max_val, name='clipped_logit') return -tf.reduce_sum( tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=x), axis) def log_normal(x, mu, var, eps=0.0, axis=-1): if eps > 0.0: var = tf.add(var, eps, name='clipped_var') return -0.5 * tf.reduce_sum( tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, axis) def kl_normal(qm, qv, pm, pv, eps=0.0, axis=-1): if eps > 0.0: qv = tf.add(qv, eps, name='clipped_var1') pv = tf.add(qv, eps, name='clipped_var2') return 0.5 * tf.reduce_sum(tf.log(pv) - tf.log(qv) + qv / pv + tf.square(qm - pm) / pv - 1, axis=-1)
""" Assumes softplus activations for gaussian """ import tensorflow as tf import numpy as np def log_bernoulli(x, logits, eps=0.0, axis=-1): return log_bernoulli_with_logits(x, logits, eps, axis) def log_bernoulli_with_logits(x, logits, eps=0.0, axis=-1): if eps > 0.0: max_val = np.log(1.0 - eps) - np.log(eps) logits = tf.clip_by_value(logits, -max_val, max_val, name='clipped_logit') return -tf.reduce_sum( tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=x), axis) def log_normal(x, mu, var, eps=0.0, axis=-1): if eps > 0.0: var = tf.add(var, eps, name='clipped_var') return -0.5 * tf.reduce_sum( tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, axis) Add tf implementation of KL between normals""" Assumes softplus activations for gaussian """ import tensorflow as tf import numpy as np def log_bernoulli(x, logits, eps=0.0, axis=-1): return log_bernoulli_with_logits(x, logits, eps, axis) def log_bernoulli_with_logits(x, logits, eps=0.0, axis=-1): if eps > 0.0: max_val = np.log(1.0 - eps) - np.log(eps) logits = tf.clip_by_value(logits, -max_val, max_val, name='clipped_logit') return -tf.reduce_sum( tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=x), axis) def log_normal(x, mu, var, eps=0.0, axis=-1): if eps > 0.0: var = tf.add(var, eps, name='clipped_var') return -0.5 * tf.reduce_sum( tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, axis) def kl_normal(qm, qv, pm, pv, eps=0.0, axis=-1): if eps > 0.0: qv = tf.add(qv, eps, name='clipped_var1') pv = tf.add(qv, eps, name='clipped_var2') return 0.5 * tf.reduce_sum(tf.log(pv) - tf.log(qv) + qv / pv + tf.square(qm - pm) / pv - 1, axis=-1)
<commit_before>""" Assumes softplus activations for gaussian """ import tensorflow as tf import numpy as np def log_bernoulli(x, logits, eps=0.0, axis=-1): return log_bernoulli_with_logits(x, logits, eps, axis) def log_bernoulli_with_logits(x, logits, eps=0.0, axis=-1): if eps > 0.0: max_val = np.log(1.0 - eps) - np.log(eps) logits = tf.clip_by_value(logits, -max_val, max_val, name='clipped_logit') return -tf.reduce_sum( tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=x), axis) def log_normal(x, mu, var, eps=0.0, axis=-1): if eps > 0.0: var = tf.add(var, eps, name='clipped_var') return -0.5 * tf.reduce_sum( tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, axis) <commit_msg>Add tf implementation of KL between normals<commit_after>""" Assumes softplus activations for gaussian """ import tensorflow as tf import numpy as np def log_bernoulli(x, logits, eps=0.0, axis=-1): return log_bernoulli_with_logits(x, logits, eps, axis) def log_bernoulli_with_logits(x, logits, eps=0.0, axis=-1): if eps > 0.0: max_val = np.log(1.0 - eps) - np.log(eps) logits = tf.clip_by_value(logits, -max_val, max_val, name='clipped_logit') return -tf.reduce_sum( tf.nn.sigmoid_cross_entropy_with_logits(logits=logits, labels=x), axis) def log_normal(x, mu, var, eps=0.0, axis=-1): if eps > 0.0: var = tf.add(var, eps, name='clipped_var') return -0.5 * tf.reduce_sum( tf.log(2 * np.pi) + tf.log(var) + tf.square(x - mu) / var, axis) def kl_normal(qm, qv, pm, pv, eps=0.0, axis=-1): if eps > 0.0: qv = tf.add(qv, eps, name='clipped_var1') pv = tf.add(qv, eps, name='clipped_var2') return 0.5 * tf.reduce_sum(tf.log(pv) - tf.log(qv) + qv / pv + tf.square(qm - pm) / pv - 1, axis=-1)
8c7d1aa097c617ce72cff792a4e19c6168d92e07
source/champollion/__init__.py
source/champollion/__init__.py
# :coding: utf-8 import os from ._version import __version__ from directive import ( AutoDataDirective, AutoFunctionDirective, AutoClassDirective ) from viewcode import ( add_source_code_links, create_code_pages, create_missing_code_link ) import parser def parse_js_source(app): """Parse the javascript source path.""" path = os.path.abspath(app.config.js_source) app.env.js_environment = parser.get_environment(path) def setup(app): """Register the javascript autodoc directives.""" app.add_config_value("js_source", None, "env") app.connect("builder-inited", parse_js_source) app.connect("doctree-read", add_source_code_links) app.connect("html-collect-pages", create_code_pages) app.connect("missing-reference", create_missing_code_link) app.add_directive_to_domain("js", "autodata", AutoDataDirective) app.add_directive_to_domain("js", "autofunction", AutoFunctionDirective) app.add_directive_to_domain("js", "autoclass", AutoClassDirective) # app.add_directive_to_domain("js", "automodule", AutoModuleDirective)
# :coding: utf-8 import os from ._version import __version__ from directive import ( AutoDataDirective, AutoFunctionDirective, AutoClassDirective ) from viewcode import ( add_source_code_links, create_code_pages, create_missing_code_link ) import parser def parse_js_source(app): """Parse the javascript source path.""" path = os.path.abspath(app.config.js_source) app.env.js_environment = parser.get_environment(path) def setup(app): """Register the javascript autodoc directives.""" app.add_config_value("js_source", None, "env") app.connect("builder-inited", parse_js_source) app.connect("doctree-read", add_source_code_links) app.connect("html-collect-pages", create_code_pages) app.connect("missing-reference", create_missing_code_link) app.add_directive_to_domain("js", "autodata", AutoDataDirective) app.add_directive_to_domain("js", "autofunction", AutoFunctionDirective) app.add_directive_to_domain("js", "autoclass", AutoClassDirective) # app.add_directive_to_domain("js", "automodule", AutoModuleDirective) return { "version": __version__ }
Return version in app setup
Return version in app setup
Python
apache-2.0
buddly27/champollion
# :coding: utf-8 import os from ._version import __version__ from directive import ( AutoDataDirective, AutoFunctionDirective, AutoClassDirective ) from viewcode import ( add_source_code_links, create_code_pages, create_missing_code_link ) import parser def parse_js_source(app): """Parse the javascript source path.""" path = os.path.abspath(app.config.js_source) app.env.js_environment = parser.get_environment(path) def setup(app): """Register the javascript autodoc directives.""" app.add_config_value("js_source", None, "env") app.connect("builder-inited", parse_js_source) app.connect("doctree-read", add_source_code_links) app.connect("html-collect-pages", create_code_pages) app.connect("missing-reference", create_missing_code_link) app.add_directive_to_domain("js", "autodata", AutoDataDirective) app.add_directive_to_domain("js", "autofunction", AutoFunctionDirective) app.add_directive_to_domain("js", "autoclass", AutoClassDirective) # app.add_directive_to_domain("js", "automodule", AutoModuleDirective) Return version in app setup
# :coding: utf-8 import os from ._version import __version__ from directive import ( AutoDataDirective, AutoFunctionDirective, AutoClassDirective ) from viewcode import ( add_source_code_links, create_code_pages, create_missing_code_link ) import parser def parse_js_source(app): """Parse the javascript source path.""" path = os.path.abspath(app.config.js_source) app.env.js_environment = parser.get_environment(path) def setup(app): """Register the javascript autodoc directives.""" app.add_config_value("js_source", None, "env") app.connect("builder-inited", parse_js_source) app.connect("doctree-read", add_source_code_links) app.connect("html-collect-pages", create_code_pages) app.connect("missing-reference", create_missing_code_link) app.add_directive_to_domain("js", "autodata", AutoDataDirective) app.add_directive_to_domain("js", "autofunction", AutoFunctionDirective) app.add_directive_to_domain("js", "autoclass", AutoClassDirective) # app.add_directive_to_domain("js", "automodule", AutoModuleDirective) return { "version": __version__ }
<commit_before># :coding: utf-8 import os from ._version import __version__ from directive import ( AutoDataDirective, AutoFunctionDirective, AutoClassDirective ) from viewcode import ( add_source_code_links, create_code_pages, create_missing_code_link ) import parser def parse_js_source(app): """Parse the javascript source path.""" path = os.path.abspath(app.config.js_source) app.env.js_environment = parser.get_environment(path) def setup(app): """Register the javascript autodoc directives.""" app.add_config_value("js_source", None, "env") app.connect("builder-inited", parse_js_source) app.connect("doctree-read", add_source_code_links) app.connect("html-collect-pages", create_code_pages) app.connect("missing-reference", create_missing_code_link) app.add_directive_to_domain("js", "autodata", AutoDataDirective) app.add_directive_to_domain("js", "autofunction", AutoFunctionDirective) app.add_directive_to_domain("js", "autoclass", AutoClassDirective) # app.add_directive_to_domain("js", "automodule", AutoModuleDirective) <commit_msg>Return version in app setup<commit_after>
# :coding: utf-8 import os from ._version import __version__ from directive import ( AutoDataDirective, AutoFunctionDirective, AutoClassDirective ) from viewcode import ( add_source_code_links, create_code_pages, create_missing_code_link ) import parser def parse_js_source(app): """Parse the javascript source path.""" path = os.path.abspath(app.config.js_source) app.env.js_environment = parser.get_environment(path) def setup(app): """Register the javascript autodoc directives.""" app.add_config_value("js_source", None, "env") app.connect("builder-inited", parse_js_source) app.connect("doctree-read", add_source_code_links) app.connect("html-collect-pages", create_code_pages) app.connect("missing-reference", create_missing_code_link) app.add_directive_to_domain("js", "autodata", AutoDataDirective) app.add_directive_to_domain("js", "autofunction", AutoFunctionDirective) app.add_directive_to_domain("js", "autoclass", AutoClassDirective) # app.add_directive_to_domain("js", "automodule", AutoModuleDirective) return { "version": __version__ }
# :coding: utf-8 import os from ._version import __version__ from directive import ( AutoDataDirective, AutoFunctionDirective, AutoClassDirective ) from viewcode import ( add_source_code_links, create_code_pages, create_missing_code_link ) import parser def parse_js_source(app): """Parse the javascript source path.""" path = os.path.abspath(app.config.js_source) app.env.js_environment = parser.get_environment(path) def setup(app): """Register the javascript autodoc directives.""" app.add_config_value("js_source", None, "env") app.connect("builder-inited", parse_js_source) app.connect("doctree-read", add_source_code_links) app.connect("html-collect-pages", create_code_pages) app.connect("missing-reference", create_missing_code_link) app.add_directive_to_domain("js", "autodata", AutoDataDirective) app.add_directive_to_domain("js", "autofunction", AutoFunctionDirective) app.add_directive_to_domain("js", "autoclass", AutoClassDirective) # app.add_directive_to_domain("js", "automodule", AutoModuleDirective) Return version in app setup# :coding: utf-8 import os from ._version import __version__ from directive import ( AutoDataDirective, AutoFunctionDirective, AutoClassDirective ) from viewcode import ( add_source_code_links, create_code_pages, create_missing_code_link ) import parser def parse_js_source(app): """Parse the javascript source path.""" path = os.path.abspath(app.config.js_source) app.env.js_environment = parser.get_environment(path) def setup(app): """Register the javascript autodoc directives.""" app.add_config_value("js_source", None, "env") app.connect("builder-inited", parse_js_source) app.connect("doctree-read", add_source_code_links) app.connect("html-collect-pages", create_code_pages) app.connect("missing-reference", create_missing_code_link) app.add_directive_to_domain("js", "autodata", AutoDataDirective) app.add_directive_to_domain("js", "autofunction", AutoFunctionDirective) app.add_directive_to_domain("js", "autoclass", AutoClassDirective) # app.add_directive_to_domain("js", "automodule", AutoModuleDirective) return { "version": __version__ }
<commit_before># :coding: utf-8 import os from ._version import __version__ from directive import ( AutoDataDirective, AutoFunctionDirective, AutoClassDirective ) from viewcode import ( add_source_code_links, create_code_pages, create_missing_code_link ) import parser def parse_js_source(app): """Parse the javascript source path.""" path = os.path.abspath(app.config.js_source) app.env.js_environment = parser.get_environment(path) def setup(app): """Register the javascript autodoc directives.""" app.add_config_value("js_source", None, "env") app.connect("builder-inited", parse_js_source) app.connect("doctree-read", add_source_code_links) app.connect("html-collect-pages", create_code_pages) app.connect("missing-reference", create_missing_code_link) app.add_directive_to_domain("js", "autodata", AutoDataDirective) app.add_directive_to_domain("js", "autofunction", AutoFunctionDirective) app.add_directive_to_domain("js", "autoclass", AutoClassDirective) # app.add_directive_to_domain("js", "automodule", AutoModuleDirective) <commit_msg>Return version in app setup<commit_after># :coding: utf-8 import os from ._version import __version__ from directive import ( AutoDataDirective, AutoFunctionDirective, AutoClassDirective ) from viewcode import ( add_source_code_links, create_code_pages, create_missing_code_link ) import parser def parse_js_source(app): """Parse the javascript source path.""" path = os.path.abspath(app.config.js_source) app.env.js_environment = parser.get_environment(path) def setup(app): """Register the javascript autodoc directives.""" app.add_config_value("js_source", None, "env") app.connect("builder-inited", parse_js_source) app.connect("doctree-read", add_source_code_links) app.connect("html-collect-pages", create_code_pages) app.connect("missing-reference", create_missing_code_link) app.add_directive_to_domain("js", "autodata", AutoDataDirective) app.add_directive_to_domain("js", "autofunction", AutoFunctionDirective) app.add_directive_to_domain("js", "autoclass", AutoClassDirective) # app.add_directive_to_domain("js", "automodule", AutoModuleDirective) return { "version": __version__ }
e0439addb7ab71a2af9b30457975bddded4e6020
web/setup.py
web/setup.py
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages PACKAGE = "patlms-web" setup( name=PACKAGE, version=0.1, description='PAT LMS web server', author='Adam Chyła', author_email='adam@chyla.org', license='GPLv3', url='https://github.com/chyla/pat-lms', packages=find_packages(exclude=['tests.*', 'tests', 'test.*', 'test*']), install_requires=['Django>=1.8.0', ], setup_requires=['pytest-runner', ], tests_require=['pytest', ], classifiers=[ 'Environment :: Web Environment', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Framework :: Django', ], zip_safe=False, )
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages PACKAGE = "patlms-web" setup( name=PACKAGE, version=0.1, description='PAT LMS web server', author='Adam Chyła', author_email='adam@chyla.org', license='GPLv3', url='https://github.com/chyla/pat-lms', packages=find_packages(exclude=['tests.*', 'tests', 'test.*', 'test*']), install_requires=['Django==1.8.13', ], setup_requires=['pytest-runner', ], tests_require=['pytest', 'mock', ], classifiers=[ 'Environment :: Web Environment', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Framework :: Django', ], zip_safe=False, )
Set Django to version 1.8.13 and add mock as dependency
Set Django to version 1.8.13 and add mock as dependency
Python
mit
chyla/pat-lms,chyla/pat-lms,chyla/slas,chyla/slas,chyla/pat-lms,chyla/slas,chyla/slas,chyla/pat-lms,chyla/pat-lms,chyla/slas,chyla/pat-lms,chyla/slas,chyla/pat-lms,chyla/slas
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages PACKAGE = "patlms-web" setup( name=PACKAGE, version=0.1, description='PAT LMS web server', author='Adam Chyła', author_email='adam@chyla.org', license='GPLv3', url='https://github.com/chyla/pat-lms', packages=find_packages(exclude=['tests.*', 'tests', 'test.*', 'test*']), install_requires=['Django>=1.8.0', ], setup_requires=['pytest-runner', ], tests_require=['pytest', ], classifiers=[ 'Environment :: Web Environment', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Framework :: Django', ], zip_safe=False, ) Set Django to version 1.8.13 and add mock as dependency
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages PACKAGE = "patlms-web" setup( name=PACKAGE, version=0.1, description='PAT LMS web server', author='Adam Chyła', author_email='adam@chyla.org', license='GPLv3', url='https://github.com/chyla/pat-lms', packages=find_packages(exclude=['tests.*', 'tests', 'test.*', 'test*']), install_requires=['Django==1.8.13', ], setup_requires=['pytest-runner', ], tests_require=['pytest', 'mock', ], classifiers=[ 'Environment :: Web Environment', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Framework :: Django', ], zip_safe=False, )
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages PACKAGE = "patlms-web" setup( name=PACKAGE, version=0.1, description='PAT LMS web server', author='Adam Chyła', author_email='adam@chyla.org', license='GPLv3', url='https://github.com/chyla/pat-lms', packages=find_packages(exclude=['tests.*', 'tests', 'test.*', 'test*']), install_requires=['Django>=1.8.0', ], setup_requires=['pytest-runner', ], tests_require=['pytest', ], classifiers=[ 'Environment :: Web Environment', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Framework :: Django', ], zip_safe=False, ) <commit_msg>Set Django to version 1.8.13 and add mock as dependency<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages PACKAGE = "patlms-web" setup( name=PACKAGE, version=0.1, description='PAT LMS web server', author='Adam Chyła', author_email='adam@chyla.org', license='GPLv3', url='https://github.com/chyla/pat-lms', packages=find_packages(exclude=['tests.*', 'tests', 'test.*', 'test*']), install_requires=['Django==1.8.13', ], setup_requires=['pytest-runner', ], tests_require=['pytest', 'mock', ], classifiers=[ 'Environment :: Web Environment', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Framework :: Django', ], zip_safe=False, )
#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages PACKAGE = "patlms-web" setup( name=PACKAGE, version=0.1, description='PAT LMS web server', author='Adam Chyła', author_email='adam@chyla.org', license='GPLv3', url='https://github.com/chyla/pat-lms', packages=find_packages(exclude=['tests.*', 'tests', 'test.*', 'test*']), install_requires=['Django>=1.8.0', ], setup_requires=['pytest-runner', ], tests_require=['pytest', ], classifiers=[ 'Environment :: Web Environment', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Framework :: Django', ], zip_safe=False, ) Set Django to version 1.8.13 and add mock as dependency#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages PACKAGE = "patlms-web" setup( name=PACKAGE, version=0.1, description='PAT LMS web server', author='Adam Chyła', author_email='adam@chyla.org', license='GPLv3', url='https://github.com/chyla/pat-lms', packages=find_packages(exclude=['tests.*', 'tests', 'test.*', 'test*']), install_requires=['Django==1.8.13', ], setup_requires=['pytest-runner', ], tests_require=['pytest', 'mock', ], classifiers=[ 'Environment :: Web Environment', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Framework :: Django', ], zip_safe=False, )
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages PACKAGE = "patlms-web" setup( name=PACKAGE, version=0.1, description='PAT LMS web server', author='Adam Chyła', author_email='adam@chyla.org', license='GPLv3', url='https://github.com/chyla/pat-lms', packages=find_packages(exclude=['tests.*', 'tests', 'test.*', 'test*']), install_requires=['Django>=1.8.0', ], setup_requires=['pytest-runner', ], tests_require=['pytest', ], classifiers=[ 'Environment :: Web Environment', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Framework :: Django', ], zip_safe=False, ) <commit_msg>Set Django to version 1.8.13 and add mock as dependency<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- from setuptools import setup, find_packages PACKAGE = "patlms-web" setup( name=PACKAGE, version=0.1, description='PAT LMS web server', author='Adam Chyła', author_email='adam@chyla.org', license='GPLv3', url='https://github.com/chyla/pat-lms', packages=find_packages(exclude=['tests.*', 'tests', 'test.*', 'test*']), install_requires=['Django==1.8.13', ], setup_requires=['pytest-runner', ], tests_require=['pytest', 'mock', ], classifiers=[ 'Environment :: Web Environment', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Framework :: Django', ], zip_safe=False, )
4ba390219d58d1726773e14928428f2c9495f6de
api/src/SearchApi.py
api/src/SearchApi.py
from apiclient.discovery import build import json # Set DEVELOPER_KEY to the API key value from the APIs & auth > Registered apps # tab of # https://cloud.google.com/console # Please ensure that you have enabled the YouTube Data API for your project. devKeyFile = open("search-api.key", "rb") DEVELOPER_KEY = devKeyFile.read() devKeyFile.close() YOUTUBE_API_SERVICE_NAME = "youtube" YOUTUBE_API_VERSION = "v3" def youtubeSearch(query, maxResults=50): youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION, developerKey=DEVELOPER_KEY) # Call the search.list method to retrieve results matching the specified # query term. search_request = youtube.search().list( part="id,snippet", q=query, type="video", maxResults=maxResults, safeSearch="moderate", #could also be strict videoSyndicated="true", eventType="completed", videoDefinition="high", #could also be standard videoDuration="short", #max length of video 4mins, medium:4min-20min long order="relevance" ) search_response = json.dumps(search_request.execute(), separators=[',',':']) return search_response if __name__ == "__main__": print youtubeSearch("paramore", 5)
from apiclient.discovery import build import json # Set DEVELOPER_KEY to the API key value from the APIs & auth > Registered apps # tab of # https://cloud.google.com/console # Please ensure that you have enabled the YouTube Data API for your project. devKeyFile = open("search-api.key", "rb") DEVELOPER_KEY = devKeyFile.read() devKeyFile.close() YOUTUBE_API_SERVICE_NAME = "youtube" YOUTUBE_API_VERSION = "v3" def youtubeSearch(query, maxResults=50): youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION, developerKey=DEVELOPER_KEY) # Call the search.list method to retrieve results matching the specified # query term. search_request = youtube.search().list( part="id,snippet", q=query, type="video", maxResults=maxResults, order="relevance" ) search_response = json.dumps(search_request.execute(), separators=[',',':']) return search_response if __name__ == "__main__": print youtubeSearch("paramore", 5)
Update search api to produce results more consistant with those found on youtube
Update search api to produce results more consistant with those found on youtube
Python
mit
jghibiki/mopey,jghibiki/mopey,jghibiki/mopey,jghibiki/mopey,jghibiki/mopey
from apiclient.discovery import build import json # Set DEVELOPER_KEY to the API key value from the APIs & auth > Registered apps # tab of # https://cloud.google.com/console # Please ensure that you have enabled the YouTube Data API for your project. devKeyFile = open("search-api.key", "rb") DEVELOPER_KEY = devKeyFile.read() devKeyFile.close() YOUTUBE_API_SERVICE_NAME = "youtube" YOUTUBE_API_VERSION = "v3" def youtubeSearch(query, maxResults=50): youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION, developerKey=DEVELOPER_KEY) # Call the search.list method to retrieve results matching the specified # query term. search_request = youtube.search().list( part="id,snippet", q=query, type="video", maxResults=maxResults, safeSearch="moderate", #could also be strict videoSyndicated="true", eventType="completed", videoDefinition="high", #could also be standard videoDuration="short", #max length of video 4mins, medium:4min-20min long order="relevance" ) search_response = json.dumps(search_request.execute(), separators=[',',':']) return search_response if __name__ == "__main__": print youtubeSearch("paramore", 5) Update search api to produce results more consistant with those found on youtube
from apiclient.discovery import build import json # Set DEVELOPER_KEY to the API key value from the APIs & auth > Registered apps # tab of # https://cloud.google.com/console # Please ensure that you have enabled the YouTube Data API for your project. devKeyFile = open("search-api.key", "rb") DEVELOPER_KEY = devKeyFile.read() devKeyFile.close() YOUTUBE_API_SERVICE_NAME = "youtube" YOUTUBE_API_VERSION = "v3" def youtubeSearch(query, maxResults=50): youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION, developerKey=DEVELOPER_KEY) # Call the search.list method to retrieve results matching the specified # query term. search_request = youtube.search().list( part="id,snippet", q=query, type="video", maxResults=maxResults, order="relevance" ) search_response = json.dumps(search_request.execute(), separators=[',',':']) return search_response if __name__ == "__main__": print youtubeSearch("paramore", 5)
<commit_before>from apiclient.discovery import build import json # Set DEVELOPER_KEY to the API key value from the APIs & auth > Registered apps # tab of # https://cloud.google.com/console # Please ensure that you have enabled the YouTube Data API for your project. devKeyFile = open("search-api.key", "rb") DEVELOPER_KEY = devKeyFile.read() devKeyFile.close() YOUTUBE_API_SERVICE_NAME = "youtube" YOUTUBE_API_VERSION = "v3" def youtubeSearch(query, maxResults=50): youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION, developerKey=DEVELOPER_KEY) # Call the search.list method to retrieve results matching the specified # query term. search_request = youtube.search().list( part="id,snippet", q=query, type="video", maxResults=maxResults, safeSearch="moderate", #could also be strict videoSyndicated="true", eventType="completed", videoDefinition="high", #could also be standard videoDuration="short", #max length of video 4mins, medium:4min-20min long order="relevance" ) search_response = json.dumps(search_request.execute(), separators=[',',':']) return search_response if __name__ == "__main__": print youtubeSearch("paramore", 5) <commit_msg>Update search api to produce results more consistant with those found on youtube<commit_after>
from apiclient.discovery import build import json # Set DEVELOPER_KEY to the API key value from the APIs & auth > Registered apps # tab of # https://cloud.google.com/console # Please ensure that you have enabled the YouTube Data API for your project. devKeyFile = open("search-api.key", "rb") DEVELOPER_KEY = devKeyFile.read() devKeyFile.close() YOUTUBE_API_SERVICE_NAME = "youtube" YOUTUBE_API_VERSION = "v3" def youtubeSearch(query, maxResults=50): youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION, developerKey=DEVELOPER_KEY) # Call the search.list method to retrieve results matching the specified # query term. search_request = youtube.search().list( part="id,snippet", q=query, type="video", maxResults=maxResults, order="relevance" ) search_response = json.dumps(search_request.execute(), separators=[',',':']) return search_response if __name__ == "__main__": print youtubeSearch("paramore", 5)
from apiclient.discovery import build import json # Set DEVELOPER_KEY to the API key value from the APIs & auth > Registered apps # tab of # https://cloud.google.com/console # Please ensure that you have enabled the YouTube Data API for your project. devKeyFile = open("search-api.key", "rb") DEVELOPER_KEY = devKeyFile.read() devKeyFile.close() YOUTUBE_API_SERVICE_NAME = "youtube" YOUTUBE_API_VERSION = "v3" def youtubeSearch(query, maxResults=50): youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION, developerKey=DEVELOPER_KEY) # Call the search.list method to retrieve results matching the specified # query term. search_request = youtube.search().list( part="id,snippet", q=query, type="video", maxResults=maxResults, safeSearch="moderate", #could also be strict videoSyndicated="true", eventType="completed", videoDefinition="high", #could also be standard videoDuration="short", #max length of video 4mins, medium:4min-20min long order="relevance" ) search_response = json.dumps(search_request.execute(), separators=[',',':']) return search_response if __name__ == "__main__": print youtubeSearch("paramore", 5) Update search api to produce results more consistant with those found on youtubefrom apiclient.discovery import build import json # Set DEVELOPER_KEY to the API key value from the APIs & auth > Registered apps # tab of # https://cloud.google.com/console # Please ensure that you have enabled the YouTube Data API for your project. devKeyFile = open("search-api.key", "rb") DEVELOPER_KEY = devKeyFile.read() devKeyFile.close() YOUTUBE_API_SERVICE_NAME = "youtube" YOUTUBE_API_VERSION = "v3" def youtubeSearch(query, maxResults=50): youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION, developerKey=DEVELOPER_KEY) # Call the search.list method to retrieve results matching the specified # query term. search_request = youtube.search().list( part="id,snippet", q=query, type="video", maxResults=maxResults, order="relevance" ) search_response = json.dumps(search_request.execute(), separators=[',',':']) return search_response if __name__ == "__main__": print youtubeSearch("paramore", 5)
<commit_before>from apiclient.discovery import build import json # Set DEVELOPER_KEY to the API key value from the APIs & auth > Registered apps # tab of # https://cloud.google.com/console # Please ensure that you have enabled the YouTube Data API for your project. devKeyFile = open("search-api.key", "rb") DEVELOPER_KEY = devKeyFile.read() devKeyFile.close() YOUTUBE_API_SERVICE_NAME = "youtube" YOUTUBE_API_VERSION = "v3" def youtubeSearch(query, maxResults=50): youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION, developerKey=DEVELOPER_KEY) # Call the search.list method to retrieve results matching the specified # query term. search_request = youtube.search().list( part="id,snippet", q=query, type="video", maxResults=maxResults, safeSearch="moderate", #could also be strict videoSyndicated="true", eventType="completed", videoDefinition="high", #could also be standard videoDuration="short", #max length of video 4mins, medium:4min-20min long order="relevance" ) search_response = json.dumps(search_request.execute(), separators=[',',':']) return search_response if __name__ == "__main__": print youtubeSearch("paramore", 5) <commit_msg>Update search api to produce results more consistant with those found on youtube<commit_after>from apiclient.discovery import build import json # Set DEVELOPER_KEY to the API key value from the APIs & auth > Registered apps # tab of # https://cloud.google.com/console # Please ensure that you have enabled the YouTube Data API for your project. devKeyFile = open("search-api.key", "rb") DEVELOPER_KEY = devKeyFile.read() devKeyFile.close() YOUTUBE_API_SERVICE_NAME = "youtube" YOUTUBE_API_VERSION = "v3" def youtubeSearch(query, maxResults=50): youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION, developerKey=DEVELOPER_KEY) # Call the search.list method to retrieve results matching the specified # query term. search_request = youtube.search().list( part="id,snippet", q=query, type="video", maxResults=maxResults, order="relevance" ) search_response = json.dumps(search_request.execute(), separators=[',',':']) return search_response if __name__ == "__main__": print youtubeSearch("paramore", 5)
e1888771261878d576b05bab806e1abfdc1d25bb
ExpandVariables.py
ExpandVariables.py
import sublime, string, platform def expand_variables(the_dict, the_vars): return _expand_variables_recursive(the_dict, the_vars) def _expand_variables_recursive(the_dict, the_vars): for key, value in the_dict.items(): if isinstance(value, dict): value = expand_variables(value, the_vars) elif isinstance(value, str): the_dict[key] = string.Template(value).substitute(the_vars) else: continue return the_dict
import sublime, string, platform def expand_variables(the_dict, the_vars): the_vars['machine'] = platform.machine() the_vars['processor'] = platform.processor() return _expand_variables_recursive(the_dict, the_vars) def _expand_variables_recursive(the_dict, the_vars): for key, value in the_dict.items(): if isinstance(value, dict): value = expand_variables(value, the_vars) elif isinstance(value, str): the_dict[key] = string.Template(value).substitute(the_vars) else: continue return the_dict
Add extra vars "machine" and "processor" for the cmake dictionary.
Add extra vars "machine" and "processor" for the cmake dictionary.
Python
mit
rwols/CMakeBuilder
import sublime, string, platform def expand_variables(the_dict, the_vars): return _expand_variables_recursive(the_dict, the_vars) def _expand_variables_recursive(the_dict, the_vars): for key, value in the_dict.items(): if isinstance(value, dict): value = expand_variables(value, the_vars) elif isinstance(value, str): the_dict[key] = string.Template(value).substitute(the_vars) else: continue return the_dict Add extra vars "machine" and "processor" for the cmake dictionary.
import sublime, string, platform def expand_variables(the_dict, the_vars): the_vars['machine'] = platform.machine() the_vars['processor'] = platform.processor() return _expand_variables_recursive(the_dict, the_vars) def _expand_variables_recursive(the_dict, the_vars): for key, value in the_dict.items(): if isinstance(value, dict): value = expand_variables(value, the_vars) elif isinstance(value, str): the_dict[key] = string.Template(value).substitute(the_vars) else: continue return the_dict
<commit_before>import sublime, string, platform def expand_variables(the_dict, the_vars): return _expand_variables_recursive(the_dict, the_vars) def _expand_variables_recursive(the_dict, the_vars): for key, value in the_dict.items(): if isinstance(value, dict): value = expand_variables(value, the_vars) elif isinstance(value, str): the_dict[key] = string.Template(value).substitute(the_vars) else: continue return the_dict <commit_msg>Add extra vars "machine" and "processor" for the cmake dictionary.<commit_after>
import sublime, string, platform def expand_variables(the_dict, the_vars): the_vars['machine'] = platform.machine() the_vars['processor'] = platform.processor() return _expand_variables_recursive(the_dict, the_vars) def _expand_variables_recursive(the_dict, the_vars): for key, value in the_dict.items(): if isinstance(value, dict): value = expand_variables(value, the_vars) elif isinstance(value, str): the_dict[key] = string.Template(value).substitute(the_vars) else: continue return the_dict
import sublime, string, platform def expand_variables(the_dict, the_vars): return _expand_variables_recursive(the_dict, the_vars) def _expand_variables_recursive(the_dict, the_vars): for key, value in the_dict.items(): if isinstance(value, dict): value = expand_variables(value, the_vars) elif isinstance(value, str): the_dict[key] = string.Template(value).substitute(the_vars) else: continue return the_dict Add extra vars "machine" and "processor" for the cmake dictionary.import sublime, string, platform def expand_variables(the_dict, the_vars): the_vars['machine'] = platform.machine() the_vars['processor'] = platform.processor() return _expand_variables_recursive(the_dict, the_vars) def _expand_variables_recursive(the_dict, the_vars): for key, value in the_dict.items(): if isinstance(value, dict): value = expand_variables(value, the_vars) elif isinstance(value, str): the_dict[key] = string.Template(value).substitute(the_vars) else: continue return the_dict
<commit_before>import sublime, string, platform def expand_variables(the_dict, the_vars): return _expand_variables_recursive(the_dict, the_vars) def _expand_variables_recursive(the_dict, the_vars): for key, value in the_dict.items(): if isinstance(value, dict): value = expand_variables(value, the_vars) elif isinstance(value, str): the_dict[key] = string.Template(value).substitute(the_vars) else: continue return the_dict <commit_msg>Add extra vars "machine" and "processor" for the cmake dictionary.<commit_after>import sublime, string, platform def expand_variables(the_dict, the_vars): the_vars['machine'] = platform.machine() the_vars['processor'] = platform.processor() return _expand_variables_recursive(the_dict, the_vars) def _expand_variables_recursive(the_dict, the_vars): for key, value in the_dict.items(): if isinstance(value, dict): value = expand_variables(value, the_vars) elif isinstance(value, str): the_dict[key] = string.Template(value).substitute(the_vars) else: continue return the_dict
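To make the effect of the change in this record concrete, the snippet below is a minimal standalone sketch of the substitution behaviour the new code relies on, using only the standard-library pieces involved (string.Template and platform). The settings keys and paths are invented for illustration, and the real plugin module also imports the sublime API, which is omitted here.

import string, platform

# Variables the plugin would pass in, now including the two new entries.
the_vars = {
    'project_path': '/tmp/demo',
    'machine': platform.machine(),      # e.g. 'x86_64'
    'processor': platform.processor(),  # may be an empty string on some platforms
}

# A settings dict with ${...} placeholders, as string.Template expects.
settings = {
    'build_folder': '${project_path}/build-${machine}',
    'generator': 'Ninja',               # no placeholders, passes through unchanged
}

# Mirror of the string branch in _expand_variables_recursive.
expanded = {
    key: string.Template(value).substitute(the_vars) if isinstance(value, str) else value
    for key, value in settings.items()
}
print(expanded['build_folder'])         # e.g. /tmp/demo/build-x86_64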
115ffb22128e12a0cc88b7c0cd1dd9bde04fb768
wagtail/utils/compat.py
wagtail/utils/compat.py
def get_related_model(rel): # In Django 1.7 and under, the related model is accessed by doing: rel.model # This was renamed in Django 1.8 to rel.related_model. rel.model now returns # the base model. return getattr(rel, 'related_model', rel.model)
import django def get_related_model(rel): # In Django 1.7 and under, the related model is accessed by doing: rel.model # This was renamed in Django 1.8 to rel.related_model. rel.model now returns # the base model. if django.VERSION >= (1, 8): return rel.related_model else: return rel.model
Check Django version instead of hasattr
Check Django version instead of hasattr
Python
bsd-3-clause
mixxorz/wagtail,taedori81/wagtail,FlipperPA/wagtail,mixxorz/wagtail,bjesus/wagtail,mjec/wagtail,stevenewey/wagtail,gasman/wagtail,hanpama/wagtail,thenewguy/wagtail,serzans/wagtail,kurtw/wagtail,Klaudit/wagtail,hamsterbacke23/wagtail,rv816/wagtail,Klaudit/wagtail,janusnic/wagtail,kurtrwall/wagtail,marctc/wagtail,rjsproxy/wagtail,jordij/wagtail,quru/wagtail,davecranwell/wagtail,hanpama/wagtail,zerolab/wagtail,JoshBarr/wagtail,takeflight/wagtail,rv816/wagtail,Klaudit/wagtail,inonit/wagtail,JoshBarr/wagtail,gasman/wagtail,mephizzle/wagtail,mayapurmedia/wagtail,jnns/wagtail,quru/wagtail,mixxorz/wagtail,iho/wagtail,janusnic/wagtail,timorieber/wagtail,darith27/wagtail,bjesus/wagtail,KimGlazebrook/wagtail-experiment,taedori81/wagtail,WQuanfeng/wagtail,nilnvoid/wagtail,kaedroho/wagtail,gogobook/wagtail,jnns/wagtail,chrxr/wagtail,mjec/wagtail,hamsterbacke23/wagtail,wagtail/wagtail,gogobook/wagtail,rsalmaso/wagtail,takeflight/wagtail,nimasmi/wagtail,kaedroho/wagtail,WQuanfeng/wagtail,hamsterbacke23/wagtail,nutztherookie/wagtail,nrsimha/wagtail,FlipperPA/wagtail,iansprice/wagtail,mayapurmedia/wagtail,rjsproxy/wagtail,thenewguy/wagtail,takeshineshiro/wagtail,mjec/wagtail,nilnvoid/wagtail,davecranwell/wagtail,thenewguy/wagtail,chrxr/wagtail,rsalmaso/wagtail,takeshineshiro/wagtail,Klaudit/wagtail,kurtrwall/wagtail,mephizzle/wagtail,taedori81/wagtail,jnns/wagtail,iho/wagtail,janusnic/wagtail,nrsimha/wagtail,serzans/wagtail,stevenewey/wagtail,FlipperPA/wagtail,nimasmi/wagtail,hanpama/wagtail,kaedroho/wagtail,m-sanders/wagtail,stevenewey/wagtail,hanpama/wagtail,jnns/wagtail,jordij/wagtail,taedori81/wagtail,mikedingjan/wagtail,gogobook/wagtail,mayapurmedia/wagtail,bjesus/wagtail,kurtw/wagtail,nimasmi/wagtail,torchbox/wagtail,takeflight/wagtail,m-sanders/wagtail,mixxorz/wagtail,kurtrwall/wagtail,kurtw/wagtail,Toshakins/wagtail,stevenewey/wagtail,wagtail/wagtail,mikedingjan/wagtail,kaedroho/wagtail,jordij/wagtail,nealtodd/wagtail,mixxorz/wagtail,m-sanders/wagtail,Tivix/wagtail,JoshBarr/wagtail,torchbox/wagtail,torchbox/wagtail,zerolab/wagtail,nilnvoid/wagtail,rsalmaso/wagtail,Tivix/wagtail,zerolab/wagtail,iansprice/wagtail,Pennebaker/wagtail,nilnvoid/wagtail,serzans/wagtail,Tivix/wagtail,timorieber/wagtail,quru/wagtail,zerolab/wagtail,mikedingjan/wagtail,hamsterbacke23/wagtail,gasman/wagtail,mikedingjan/wagtail,darith27/wagtail,rv816/wagtail,gogobook/wagtail,marctc/wagtail,nrsimha/wagtail,zerolab/wagtail,tangentlabs/wagtail,Pennebaker/wagtail,gasman/wagtail,chrxr/wagtail,davecranwell/wagtail,thenewguy/wagtail,rsalmaso/wagtail,Tivix/wagtail,nealtodd/wagtail,nimasmi/wagtail,Pennebaker/wagtail,WQuanfeng/wagtail,bjesus/wagtail,nutztherookie/wagtail,kurtrwall/wagtail,darith27/wagtail,tangentlabs/wagtail,Toshakins/wagtail,inonit/wagtail,jordij/wagtail,rv816/wagtail,marctc/wagtail,nutztherookie/wagtail,wagtail/wagtail,janusnic/wagtail,inonit/wagtail,WQuanfeng/wagtail,m-sanders/wagtail,iansprice/wagtail,thenewguy/wagtail,mayapurmedia/wagtail,tangentlabs/wagtail,timorieber/wagtail,mjec/wagtail,gasman/wagtail,rjsproxy/wagtail,iansprice/wagtail,wagtail/wagtail,takeshineshiro/wagtail,nutztherookie/wagtail,serzans/wagtail,nealtodd/wagtail,quru/wagtail,inonit/wagtail,Toshakins/wagtail,takeflight/wagtail,chrxr/wagtail,darith27/wagtail,nealtodd/wagtail,Toshakins/wagtail,takeshineshiro/wagtail,mephizzle/wagtail,JoshBarr/wagtail,KimGlazebrook/wagtail-experiment,FlipperPA/wagtail,wagtail/wagtail,davecranwell/wagtail,tangentlabs/wagtail,torchbox/wagtail,kaedroho/wagtail,KimGlazebrook/wagtail-experiment,mephizzle/wagtail,marctc/wag
tail,kurtw/wagtail,iho/wagtail,rjsproxy/wagtail,taedori81/wagtail,nrsimha/wagtail,iho/wagtail,timorieber/wagtail,Pennebaker/wagtail,KimGlazebrook/wagtail-experiment,rsalmaso/wagtail
def get_related_model(rel): # In Django 1.7 and under, the related model is accessed by doing: rel.model # This was renamed in Django 1.8 to rel.related_model. rel.model now returns # the base model. return getattr(rel, 'related_model', rel.model) Check Django version instead of hasattr
import django def get_related_model(rel): # In Django 1.7 and under, the related model is accessed by doing: rel.model # This was renamed in Django 1.8 to rel.related_model. rel.model now returns # the base model. if django.VERSION >= (1, 8): return rel.related_model else: return rel.model
<commit_before>def get_related_model(rel): # In Django 1.7 and under, the related model is accessed by doing: rel.model # This was renamed in Django 1.8 to rel.related_model. rel.model now returns # the base model. return getattr(rel, 'related_model', rel.model) <commit_msg>Check Django version instead of hasattr<commit_after>
import django def get_related_model(rel): # In Django 1.7 and under, the related model is accessed by doing: rel.model # This was renamed in Django 1.8 to rel.related_model. rel.model now returns # the base model. if django.VERSION >= (1, 8): return rel.related_model else: return rel.model
def get_related_model(rel): # In Django 1.7 and under, the related model is accessed by doing: rel.model # This was renamed in Django 1.8 to rel.related_model. rel.model now returns # the base model. return getattr(rel, 'related_model', rel.model) Check Django version instead of hasattrimport django def get_related_model(rel): # In Django 1.7 and under, the related model is accessed by doing: rel.model # This was renamed in Django 1.8 to rel.related_model. rel.model now returns # the base model. if django.VERSION >= (1, 8): return rel.related_model else: return rel.model
<commit_before>def get_related_model(rel): # In Django 1.7 and under, the related model is accessed by doing: rel.model # This was renamed in Django 1.8 to rel.related_model. rel.model now returns # the base model. return getattr(rel, 'related_model', rel.model) <commit_msg>Check Django version instead of hasattr<commit_after>import django def get_related_model(rel): # In Django 1.7 and under, the related model is accessed by doing: rel.model # This was renamed in Django 1.8 to rel.related_model. rel.model now returns # the base model. if django.VERSION >= (1, 8): return rel.related_model else: return rel.model
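As a usage note on the row above: the point of the shim is that calling code never has to branch on the Django version itself. The sketch below shows one plausible call pattern against models of that era (Django 1.7/1.8); the helper name and the use of each field's rel attribute are assumptions for illustration, not taken from the wagtail codebase.

from wagtail.utils.compat import get_related_model

def related_models(model):
    # Collect the target model of every relational field on `model`,
    # leaving the 1.7-vs-1.8 attribute difference to the compat helper.
    return [
        get_related_model(field.rel)
        for field in model._meta.fields
        if getattr(field, 'rel', None) is not None
    ]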
eaf577b7a4aebc872cbf2b5674f9365faeec9cfb
template/module/__openerp__.py
template/module/__openerp__.py
# -*- coding: utf-8 -*- # © <YEAR(S)> <AUTHOR(S)> # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). { "name": "Module name", "summary": "Module summary", "version": "8.0.1.0.0", "category": "Uncategorized", "license": "AGPL-3", "website": "https://odoo-community.org/", "author": "Odoo Community Association (OCA)", "application": False, "installable": True, "external_dependencies": { "python": [], "bin": [], }, "depends": [ ], "data": [ ], "demo": [ ], }
# -*- coding: utf-8 -*- # © <YEAR(S)> <AUTHOR(S)> # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). { "name": "Module name", "summary": "Module summary", "version": "8.0.1.0.0", "category": "Uncategorized", "license": "AGPL-3", "website": "https://odoo-community.org/", "author": "<AUTHOR(S)>, Odoo Community Association (OCA)", "application": False, "installable": True, "external_dependencies": { "python": [], "bin": [], }, "depends": [ ], "data": [ ], "demo": [ ], }
Add real author to author key too.
Add real author to author key too.
Python
agpl-3.0
acsone/maintainers-tools,Endika/maintainer-tools,sambathkumarpi/maintainer-tools,dreispt/maintainer-tools,OCA/maintainer-tools,Yajo/maintainer-tools,Yajo/maintainer-tools,Vauxoo/maintainer-tools,vauxoo-dev/maintainer-tools,OCA/maintainer-tools,acsone/maintainer-tools,dreispt/maintainer-tools,tafaRU/maintainer-tools,Yajo/maintainer-tools,gurneyalex/maintainers-tools,hbrunn/maintainers-tools,tafaRU/maintainer-tools,Endika/maintainer-tools,Yajo/maintainer-tools,OCA/maintainer-tools,acsone/maintainer-tools,OCA/maintainer-tools,akretion/maintainer-tools,dreispt/maintainer-tools,Vauxoo/maintainer-tools,hbrunn/maintainers-tools,Endika/maintainer-tools,sambathkumarpi/maintainer-tools,acsone/maintainers-tools,akretion/maintainer-tools,acsone/maintainers-tools,vauxoo-dev/maintainer-tools,acsone/maintainer-tools,acsone/maintainer-tools,vauxoo-dev/maintainer-tools
# -*- coding: utf-8 -*- # © <YEAR(S)> <AUTHOR(S)> # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). { "name": "Module name", "summary": "Module summary", "version": "8.0.1.0.0", "category": "Uncategorized", "license": "AGPL-3", "website": "https://odoo-community.org/", "author": "Odoo Community Association (OCA)", "application": False, "installable": True, "external_dependencies": { "python": [], "bin": [], }, "depends": [ ], "data": [ ], "demo": [ ], } Add real author to author key too.
# -*- coding: utf-8 -*- # © <YEAR(S)> <AUTHOR(S)> # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). { "name": "Module name", "summary": "Module summary", "version": "8.0.1.0.0", "category": "Uncategorized", "license": "AGPL-3", "website": "https://odoo-community.org/", "author": "<AUTHOR(S)>, Odoo Community Association (OCA)", "application": False, "installable": True, "external_dependencies": { "python": [], "bin": [], }, "depends": [ ], "data": [ ], "demo": [ ], }
<commit_before># -*- coding: utf-8 -*- # © <YEAR(S)> <AUTHOR(S)> # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). { "name": "Module name", "summary": "Module summary", "version": "8.0.1.0.0", "category": "Uncategorized", "license": "AGPL-3", "website": "https://odoo-community.org/", "author": "Odoo Community Association (OCA)", "application": False, "installable": True, "external_dependencies": { "python": [], "bin": [], }, "depends": [ ], "data": [ ], "demo": [ ], } <commit_msg>Add real author to author key too.<commit_after>
# -*- coding: utf-8 -*- # © <YEAR(S)> <AUTHOR(S)> # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). { "name": "Module name", "summary": "Module summary", "version": "8.0.1.0.0", "category": "Uncategorized", "license": "AGPL-3", "website": "https://odoo-community.org/", "author": "<AUTHOR(S)>, Odoo Community Association (OCA)", "application": False, "installable": True, "external_dependencies": { "python": [], "bin": [], }, "depends": [ ], "data": [ ], "demo": [ ], }
# -*- coding: utf-8 -*- # © <YEAR(S)> <AUTHOR(S)> # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). { "name": "Module name", "summary": "Module summary", "version": "8.0.1.0.0", "category": "Uncategorized", "license": "AGPL-3", "website": "https://odoo-community.org/", "author": "Odoo Community Association (OCA)", "application": False, "installable": True, "external_dependencies": { "python": [], "bin": [], }, "depends": [ ], "data": [ ], "demo": [ ], } Add real author to author key too.# -*- coding: utf-8 -*- # © <YEAR(S)> <AUTHOR(S)> # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). { "name": "Module name", "summary": "Module summary", "version": "8.0.1.0.0", "category": "Uncategorized", "license": "AGPL-3", "website": "https://odoo-community.org/", "author": "<AUTHOR(S)>, Odoo Community Association (OCA)", "application": False, "installable": True, "external_dependencies": { "python": [], "bin": [], }, "depends": [ ], "data": [ ], "demo": [ ], }
<commit_before># -*- coding: utf-8 -*- # © <YEAR(S)> <AUTHOR(S)> # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). { "name": "Module name", "summary": "Module summary", "version": "8.0.1.0.0", "category": "Uncategorized", "license": "AGPL-3", "website": "https://odoo-community.org/", "author": "Odoo Community Association (OCA)", "application": False, "installable": True, "external_dependencies": { "python": [], "bin": [], }, "depends": [ ], "data": [ ], "demo": [ ], } <commit_msg>Add real author to author key too.<commit_after># -*- coding: utf-8 -*- # © <YEAR(S)> <AUTHOR(S)> # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html). { "name": "Module name", "summary": "Module summary", "version": "8.0.1.0.0", "category": "Uncategorized", "license": "AGPL-3", "website": "https://odoo-community.org/", "author": "<AUTHOR(S)>, Odoo Community Association (OCA)", "application": False, "installable": True, "external_dependencies": { "python": [], "bin": [], }, "depends": [ ], "data": [ ], "demo": [ ], }
db64ca09e57da414d92888de1b52fade810d855e
handlers/downloadMapHandler.py
handlers/downloadMapHandler.py
from helpers import requestHelper import requests import glob # Exception tracking import tornado.web import tornado.gen import sys import traceback from raven.contrib.tornado import SentryMixin MODULE_NAME = "direct_download" class handler(SentryMixin, requestHelper.asyncRequestHandler): """ Handler for /d/ """ @tornado.web.asynchronous @tornado.gen.engine def asyncGet(self, bid): try: self.set_status(302) url = "http://m.zxq.co/{}.osz".format(bid) #url = "https://bloodcat.com/osu/s/{}".format(bid) self.add_header("location", url) print(url) #f = requests.get(url) #self.write(str(f)) except: log.error("Unknown error in {}!\n```{}\n{}```".format(MODULE_NAME, sys.exc_info(), traceback.format_exc())) if glob.sentry: yield tornado.gen.Task(self.captureException, exc_info=True) #finally: # self.finish()
from helpers import requestHelper import requests import glob # Exception tracking import tornado.web import tornado.gen import sys import traceback from raven.contrib.tornado import SentryMixin MODULE_NAME = "direct_download" class handler(SentryMixin, requestHelper.asyncRequestHandler): """ Handler for /d/ """ @tornado.web.asynchronous @tornado.gen.engine def asyncGet(self, bid): try: self.set_status(302, "Moved Temporarily") url = "http://m.zxq.co/{}.osz".format(bid) self.add_header("Location", url) self.add_header("Cache-Control", "no-cache") self.add_header("Pragma", "no-cache") print(url) #f = requests.get(url) #self.write(str(f)) except: log.error("Unknown error in {}!\n```{}\n{}```".format(MODULE_NAME, sys.exc_info(), traceback.format_exc())) if glob.sentry: yield tornado.gen.Task(self.captureException, exc_info=True) #finally: # self.finish()
Add some headers in osu! direct download
Add some headers in osu! direct download
Python
agpl-3.0
osuripple/lets,osuripple/lets
from helpers import requestHelper import requests import glob # Exception tracking import tornado.web import tornado.gen import sys import traceback from raven.contrib.tornado import SentryMixin MODULE_NAME = "direct_download" class handler(SentryMixin, requestHelper.asyncRequestHandler): """ Handler for /d/ """ @tornado.web.asynchronous @tornado.gen.engine def asyncGet(self, bid): try: self.set_status(302) url = "http://m.zxq.co/{}.osz".format(bid) #url = "https://bloodcat.com/osu/s/{}".format(bid) self.add_header("location", url) print(url) #f = requests.get(url) #self.write(str(f)) except: log.error("Unknown error in {}!\n```{}\n{}```".format(MODULE_NAME, sys.exc_info(), traceback.format_exc())) if glob.sentry: yield tornado.gen.Task(self.captureException, exc_info=True) #finally: # self.finish() Add some headers in osu! direct download
from helpers import requestHelper import requests import glob # Exception tracking import tornado.web import tornado.gen import sys import traceback from raven.contrib.tornado import SentryMixin MODULE_NAME = "direct_download" class handler(SentryMixin, requestHelper.asyncRequestHandler): """ Handler for /d/ """ @tornado.web.asynchronous @tornado.gen.engine def asyncGet(self, bid): try: self.set_status(302, "Moved Temporarily") url = "http://m.zxq.co/{}.osz".format(bid) self.add_header("Location", url) self.add_header("Cache-Control", "no-cache") self.add_header("Pragma", "no-cache") print(url) #f = requests.get(url) #self.write(str(f)) except: log.error("Unknown error in {}!\n```{}\n{}```".format(MODULE_NAME, sys.exc_info(), traceback.format_exc())) if glob.sentry: yield tornado.gen.Task(self.captureException, exc_info=True) #finally: # self.finish()
<commit_before>from helpers import requestHelper import requests import glob # Exception tracking import tornado.web import tornado.gen import sys import traceback from raven.contrib.tornado import SentryMixin MODULE_NAME = "direct_download" class handler(SentryMixin, requestHelper.asyncRequestHandler): """ Handler for /d/ """ @tornado.web.asynchronous @tornado.gen.engine def asyncGet(self, bid): try: self.set_status(302) url = "http://m.zxq.co/{}.osz".format(bid) #url = "https://bloodcat.com/osu/s/{}".format(bid) self.add_header("location", url) print(url) #f = requests.get(url) #self.write(str(f)) except: log.error("Unknown error in {}!\n```{}\n{}```".format(MODULE_NAME, sys.exc_info(), traceback.format_exc())) if glob.sentry: yield tornado.gen.Task(self.captureException, exc_info=True) #finally: # self.finish() <commit_msg>Add some headers in osu! direct download<commit_after>
from helpers import requestHelper import requests import glob # Exception tracking import tornado.web import tornado.gen import sys import traceback from raven.contrib.tornado import SentryMixin MODULE_NAME = "direct_download" class handler(SentryMixin, requestHelper.asyncRequestHandler): """ Handler for /d/ """ @tornado.web.asynchronous @tornado.gen.engine def asyncGet(self, bid): try: self.set_status(302, "Moved Temporarily") url = "http://m.zxq.co/{}.osz".format(bid) self.add_header("Location", url) self.add_header("Cache-Control", "no-cache") self.add_header("Pragma", "no-cache") print(url) #f = requests.get(url) #self.write(str(f)) except: log.error("Unknown error in {}!\n```{}\n{}```".format(MODULE_NAME, sys.exc_info(), traceback.format_exc())) if glob.sentry: yield tornado.gen.Task(self.captureException, exc_info=True) #finally: # self.finish()
from helpers import requestHelper import requests import glob # Exception tracking import tornado.web import tornado.gen import sys import traceback from raven.contrib.tornado import SentryMixin MODULE_NAME = "direct_download" class handler(SentryMixin, requestHelper.asyncRequestHandler): """ Handler for /d/ """ @tornado.web.asynchronous @tornado.gen.engine def asyncGet(self, bid): try: self.set_status(302) url = "http://m.zxq.co/{}.osz".format(bid) #url = "https://bloodcat.com/osu/s/{}".format(bid) self.add_header("location", url) print(url) #f = requests.get(url) #self.write(str(f)) except: log.error("Unknown error in {}!\n```{}\n{}```".format(MODULE_NAME, sys.exc_info(), traceback.format_exc())) if glob.sentry: yield tornado.gen.Task(self.captureException, exc_info=True) #finally: # self.finish() Add some headers in osu! direct downloadfrom helpers import requestHelper import requests import glob # Exception tracking import tornado.web import tornado.gen import sys import traceback from raven.contrib.tornado import SentryMixin MODULE_NAME = "direct_download" class handler(SentryMixin, requestHelper.asyncRequestHandler): """ Handler for /d/ """ @tornado.web.asynchronous @tornado.gen.engine def asyncGet(self, bid): try: self.set_status(302, "Moved Temporarily") url = "http://m.zxq.co/{}.osz".format(bid) self.add_header("Location", url) self.add_header("Cache-Control", "no-cache") self.add_header("Pragma", "no-cache") print(url) #f = requests.get(url) #self.write(str(f)) except: log.error("Unknown error in {}!\n```{}\n{}```".format(MODULE_NAME, sys.exc_info(), traceback.format_exc())) if glob.sentry: yield tornado.gen.Task(self.captureException, exc_info=True) #finally: # self.finish()
<commit_before>from helpers import requestHelper import requests import glob # Exception tracking import tornado.web import tornado.gen import sys import traceback from raven.contrib.tornado import SentryMixin MODULE_NAME = "direct_download" class handler(SentryMixin, requestHelper.asyncRequestHandler): """ Handler for /d/ """ @tornado.web.asynchronous @tornado.gen.engine def asyncGet(self, bid): try: self.set_status(302) url = "http://m.zxq.co/{}.osz".format(bid) #url = "https://bloodcat.com/osu/s/{}".format(bid) self.add_header("location", url) print(url) #f = requests.get(url) #self.write(str(f)) except: log.error("Unknown error in {}!\n```{}\n{}```".format(MODULE_NAME, sys.exc_info(), traceback.format_exc())) if glob.sentry: yield tornado.gen.Task(self.captureException, exc_info=True) #finally: # self.finish() <commit_msg>Add some headers in osu! direct download<commit_after>from helpers import requestHelper import requests import glob # Exception tracking import tornado.web import tornado.gen import sys import traceback from raven.contrib.tornado import SentryMixin MODULE_NAME = "direct_download" class handler(SentryMixin, requestHelper.asyncRequestHandler): """ Handler for /d/ """ @tornado.web.asynchronous @tornado.gen.engine def asyncGet(self, bid): try: self.set_status(302, "Moved Temporarily") url = "http://m.zxq.co/{}.osz".format(bid) self.add_header("Location", url) self.add_header("Cache-Control", "no-cache") self.add_header("Pragma", "no-cache") print(url) #f = requests.get(url) #self.write(str(f)) except: log.error("Unknown error in {}!\n```{}\n{}```".format(MODULE_NAME, sys.exc_info(), traceback.format_exc())) if glob.sentry: yield tornado.gen.Task(self.captureException, exc_info=True) #finally: # self.finish()
03b55cad3839653cea62300eca80571541579d2b
dataviews/__init__.py
dataviews/__init__.py
import sys, os # Add param submodule to sys.path cwd = os.path.abspath(os.path.split(__file__)[0]) sys.path.insert(0, os.path.join(cwd, '..', 'param')) import param __version__ = param.Version(release=(0,7), fpath=__file__, commit="$Format:%h$", reponame='dataviews') from .views import * # pyflakes:ignore (API import) from .dataviews import * # pyflakes:ignore (API import) from .sheetviews import * # pyflakes:ignore (API import) from .ndmapping import * # pyflakes:ignore (API import) def public(obj): if not isinstance(obj, type): return False baseclasses = [NdMapping, View, Dimension, Overlay] return any([issubclass(obj, bc) for bc in baseclasses]) _public = list(set([_k for _k, _v in locals().items() if public(_v)])) __all__ = _public + ["boundingregion", "ipython", "plotting", "sheetcoords" ]
import sys, os # Add param submodule to sys.path cwd = os.path.abspath(os.path.split(__file__)[0]) sys.path.insert(0, os.path.join(cwd, '..', 'param')) import param __version__ = param.Version(release=(0,7), fpath=__file__, commit="$Format:%h$", reponame='dataviews') from .views import * # pyflakes:ignore (API import) from .dataviews import * # pyflakes:ignore (API import) from .sheetviews import * # pyflakes:ignore (API import) from .ndmapping import * # pyflakes:ignore (API import) from .styles import set_style set_style('default') def public(obj): if not isinstance(obj, type): return False baseclasses = [NdMapping, View, Dimension, Overlay] return any([issubclass(obj, bc) for bc in baseclasses]) _public = list(set([_k for _k, _v in locals().items() if public(_v)])) __all__ = _public + ["boundingregion", "ipython", "plotting", "sheetcoords" ]
Apply default style on import
Apply default style on import
Python
bsd-3-clause
vascotenner/holoviews,basnijholt/holoviews,vascotenner/holoviews,basnijholt/holoviews,mjabri/holoviews,mjabri/holoviews,ioam/holoviews,basnijholt/holoviews,mjabri/holoviews,ioam/holoviews,vascotenner/holoviews,ioam/holoviews
import sys, os # Add param submodule to sys.path cwd = os.path.abspath(os.path.split(__file__)[0]) sys.path.insert(0, os.path.join(cwd, '..', 'param')) import param __version__ = param.Version(release=(0,7), fpath=__file__, commit="$Format:%h$", reponame='dataviews') from .views import * # pyflakes:ignore (API import) from .dataviews import * # pyflakes:ignore (API import) from .sheetviews import * # pyflakes:ignore (API import) from .ndmapping import * # pyflakes:ignore (API import) def public(obj): if not isinstance(obj, type): return False baseclasses = [NdMapping, View, Dimension, Overlay] return any([issubclass(obj, bc) for bc in baseclasses]) _public = list(set([_k for _k, _v in locals().items() if public(_v)])) __all__ = _public + ["boundingregion", "ipython", "plotting", "sheetcoords" ] Apply default style on import
import sys, os # Add param submodule to sys.path cwd = os.path.abspath(os.path.split(__file__)[0]) sys.path.insert(0, os.path.join(cwd, '..', 'param')) import param __version__ = param.Version(release=(0,7), fpath=__file__, commit="$Format:%h$", reponame='dataviews') from .views import * # pyflakes:ignore (API import) from .dataviews import * # pyflakes:ignore (API import) from .sheetviews import * # pyflakes:ignore (API import) from .ndmapping import * # pyflakes:ignore (API import) from .styles import set_style set_style('default') def public(obj): if not isinstance(obj, type): return False baseclasses = [NdMapping, View, Dimension, Overlay] return any([issubclass(obj, bc) for bc in baseclasses]) _public = list(set([_k for _k, _v in locals().items() if public(_v)])) __all__ = _public + ["boundingregion", "ipython", "plotting", "sheetcoords" ]
<commit_before>import sys, os # Add param submodule to sys.path cwd = os.path.abspath(os.path.split(__file__)[0]) sys.path.insert(0, os.path.join(cwd, '..', 'param')) import param __version__ = param.Version(release=(0,7), fpath=__file__, commit="$Format:%h$", reponame='dataviews') from .views import * # pyflakes:ignore (API import) from .dataviews import * # pyflakes:ignore (API import) from .sheetviews import * # pyflakes:ignore (API import) from .ndmapping import * # pyflakes:ignore (API import) def public(obj): if not isinstance(obj, type): return False baseclasses = [NdMapping, View, Dimension, Overlay] return any([issubclass(obj, bc) for bc in baseclasses]) _public = list(set([_k for _k, _v in locals().items() if public(_v)])) __all__ = _public + ["boundingregion", "ipython", "plotting", "sheetcoords" ] <commit_msg>Apply default style on import<commit_after>
import sys, os # Add param submodule to sys.path cwd = os.path.abspath(os.path.split(__file__)[0]) sys.path.insert(0, os.path.join(cwd, '..', 'param')) import param __version__ = param.Version(release=(0,7), fpath=__file__, commit="$Format:%h$", reponame='dataviews') from .views import * # pyflakes:ignore (API import) from .dataviews import * # pyflakes:ignore (API import) from .sheetviews import * # pyflakes:ignore (API import) from .ndmapping import * # pyflakes:ignore (API import) from .styles import set_style set_style('default') def public(obj): if not isinstance(obj, type): return False baseclasses = [NdMapping, View, Dimension, Overlay] return any([issubclass(obj, bc) for bc in baseclasses]) _public = list(set([_k for _k, _v in locals().items() if public(_v)])) __all__ = _public + ["boundingregion", "ipython", "plotting", "sheetcoords" ]
import sys, os # Add param submodule to sys.path cwd = os.path.abspath(os.path.split(__file__)[0]) sys.path.insert(0, os.path.join(cwd, '..', 'param')) import param __version__ = param.Version(release=(0,7), fpath=__file__, commit="$Format:%h$", reponame='dataviews') from .views import * # pyflakes:ignore (API import) from .dataviews import * # pyflakes:ignore (API import) from .sheetviews import * # pyflakes:ignore (API import) from .ndmapping import * # pyflakes:ignore (API import) def public(obj): if not isinstance(obj, type): return False baseclasses = [NdMapping, View, Dimension, Overlay] return any([issubclass(obj, bc) for bc in baseclasses]) _public = list(set([_k for _k, _v in locals().items() if public(_v)])) __all__ = _public + ["boundingregion", "ipython", "plotting", "sheetcoords" ] Apply default style on importimport sys, os # Add param submodule to sys.path cwd = os.path.abspath(os.path.split(__file__)[0]) sys.path.insert(0, os.path.join(cwd, '..', 'param')) import param __version__ = param.Version(release=(0,7), fpath=__file__, commit="$Format:%h$", reponame='dataviews') from .views import * # pyflakes:ignore (API import) from .dataviews import * # pyflakes:ignore (API import) from .sheetviews import * # pyflakes:ignore (API import) from .ndmapping import * # pyflakes:ignore (API import) from .styles import set_style set_style('default') def public(obj): if not isinstance(obj, type): return False baseclasses = [NdMapping, View, Dimension, Overlay] return any([issubclass(obj, bc) for bc in baseclasses]) _public = list(set([_k for _k, _v in locals().items() if public(_v)])) __all__ = _public + ["boundingregion", "ipython", "plotting", "sheetcoords" ]
<commit_before>import sys, os # Add param submodule to sys.path cwd = os.path.abspath(os.path.split(__file__)[0]) sys.path.insert(0, os.path.join(cwd, '..', 'param')) import param __version__ = param.Version(release=(0,7), fpath=__file__, commit="$Format:%h$", reponame='dataviews') from .views import * # pyflakes:ignore (API import) from .dataviews import * # pyflakes:ignore (API import) from .sheetviews import * # pyflakes:ignore (API import) from .ndmapping import * # pyflakes:ignore (API import) def public(obj): if not isinstance(obj, type): return False baseclasses = [NdMapping, View, Dimension, Overlay] return any([issubclass(obj, bc) for bc in baseclasses]) _public = list(set([_k for _k, _v in locals().items() if public(_v)])) __all__ = _public + ["boundingregion", "ipython", "plotting", "sheetcoords" ] <commit_msg>Apply default style on import<commit_after>import sys, os # Add param submodule to sys.path cwd = os.path.abspath(os.path.split(__file__)[0]) sys.path.insert(0, os.path.join(cwd, '..', 'param')) import param __version__ = param.Version(release=(0,7), fpath=__file__, commit="$Format:%h$", reponame='dataviews') from .views import * # pyflakes:ignore (API import) from .dataviews import * # pyflakes:ignore (API import) from .sheetviews import * # pyflakes:ignore (API import) from .ndmapping import * # pyflakes:ignore (API import) from .styles import set_style set_style('default') def public(obj): if not isinstance(obj, type): return False baseclasses = [NdMapping, View, Dimension, Overlay] return any([issubclass(obj, bc) for bc in baseclasses]) _public = list(set([_k for _k, _v in locals().items() if public(_v)])) __all__ = _public + ["boundingregion", "ipython", "plotting", "sheetcoords" ]
77122e472c3688f96e77b4f39e9767fed0fb53ae
generate_from_template.py
generate_from_template.py
#! /usr/bin/env python from __future__ import print_function import os.path import sys import json import uuid root = sys.path[0] template_path = os.path.join(root, 'templates', 'simple.json') with open(template_path) as template: oyster = json.load(template) new_id = str(uuid.uuid4()) new_filename = new_id + '.json' new_filepath = os.path.join(root, 'cmdoysters', new_filename) with open(new_filepath, 'w') as new_file: oyster['uuid'] = new_id json.dump(oyster, new_file, indent=4, separators=(',', ': '), sort_keys=True) print('Created new CmdOyster:\n{}'.format(new_filepath))
#! /usr/bin/env python import os.path import sys import json import uuid root = sys.path[0] template_path = os.path.join(root, 'templates', 'simple.json') with open(template_path) as template: oyster = json.load(template) new_id = str(uuid.uuid4()) new_filename = new_id + '.json' new_filepath = os.path.join(root, 'cmdoysters', new_filename) with open(new_filepath, 'w') as new_file: oyster['uuid'] = new_id json.dump(oyster, new_file, indent=4, separators=(',', ': '), sort_keys=True) sys.stdout.write('{}\n'.format(new_filepath))
Make output terse and parseable.
Make output terse and parseable.
Python
mit
nbeaver/cmd-oysters,nbeaver/cmd-oysters
#! /usr/bin/env python from __future__ import print_function import os.path import sys import json import uuid root = sys.path[0] template_path = os.path.join(root, 'templates', 'simple.json') with open(template_path) as template: oyster = json.load(template) new_id = str(uuid.uuid4()) new_filename = new_id + '.json' new_filepath = os.path.join(root, 'cmdoysters', new_filename) with open(new_filepath, 'w') as new_file: oyster['uuid'] = new_id json.dump(oyster, new_file, indent=4, separators=(',', ': '), sort_keys=True) print('Created new CmdOyster:\n{}'.format(new_filepath)) Make output terse and parseable.
#! /usr/bin/env python import os.path import sys import json import uuid root = sys.path[0] template_path = os.path.join(root, 'templates', 'simple.json') with open(template_path) as template: oyster = json.load(template) new_id = str(uuid.uuid4()) new_filename = new_id + '.json' new_filepath = os.path.join(root, 'cmdoysters', new_filename) with open(new_filepath, 'w') as new_file: oyster['uuid'] = new_id json.dump(oyster, new_file, indent=4, separators=(',', ': '), sort_keys=True) sys.stdout.write('{}\n'.format(new_filepath))
<commit_before>#! /usr/bin/env python from __future__ import print_function import os.path import sys import json import uuid root = sys.path[0] template_path = os.path.join(root, 'templates', 'simple.json') with open(template_path) as template: oyster = json.load(template) new_id = str(uuid.uuid4()) new_filename = new_id + '.json' new_filepath = os.path.join(root, 'cmdoysters', new_filename) with open(new_filepath, 'w') as new_file: oyster['uuid'] = new_id json.dump(oyster, new_file, indent=4, separators=(',', ': '), sort_keys=True) print('Created new CmdOyster:\n{}'.format(new_filepath)) <commit_msg>Make output terse and parseable.<commit_after>
#! /usr/bin/env python import os.path import sys import json import uuid root = sys.path[0] template_path = os.path.join(root, 'templates', 'simple.json') with open(template_path) as template: oyster = json.load(template) new_id = str(uuid.uuid4()) new_filename = new_id + '.json' new_filepath = os.path.join(root, 'cmdoysters', new_filename) with open(new_filepath, 'w') as new_file: oyster['uuid'] = new_id json.dump(oyster, new_file, indent=4, separators=(',', ': '), sort_keys=True) sys.stdout.write('{}\n'.format(new_filepath))
#! /usr/bin/env python from __future__ import print_function import os.path import sys import json import uuid root = sys.path[0] template_path = os.path.join(root, 'templates', 'simple.json') with open(template_path) as template: oyster = json.load(template) new_id = str(uuid.uuid4()) new_filename = new_id + '.json' new_filepath = os.path.join(root, 'cmdoysters', new_filename) with open(new_filepath, 'w') as new_file: oyster['uuid'] = new_id json.dump(oyster, new_file, indent=4, separators=(',', ': '), sort_keys=True) print('Created new CmdOyster:\n{}'.format(new_filepath)) Make output terse and parseable.#! /usr/bin/env python import os.path import sys import json import uuid root = sys.path[0] template_path = os.path.join(root, 'templates', 'simple.json') with open(template_path) as template: oyster = json.load(template) new_id = str(uuid.uuid4()) new_filename = new_id + '.json' new_filepath = os.path.join(root, 'cmdoysters', new_filename) with open(new_filepath, 'w') as new_file: oyster['uuid'] = new_id json.dump(oyster, new_file, indent=4, separators=(',', ': '), sort_keys=True) sys.stdout.write('{}\n'.format(new_filepath))
<commit_before>#! /usr/bin/env python from __future__ import print_function import os.path import sys import json import uuid root = sys.path[0] template_path = os.path.join(root, 'templates', 'simple.json') with open(template_path) as template: oyster = json.load(template) new_id = str(uuid.uuid4()) new_filename = new_id + '.json' new_filepath = os.path.join(root, 'cmdoysters', new_filename) with open(new_filepath, 'w') as new_file: oyster['uuid'] = new_id json.dump(oyster, new_file, indent=4, separators=(',', ': '), sort_keys=True) print('Created new CmdOyster:\n{}'.format(new_filepath)) <commit_msg>Make output terse and parseable.<commit_after>#! /usr/bin/env python import os.path import sys import json import uuid root = sys.path[0] template_path = os.path.join(root, 'templates', 'simple.json') with open(template_path) as template: oyster = json.load(template) new_id = str(uuid.uuid4()) new_filename = new_id + '.json' new_filepath = os.path.join(root, 'cmdoysters', new_filename) with open(new_filepath, 'w') as new_file: oyster['uuid'] = new_id json.dump(oyster, new_file, indent=4, separators=(',', ': '), sort_keys=True) sys.stdout.write('{}\n'.format(new_filepath))
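A small follow-on sketch of how the path printed by this script might be consumed: reading the generated stub back and checking that its uuid field matches the filename. The filename convention mirrors the script above, but the verification helper itself is an illustration, not part of the repository.

import json
import os.path

def check_oyster(filepath):
    # The script names each file '<uuid>.json' and writes the same id
    # into the 'uuid' field, so the two should always agree.
    with open(filepath) as f:
        oyster = json.load(f)
    expected_id = os.path.splitext(os.path.basename(filepath))[0]
    return oyster.get('uuid') == expected_id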
3eb84a69cf39d72bf0d8dcf7f61c50972aca1c07
parse.py
parse.py
#!/usr/bin/env python # # **THIS SCRIPT IS WRITTEN FOR PYTHON 3.** # """ Usage: python3.4 parse.py ELECTION_NAME PRECINCTS.csv WINEDS.txt OUTPUT.tsv Parses the given files and writes a new output file to stdout. The new output file is tab-delimited (.tsv). Tabs are used since some fields contain commas (e.g. "US Representative, District 12"). Arguments: ELECTION_NAME: the name of the election for display purposes. This appears in the first line of the output file. An example value is "San Francisco June 3, 2014 Election". PRECINCTS.csv: path to a CSV file mapping precincts to their different districts and neighborhoods. WINEDS.txt: path to a TXT export file from the WinEDS Reporting Tool. The report contains vote totals for each precinct in each contest, along with "registered voters" and "ballots cast" totals. OUTPUT.tsv: desired output path. In the above, relative paths will be interpreted as relative to the current working directory. """ import sys from pywineds.parser import main if __name__ == "__main__": main(__doc__, sys.argv)
#!/usr/bin/env python # # **THIS SCRIPT IS WRITTEN FOR PYTHON 3.4.** # """ Usage: python3.4 parse.py ELECTION_NAME PRECINCTS.csv WINEDS.txt OUTPUT.tsv Parses the given files and writes a new output file to stdout. The new output file is tab-delimited (.tsv). Tabs are used since some fields contain commas (e.g. "US Representative, District 12"). Arguments: ELECTION_NAME: the name of the election for display purposes. This appears in the first line of the output file. An example value is "San Francisco June 3, 2014 Election". PRECINCTS.csv: path to a CSV file mapping precincts to their different districts and neighborhoods. WINEDS.txt: path to a TXT export file from the WinEDS Reporting Tool. The report contains vote totals for each precinct in each contest, along with "registered voters" and "ballots cast" totals. OUTPUT.tsv: desired output path. In the above, relative paths will be interpreted as relative to the current working directory. """ import sys from pywineds.parser import main if __name__ == "__main__": main(__doc__, sys.argv)
Switch to Python 3.4 wording.
Switch to Python 3.4 wording.
Python
bsd-3-clause
cjerdonek/wineds-converter
#!/usr/bin/env python # # **THIS SCRIPT IS WRITTEN FOR PYTHON 3.** # """ Usage: python3.4 parse.py ELECTION_NAME PRECINCTS.csv WINEDS.txt OUTPUT.tsv Parses the given files and writes a new output file to stdout. The new output file is tab-delimited (.tsv). Tabs are used since some fields contain commas (e.g. "US Representative, District 12"). Arguments: ELECTION_NAME: the name of the election for display purposes. This appears in the first line of the output file. An example value is "San Francisco June 3, 2014 Election". PRECINCTS.csv: path to a CSV file mapping precincts to their different districts and neighborhoods. WINEDS.txt: path to a TXT export file from the WinEDS Reporting Tool. The report contains vote totals for each precinct in each contest, along with "registered voters" and "ballots cast" totals. OUTPUT.tsv: desired output path. In the above, relative paths will be interpreted as relative to the current working directory. """ import sys from pywineds.parser import main if __name__ == "__main__": main(__doc__, sys.argv) Switch to Python 3.4 wording.
#!/usr/bin/env python # # **THIS SCRIPT IS WRITTEN FOR PYTHON 3.4.** # """ Usage: python3.4 parse.py ELECTION_NAME PRECINCTS.csv WINEDS.txt OUTPUT.tsv Parses the given files and writes a new output file to stdout. The new output file is tab-delimited (.tsv). Tabs are used since some fields contain commas (e.g. "US Representative, District 12"). Arguments: ELECTION_NAME: the name of the election for display purposes. This appears in the first line of the output file. An example value is "San Francisco June 3, 2014 Election". PRECINCTS.csv: path to a CSV file mapping precincts to their different districts and neighborhoods. WINEDS.txt: path to a TXT export file from the WinEDS Reporting Tool. The report contains vote totals for each precinct in each contest, along with "registered voters" and "ballots cast" totals. OUTPUT.tsv: desired output path. In the above, relative paths will be interpreted as relative to the current working directory. """ import sys from pywineds.parser import main if __name__ == "__main__": main(__doc__, sys.argv)
<commit_before>#!/usr/bin/env python # # **THIS SCRIPT IS WRITTEN FOR PYTHON 3.** # """ Usage: python3.4 parse.py ELECTION_NAME PRECINCTS.csv WINEDS.txt OUTPUT.tsv Parses the given files and writes a new output file to stdout. The new output file is tab-delimited (.tsv). Tabs are used since some fields contain commas (e.g. "US Representative, District 12"). Arguments: ELECTION_NAME: the name of the election for display purposes. This appears in the first line of the output file. An example value is "San Francisco June 3, 2014 Election". PRECINCTS.csv: path to a CSV file mapping precincts to their different districts and neighborhoods. WINEDS.txt: path to a TXT export file from the WinEDS Reporting Tool. The report contains vote totals for each precinct in each contest, along with "registered voters" and "ballots cast" totals. OUTPUT.tsv: desired output path. In the above, relative paths will be interpreted as relative to the current working directory. """ import sys from pywineds.parser import main if __name__ == "__main__": main(__doc__, sys.argv) <commit_msg>Switch to Python 3.4 wording.<commit_after>
#!/usr/bin/env python # # **THIS SCRIPT IS WRITTEN FOR PYTHON 3.4.** # """ Usage: python3.4 parse.py ELECTION_NAME PRECINCTS.csv WINEDS.txt OUTPUT.tsv Parses the given files and writes a new output file to stdout. The new output file is tab-delimited (.tsv). Tabs are used since some fields contain commas (e.g. "US Representative, District 12"). Arguments: ELECTION_NAME: the name of the election for display purposes. This appears in the first line of the output file. An example value is "San Francisco June 3, 2014 Election". PRECINCTS.csv: path to a CSV file mapping precincts to their different districts and neighborhoods. WINEDS.txt: path to a TXT export file from the WinEDS Reporting Tool. The report contains vote totals for each precinct in each contest, along with "registered voters" and "ballots cast" totals. OUTPUT.tsv: desired output path. In the above, relative paths will be interpreted as relative to the current working directory. """ import sys from pywineds.parser import main if __name__ == "__main__": main(__doc__, sys.argv)
#!/usr/bin/env python # # **THIS SCRIPT IS WRITTEN FOR PYTHON 3.** # """ Usage: python3.4 parse.py ELECTION_NAME PRECINCTS.csv WINEDS.txt OUTPUT.tsv Parses the given files and writes a new output file to stdout. The new output file is tab-delimited (.tsv). Tabs are used since some fields contain commas (e.g. "US Representative, District 12"). Arguments: ELECTION_NAME: the name of the election for display purposes. This appears in the first line of the output file. An example value is "San Francisco June 3, 2014 Election". PRECINCTS.csv: path to a CSV file mapping precincts to their different districts and neighborhoods. WINEDS.txt: path to a TXT export file from the WinEDS Reporting Tool. The report contains vote totals for each precinct in each contest, along with "registered voters" and "ballots cast" totals. OUTPUT.tsv: desired output path. In the above, relative paths will be interpreted as relative to the current working directory. """ import sys from pywineds.parser import main if __name__ == "__main__": main(__doc__, sys.argv) Switch to Python 3.4 wording.#!/usr/bin/env python # # **THIS SCRIPT IS WRITTEN FOR PYTHON 3.4.** # """ Usage: python3.4 parse.py ELECTION_NAME PRECINCTS.csv WINEDS.txt OUTPUT.tsv Parses the given files and writes a new output file to stdout. The new output file is tab-delimited (.tsv). Tabs are used since some fields contain commas (e.g. "US Representative, District 12"). Arguments: ELECTION_NAME: the name of the election for display purposes. This appears in the first line of the output file. An example value is "San Francisco June 3, 2014 Election". PRECINCTS.csv: path to a CSV file mapping precincts to their different districts and neighborhoods. WINEDS.txt: path to a TXT export file from the WinEDS Reporting Tool. The report contains vote totals for each precinct in each contest, along with "registered voters" and "ballots cast" totals. OUTPUT.tsv: desired output path. In the above, relative paths will be interpreted as relative to the current working directory. """ import sys from pywineds.parser import main if __name__ == "__main__": main(__doc__, sys.argv)
<commit_before>#!/usr/bin/env python # # **THIS SCRIPT IS WRITTEN FOR PYTHON 3.** # """ Usage: python3.4 parse.py ELECTION_NAME PRECINCTS.csv WINEDS.txt OUTPUT.tsv Parses the given files and writes a new output file to stdout. The new output file is tab-delimited (.tsv). Tabs are used since some fields contain commas (e.g. "US Representative, District 12"). Arguments: ELECTION_NAME: the name of the election for display purposes. This appears in the first line of the output file. An example value is "San Francisco June 3, 2014 Election". PRECINCTS.csv: path to a CSV file mapping precincts to their different districts and neighborhoods. WINEDS.txt: path to a TXT export file from the WinEDS Reporting Tool. The report contains vote totals for each precinct in each contest, along with "registered voters" and "ballots cast" totals. OUTPUT.tsv: desired output path. In the above, relative paths will be interpreted as relative to the current working directory. """ import sys from pywineds.parser import main if __name__ == "__main__": main(__doc__, sys.argv) <commit_msg>Switch to Python 3.4 wording.<commit_after>#!/usr/bin/env python # # **THIS SCRIPT IS WRITTEN FOR PYTHON 3.4.** # """ Usage: python3.4 parse.py ELECTION_NAME PRECINCTS.csv WINEDS.txt OUTPUT.tsv Parses the given files and writes a new output file to stdout. The new output file is tab-delimited (.tsv). Tabs are used since some fields contain commas (e.g. "US Representative, District 12"). Arguments: ELECTION_NAME: the name of the election for display purposes. This appears in the first line of the output file. An example value is "San Francisco June 3, 2014 Election". PRECINCTS.csv: path to a CSV file mapping precincts to their different districts and neighborhoods. WINEDS.txt: path to a TXT export file from the WinEDS Reporting Tool. The report contains vote totals for each precinct in each contest, along with "registered voters" and "ballots cast" totals. OUTPUT.tsv: desired output path. In the above, relative paths will be interpreted as relative to the current working directory. """ import sys from pywineds.parser import main if __name__ == "__main__": main(__doc__, sys.argv)
e26b0acb0d935348bcbc4e9e012cff3c9ecb353f
journal/tests/test_activity.py
journal/tests/test_activity.py
import datetime from django.test import TestCase from journal.models import Activity, Entry from journal.serializers import ActivitySerializer class ActivityTestCase(TestCase): """Sanity checks for activity""" def setUp(self): cat_e = Entry.objects.create(entry='I like walking the cat') Activity.objects.create(name='Walking the cat', description='Walking the cat around the block', activity_type=[1, 2], learning_obj=[1, 2, 3], entries=cat_e, start_date=datetime.date.today) def test_activity_serializer(self): cat_activity = Activity.objects.get(name='Walking the cat') self.assertEqual(cat_activity.name, 'Walking the cat') cat_serializer = ActivitySerializer(cat_activity) self.assertEqual(cat_serializer.data['description'], 'Walking the cat around the block')
import datetime from django.test import TestCase from journal.models import Activity, Entry from journal.serializers import ActivitySerializer class ActivityTestCase(TestCase): """Sanity checks for activity""" def setUp(self): cat_e = Entry.objects.create(entry='I like walking the cat') Activity.objects.create(name='Walking the cat', description='Walking the cat around the block', activity_type='1', learning_obj='1,2,3', entries=cat_e, start_date=datetime.date.today) def test_activity_serializer(self): cat_activity = Activity.objects.get(name='Walking the cat') self.assertEqual(cat_activity.name, 'Walking the cat') cat_serializer = ActivitySerializer(cat_activity) self.assertEqual(cat_serializer.data['description'], 'Walking the cat around the block')
Fix Activity CSV field test
Fix Activity CSV field test
Python
apache-2.0
WildCAS/CASCategorization,WildCAS/CASCategorization,WildCAS/CASCategorization
import datetime from django.test import TestCase from journal.models import Activity, Entry from journal.serializers import ActivitySerializer class ActivityTestCase(TestCase): """Sanity checks for activity""" def setUp(self): cat_e = Entry.objects.create(entry='I like walking the cat') Activity.objects.create(name='Walking the cat', description='Walking the cat around the block', activity_type=[1, 2], learning_obj=[1, 2, 3], entries=cat_e, start_date=datetime.date.today) def test_activity_serializer(self): cat_activity = Activity.objects.get(name='Walking the cat') self.assertEqual(cat_activity.name, 'Walking the cat') cat_serializer = ActivitySerializer(cat_activity) self.assertEqual(cat_serializer.data['description'], 'Walking the cat around the block') Fix Activity CSV field test
import datetime from django.test import TestCase from journal.models import Activity, Entry from journal.serializers import ActivitySerializer class ActivityTestCase(TestCase): """Sanity checks for activity""" def setUp(self): cat_e = Entry.objects.create(entry='I like walking the cat') Activity.objects.create(name='Walking the cat', description='Walking the cat around the block', activity_type='1', learning_obj='1,2,3', entries=cat_e, start_date=datetime.date.today) def test_activity_serializer(self): cat_activity = Activity.objects.get(name='Walking the cat') self.assertEqual(cat_activity.name, 'Walking the cat') cat_serializer = ActivitySerializer(cat_activity) self.assertEqual(cat_serializer.data['description'], 'Walking the cat around the block')
<commit_before>import datetime from django.test import TestCase from journal.models import Activity, Entry from journal.serializers import ActivitySerializer class ActivityTestCase(TestCase): """Sanity checks for activity""" def setUp(self): cat_e = Entry.objects.create(entry='I like walking the cat') Activity.objects.create(name='Walking the cat', description='Walking the cat around the block', activity_type=[1, 2], learning_obj=[1, 2, 3], entries=cat_e, start_date=datetime.date.today) def test_activity_serializer(self): cat_activity = Activity.objects.get(name='Walking the cat') self.assertEqual(cat_activity.name, 'Walking the cat') cat_serializer = ActivitySerializer(cat_activity) self.assertEqual(cat_serializer.data['description'], 'Walking the cat around the block') <commit_msg>Fix Activity CSV field test<commit_after>
import datetime from django.test import TestCase from journal.models import Activity, Entry from journal.serializers import ActivitySerializer class ActivityTestCase(TestCase): """Sanity checks for activity""" def setUp(self): cat_e = Entry.objects.create(entry='I like walking the cat') Activity.objects.create(name='Walking the cat', description='Walking the cat around the block', activity_type='1', learning_obj='1,2,3', entries=cat_e, start_date=datetime.date.today) def test_activity_serializer(self): cat_activity = Activity.objects.get(name='Walking the cat') self.assertEqual(cat_activity.name, 'Walking the cat') cat_serializer = ActivitySerializer(cat_activity) self.assertEqual(cat_serializer.data['description'], 'Walking the cat around the block')
import datetime from django.test import TestCase from journal.models import Activity, Entry from journal.serializers import ActivitySerializer class ActivityTestCase(TestCase): """Sanity checks for activity""" def setUp(self): cat_e = Entry.objects.create(entry='I like walking the cat') Activity.objects.create(name='Walking the cat', description='Walking the cat around the block', activity_type=[1, 2], learning_obj=[1, 2, 3], entries=cat_e, start_date=datetime.date.today) def test_activity_serializer(self): cat_activity = Activity.objects.get(name='Walking the cat') self.assertEqual(cat_activity.name, 'Walking the cat') cat_serializer = ActivitySerializer(cat_activity) self.assertEqual(cat_serializer.data['description'], 'Walking the cat around the block') Fix Activity CSV field testimport datetime from django.test import TestCase from journal.models import Activity, Entry from journal.serializers import ActivitySerializer class ActivityTestCase(TestCase): """Sanity checks for activity""" def setUp(self): cat_e = Entry.objects.create(entry='I like walking the cat') Activity.objects.create(name='Walking the cat', description='Walking the cat around the block', activity_type='1', learning_obj='1,2,3', entries=cat_e, start_date=datetime.date.today) def test_activity_serializer(self): cat_activity = Activity.objects.get(name='Walking the cat') self.assertEqual(cat_activity.name, 'Walking the cat') cat_serializer = ActivitySerializer(cat_activity) self.assertEqual(cat_serializer.data['description'], 'Walking the cat around the block')
<commit_before>import datetime from django.test import TestCase from journal.models import Activity, Entry from journal.serializers import ActivitySerializer class ActivityTestCase(TestCase): """Sanity checks for activity""" def setUp(self): cat_e = Entry.objects.create(entry='I like walking the cat') Activity.objects.create(name='Walking the cat', description='Walking the cat around the block', activity_type=[1, 2], learning_obj=[1, 2, 3], entries=cat_e, start_date=datetime.date.today) def test_activity_serializer(self): cat_activity = Activity.objects.get(name='Walking the cat') self.assertEqual(cat_activity.name, 'Walking the cat') cat_serializer = ActivitySerializer(cat_activity) self.assertEqual(cat_serializer.data['description'], 'Walking the cat around the block') <commit_msg>Fix Activity CSV field test<commit_after>import datetime from django.test import TestCase from journal.models import Activity, Entry from journal.serializers import ActivitySerializer class ActivityTestCase(TestCase): """Sanity checks for activity""" def setUp(self): cat_e = Entry.objects.create(entry='I like walking the cat') Activity.objects.create(name='Walking the cat', description='Walking the cat around the block', activity_type='1', learning_obj='1,2,3', entries=cat_e, start_date=datetime.date.today) def test_activity_serializer(self): cat_activity = Activity.objects.get(name='Walking the cat') self.assertEqual(cat_activity.name, 'Walking the cat') cat_serializer = ActivitySerializer(cat_activity) self.assertEqual(cat_serializer.data['description'], 'Walking the cat around the block')
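For context on why the test in this record now passes plain strings: a comma-separated-values field ultimately stores and returns a string, so consumers typically split it back out themselves. A minimal sketch of that round trip (the helper below is illustrative, not part of the journal app):

def split_csv_field(value):
    # '1,2,3' -> [1, 2, 3]; empty or missing values yield an empty list.
    if not value:
        return []
    return [int(part) for part in value.split(',')]

assert split_csv_field('1,2,3') == [1, 2, 3]
assert split_csv_field('1') == [1]
assert split_csv_field('') == []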
54cdfe437c5382bde19f5d63086ce54c3d991e8b
ibmcnx/doc/DataSources.py
ibmcnx/doc/DataSources.py
###### # Check ExId (GUID) by Email through JDBC # # Author: Christoph Stoettner # Mail: christoph.stoettner@stoeps.de # Documentation: http://scripting101.stoeps.de # # Version: 2.0 # Date: 2014-06-04 # # License: Apache 2.0 # # Check ExId of a User in all Connections Applications import ibmcnx.functions dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check for db in dbs: t1 = ibmcnx.functions.getDSId( db ) AdminConfig.show( t1 ) print '\n\n' AdminConfig.showall( t1 ) AdminConfig.showAttribute(t1,'[[statementCacheSize]]' )
###### # Check ExId (GUID) by Email through JDBC # # Author: Christoph Stoettner # Mail: christoph.stoettner@stoeps.de # Documentation: http://scripting101.stoeps.de # # Version: 2.0 # Date: 2014-06-04 # # License: Apache 2.0 # # Check ExId of a User in all Connections Applications import ibmcnx.functions dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check for db in dbs: t1 = ibmcnx.functions.getDSId( db ) AdminConfig.show( t1 ) print '\n\n' AdminConfig.showall( t1 ) AdminConfig.showAttribute(t1,'statementCacheSize' ) AdminConfig.showAttribute(t1,'[statementCacheSize]' )
Create documentation of DataSource Settings
: Create documentation of DataSource Settings Task-Url:
Python
apache-2.0
stoeps13/ibmcnx2,stoeps13/ibmcnx2
###### # Check ExId (GUID) by Email through JDBC # # Author: Christoph Stoettner # Mail: christoph.stoettner@stoeps.de # Documentation: http://scripting101.stoeps.de # # Version: 2.0 # Date: 2014-06-04 # # License: Apache 2.0 # # Check ExId of a User in all Connections Applications import ibmcnx.functions dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check for db in dbs: t1 = ibmcnx.functions.getDSId( db ) AdminConfig.show( t1 ) print '\n\n' AdminConfig.showall( t1 ) AdminConfig.showAttribute(t1,'[[statementCacheSize]]' ): Create documentation of DataSource Settings Task-Url:
###### # Check ExId (GUID) by Email through JDBC # # Author: Christoph Stoettner # Mail: christoph.stoettner@stoeps.de # Documentation: http://scripting101.stoeps.de # # Version: 2.0 # Date: 2014-06-04 # # License: Apache 2.0 # # Check ExId of a User in all Connections Applications import ibmcnx.functions dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check for db in dbs: t1 = ibmcnx.functions.getDSId( db ) AdminConfig.show( t1 ) print '\n\n' AdminConfig.showall( t1 ) AdminConfig.showAttribute(t1,'statementCacheSize' ) AdminConfig.showAttribute(t1,'[statementCacheSize]' )
<commit_before>###### # Check ExId (GUID) by Email through JDBC # # Author: Christoph Stoettner # Mail: christoph.stoettner@stoeps.de # Documentation: http://scripting101.stoeps.de # # Version: 2.0 # Date: 2014-06-04 # # License: Apache 2.0 # # Check ExId of a User in all Connections Applications import ibmcnx.functions dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check for db in dbs: t1 = ibmcnx.functions.getDSId( db ) AdminConfig.show( t1 ) print '\n\n' AdminConfig.showall( t1 ) AdminConfig.showAttribute(t1,'[[statementCacheSize]]' )<commit_msg>: Create documentation of DataSource Settings Task-Url: <commit_after>
###### # Check ExId (GUID) by Email through JDBC # # Author: Christoph Stoettner # Mail: christoph.stoettner@stoeps.de # Documentation: http://scripting101.stoeps.de # # Version: 2.0 # Date: 2014-06-04 # # License: Apache 2.0 # # Check ExId of a User in all Connections Applications import ibmcnx.functions dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check for db in dbs: t1 = ibmcnx.functions.getDSId( db ) AdminConfig.show( t1 ) print '\n\n' AdminConfig.showall( t1 ) AdminConfig.showAttribute(t1,'statementCacheSize' ) AdminConfig.showAttribute(t1,'[statementCacheSize]' )
###### # Check ExId (GUID) by Email through JDBC # # Author: Christoph Stoettner # Mail: christoph.stoettner@stoeps.de # Documentation: http://scripting101.stoeps.de # # Version: 2.0 # Date: 2014-06-04 # # License: Apache 2.0 # # Check ExId of a User in all Connections Applications import ibmcnx.functions dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check for db in dbs: t1 = ibmcnx.functions.getDSId( db ) AdminConfig.show( t1 ) print '\n\n' AdminConfig.showall( t1 ) AdminConfig.showAttribute(t1,'[[statementCacheSize]]' ): Create documentation of DataSource Settings Task-Url: ###### # Check ExId (GUID) by Email through JDBC # # Author: Christoph Stoettner # Mail: christoph.stoettner@stoeps.de # Documentation: http://scripting101.stoeps.de # # Version: 2.0 # Date: 2014-06-04 # # License: Apache 2.0 # # Check ExId of a User in all Connections Applications import ibmcnx.functions dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check for db in dbs: t1 = ibmcnx.functions.getDSId( db ) AdminConfig.show( t1 ) print '\n\n' AdminConfig.showall( t1 ) AdminConfig.showAttribute(t1,'statementCacheSize' ) AdminConfig.showAttribute(t1,'[statementCacheSize]' )
<commit_before>###### # Check ExId (GUID) by Email through JDBC # # Author: Christoph Stoettner # Mail: christoph.stoettner@stoeps.de # Documentation: http://scripting101.stoeps.de # # Version: 2.0 # Date: 2014-06-04 # # License: Apache 2.0 # # Check ExId of a User in all Connections Applications import ibmcnx.functions dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check for db in dbs: t1 = ibmcnx.functions.getDSId( db ) AdminConfig.show( t1 ) print '\n\n' AdminConfig.showall( t1 ) AdminConfig.showAttribute(t1,'[[statementCacheSize]]' )<commit_msg>: Create documentation of DataSource Settings Task-Url: <commit_after>###### # Check ExId (GUID) by Email through JDBC # # Author: Christoph Stoettner # Mail: christoph.stoettner@stoeps.de # Documentation: http://scripting101.stoeps.de # # Version: 2.0 # Date: 2014-06-04 # # License: Apache 2.0 # # Check ExId of a User in all Connections Applications import ibmcnx.functions dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check for db in dbs: t1 = ibmcnx.functions.getDSId( db ) AdminConfig.show( t1 ) print '\n\n' AdminConfig.showall( t1 ) AdminConfig.showAttribute(t1,'statementCacheSize' ) AdminConfig.showAttribute(t1,'[statementCacheSize]' )
b04e7afbd56518ba0e825d70b11a0c88e2d6e29d
astm/tests/utils.py
astm/tests/utils.py
# -*- coding: utf-8 -*- # # Copyright (C) 2012 Alexander Shorin # All rights reserved. # # This software is licensed as described in the file COPYING, which # you should have received as part of this distribution. # class DummyMixIn(object): _input_buffer = '' def flush(self): pass def close(self): pass class CallLogger(object): def __init__(self, func): self.func = func self.was_called = False def __call__(self, *args, **kwargs): self.was_called = True return self.func(*args, **kwargs) def track_call(func): return CallLogger(func)
# -*- coding: utf-8 -*- # # Copyright (C) 2012 Alexander Shorin # All rights reserved. # # This software is licensed as described in the file COPYING, which # you should have received as part of this distribution. # class DummyMixIn(object): _input_buffer = '' addr = ('localhost', '15200') def flush(self): pass def close(self): pass class CallLogger(object): def __init__(self, func): self.func = func self.was_called = False def __call__(self, *args, **kwargs): self.was_called = True return self.func(*args, **kwargs) def track_call(func): return CallLogger(func)
Set dummy address info for tests.
Set dummy address info for tests.
Python
bsd-3-clause
asingla87/python-astm,andrexmd/python-astm,pombreda/python-astm,mhaulo/python-astm,MarcosHaenisch/python-astm,briankip/python-astm,kxepal/python-astm,123412345/python-astm,tinoshot/python-astm,eddiep1101/python-astm,LogicalKnight/python-astm,Iskander1b/python-astm,AlanZatarain/python-astm,kxepal/python-astm,tectronics/python-astm,Alwnikrotikz/python-astm
# -*- coding: utf-8 -*- # # Copyright (C) 2012 Alexander Shorin # All rights reserved. # # This software is licensed as described in the file COPYING, which # you should have received as part of this distribution. # class DummyMixIn(object): _input_buffer = '' def flush(self): pass def close(self): pass class CallLogger(object): def __init__(self, func): self.func = func self.was_called = False def __call__(self, *args, **kwargs): self.was_called = True return self.func(*args, **kwargs) def track_call(func): return CallLogger(func) Set dummy address info for tests.
# -*- coding: utf-8 -*- # # Copyright (C) 2012 Alexander Shorin # All rights reserved. # # This software is licensed as described in the file COPYING, which # you should have received as part of this distribution. # class DummyMixIn(object): _input_buffer = '' addr = ('localhost', '15200') def flush(self): pass def close(self): pass class CallLogger(object): def __init__(self, func): self.func = func self.was_called = False def __call__(self, *args, **kwargs): self.was_called = True return self.func(*args, **kwargs) def track_call(func): return CallLogger(func)
<commit_before># -*- coding: utf-8 -*- # # Copyright (C) 2012 Alexander Shorin # All rights reserved. # # This software is licensed as described in the file COPYING, which # you should have received as part of this distribution. # class DummyMixIn(object): _input_buffer = '' def flush(self): pass def close(self): pass class CallLogger(object): def __init__(self, func): self.func = func self.was_called = False def __call__(self, *args, **kwargs): self.was_called = True return self.func(*args, **kwargs) def track_call(func): return CallLogger(func) <commit_msg>Set dummy address info for tests.<commit_after>
# -*- coding: utf-8 -*- # # Copyright (C) 2012 Alexander Shorin # All rights reserved. # # This software is licensed as described in the file COPYING, which # you should have received as part of this distribution. # class DummyMixIn(object): _input_buffer = '' addr = ('localhost', '15200') def flush(self): pass def close(self): pass class CallLogger(object): def __init__(self, func): self.func = func self.was_called = False def __call__(self, *args, **kwargs): self.was_called = True return self.func(*args, **kwargs) def track_call(func): return CallLogger(func)
# -*- coding: utf-8 -*- # # Copyright (C) 2012 Alexander Shorin # All rights reserved. # # This software is licensed as described in the file COPYING, which # you should have received as part of this distribution. # class DummyMixIn(object): _input_buffer = '' def flush(self): pass def close(self): pass class CallLogger(object): def __init__(self, func): self.func = func self.was_called = False def __call__(self, *args, **kwargs): self.was_called = True return self.func(*args, **kwargs) def track_call(func): return CallLogger(func) Set dummy address info for tests.# -*- coding: utf-8 -*- # # Copyright (C) 2012 Alexander Shorin # All rights reserved. # # This software is licensed as described in the file COPYING, which # you should have received as part of this distribution. # class DummyMixIn(object): _input_buffer = '' addr = ('localhost', '15200') def flush(self): pass def close(self): pass class CallLogger(object): def __init__(self, func): self.func = func self.was_called = False def __call__(self, *args, **kwargs): self.was_called = True return self.func(*args, **kwargs) def track_call(func): return CallLogger(func)
<commit_before># -*- coding: utf-8 -*- # # Copyright (C) 2012 Alexander Shorin # All rights reserved. # # This software is licensed as described in the file COPYING, which # you should have received as part of this distribution. # class DummyMixIn(object): _input_buffer = '' def flush(self): pass def close(self): pass class CallLogger(object): def __init__(self, func): self.func = func self.was_called = False def __call__(self, *args, **kwargs): self.was_called = True return self.func(*args, **kwargs) def track_call(func): return CallLogger(func) <commit_msg>Set dummy address info for tests.<commit_after># -*- coding: utf-8 -*- # # Copyright (C) 2012 Alexander Shorin # All rights reserved. # # This software is licensed as described in the file COPYING, which # you should have received as part of this distribution. # class DummyMixIn(object): _input_buffer = '' addr = ('localhost', '15200') def flush(self): pass def close(self): pass class CallLogger(object): def __init__(self, func): self.func = func self.was_called = False def __call__(self, *args, **kwargs): self.was_called = True return self.func(*args, **kwargs) def track_call(func): return CallLogger(func)
4cd96824e2903397751a738cd1736ad2809b6c04
cypher/cypher.py
cypher/cypher.py
import argparse from .util import identify parser = argparse.ArgumentParser( prog="cypher", description="A source code identification tool." ) parser.add_argument( "src", nargs=1, help="Path to unknown source code." ) parser.add_argument( "-v", "--verbose", action="store_true", help="Return all scores." ) args = vars(parser.parse_args()) def main(): result = identify(args["src"][0], verbose=args["verbose"]) if result < 0: return -1 else: print(result) return 0
import argparse from .util import identify parser = argparse.ArgumentParser( prog="cypher", description="A source code identification tool." ) parser.add_argument( "src", nargs=1, help="Path to unknown source code." ) parser.add_argument( "-v", "--verbose", action="store_true", help="Return all scores." ) args = vars(parser.parse_args()) def main(): result = identify(args["src"][0], verbose=args["verbose"]) if result < 0: print("Language not recognized.") else: print(result) return result if result < 0 else 0
Print "Language not recognized." on failure
Print "Language not recognized." on failure
Python
mit
jdkato/codetype,jdkato/codetype
import argparse from .util import identify parser = argparse.ArgumentParser( prog="cypher", description="A source code identification tool." ) parser.add_argument( "src", nargs=1, help="Path to unknown source code." ) parser.add_argument( "-v", "--verbose", action="store_true", help="Return all scores." ) args = vars(parser.parse_args()) def main(): result = identify(args["src"][0], verbose=args["verbose"]) if result < 0: return -1 else: print(result) return 0 Print "Language not recognized." on failure
import argparse from .util import identify parser = argparse.ArgumentParser( prog="cypher", description="A source code identification tool." ) parser.add_argument( "src", nargs=1, help="Path to unknown source code." ) parser.add_argument( "-v", "--verbose", action="store_true", help="Return all scores." ) args = vars(parser.parse_args()) def main(): result = identify(args["src"][0], verbose=args["verbose"]) if result < 0: print("Language not recognized.") else: print(result) return result if result < 0 else 0
<commit_before>import argparse from .util import identify parser = argparse.ArgumentParser( prog="cypher", description="A source code identification tool." ) parser.add_argument( "src", nargs=1, help="Path to unknown source code." ) parser.add_argument( "-v", "--verbose", action="store_true", help="Return all scores." ) args = vars(parser.parse_args()) def main(): result = identify(args["src"][0], verbose=args["verbose"]) if result < 0: return -1 else: print(result) return 0 <commit_msg>Print "Language not recognized." on failure<commit_after>
import argparse from .util import identify parser = argparse.ArgumentParser( prog="cypher", description="A source code identification tool." ) parser.add_argument( "src", nargs=1, help="Path to unknown source code." ) parser.add_argument( "-v", "--verbose", action="store_true", help="Return all scores." ) args = vars(parser.parse_args()) def main(): result = identify(args["src"][0], verbose=args["verbose"]) if result < 0: print("Language not recognized.") else: print(result) return result if result < 0 else 0
import argparse from .util import identify parser = argparse.ArgumentParser( prog="cypher", description="A source code identification tool." ) parser.add_argument( "src", nargs=1, help="Path to unknown source code." ) parser.add_argument( "-v", "--verbose", action="store_true", help="Return all scores." ) args = vars(parser.parse_args()) def main(): result = identify(args["src"][0], verbose=args["verbose"]) if result < 0: return -1 else: print(result) return 0 Print "Language not recognized." on failureimport argparse from .util import identify parser = argparse.ArgumentParser( prog="cypher", description="A source code identification tool." ) parser.add_argument( "src", nargs=1, help="Path to unknown source code." ) parser.add_argument( "-v", "--verbose", action="store_true", help="Return all scores." ) args = vars(parser.parse_args()) def main(): result = identify(args["src"][0], verbose=args["verbose"]) if result < 0: print("Language not recognized.") else: print(result) return result if result < 0 else 0
<commit_before>import argparse from .util import identify parser = argparse.ArgumentParser( prog="cypher", description="A source code identification tool." ) parser.add_argument( "src", nargs=1, help="Path to unknown source code." ) parser.add_argument( "-v", "--verbose", action="store_true", help="Return all scores." ) args = vars(parser.parse_args()) def main(): result = identify(args["src"][0], verbose=args["verbose"]) if result < 0: return -1 else: print(result) return 0 <commit_msg>Print "Language not recognized." on failure<commit_after>import argparse from .util import identify parser = argparse.ArgumentParser( prog="cypher", description="A source code identification tool." ) parser.add_argument( "src", nargs=1, help="Path to unknown source code." ) parser.add_argument( "-v", "--verbose", action="store_true", help="Return all scores." ) args = vars(parser.parse_args()) def main(): result = identify(args["src"][0], verbose=args["verbose"]) if result < 0: print("Language not recognized.") else: print(result) return result if result < 0 else 0
b589fd212b8cbeeb64d41f0276c17278b9b4bba4
st2client/st2client/models/datastore.py
st2client/st2client/models/datastore.py
# Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging from st2client.models import core LOG = logging.getLogger(__name__) class KeyValuePair(core.Resource): _alias = 'Key' _display_name = 'Key Value Pair' _plural = 'Keys' _plural_display_name = 'Key Value Pairs' _repr_attributes = ['name', 'value'] @property def id(self): # Note: This is a temporary hack until we refactor client and make it support non id PKs return self.name
# Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging from st2client.models import core LOG = logging.getLogger(__name__) class KeyValuePair(core.Resource): _alias = 'Key' _display_name = 'Key Value Pair' _plural = 'Keys' _plural_display_name = 'Key Value Pairs' _repr_attributes = ['name', 'value'] # Note: This is a temporary hack until we refactor client and make it support non id PKs def get_id(self): return self.name def set_id(self, value): self.name = value id = property(get_id, set_id)
Fix it so it still works with a setter.
Fix it so it still works with a setter.
Python
apache-2.0
tonybaloney/st2,StackStorm/st2,StackStorm/st2,punalpatel/st2,punalpatel/st2,Plexxi/st2,Plexxi/st2,grengojbo/st2,pixelrebel/st2,pinterb/st2,pixelrebel/st2,dennybaa/st2,alfasin/st2,alfasin/st2,tonybaloney/st2,Itxaka/st2,nzlosh/st2,nzlosh/st2,armab/st2,pinterb/st2,peak6/st2,dennybaa/st2,jtopjian/st2,lakshmi-kannan/st2,tonybaloney/st2,punalpatel/st2,StackStorm/st2,jtopjian/st2,lakshmi-kannan/st2,emedvedev/st2,armab/st2,Plexxi/st2,alfasin/st2,dennybaa/st2,pixelrebel/st2,Itxaka/st2,peak6/st2,emedvedev/st2,jtopjian/st2,grengojbo/st2,Itxaka/st2,grengojbo/st2,armab/st2,pinterb/st2,StackStorm/st2,nzlosh/st2,nzlosh/st2,lakshmi-kannan/st2,peak6/st2,Plexxi/st2,emedvedev/st2
# Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging from st2client.models import core LOG = logging.getLogger(__name__) class KeyValuePair(core.Resource): _alias = 'Key' _display_name = 'Key Value Pair' _plural = 'Keys' _plural_display_name = 'Key Value Pairs' _repr_attributes = ['name', 'value'] @property def id(self): # Note: This is a temporary hack until we refactor client and make it support non id PKs return self.name Fix it so it still works with a setter.
# Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging from st2client.models import core LOG = logging.getLogger(__name__) class KeyValuePair(core.Resource): _alias = 'Key' _display_name = 'Key Value Pair' _plural = 'Keys' _plural_display_name = 'Key Value Pairs' _repr_attributes = ['name', 'value'] # Note: This is a temporary hack until we refactor client and make it support non id PKs def get_id(self): return self.name def set_id(self, value): self.name = value id = property(get_id, set_id)
<commit_before># Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging from st2client.models import core LOG = logging.getLogger(__name__) class KeyValuePair(core.Resource): _alias = 'Key' _display_name = 'Key Value Pair' _plural = 'Keys' _plural_display_name = 'Key Value Pairs' _repr_attributes = ['name', 'value'] @property def id(self): # Note: This is a temporary hack until we refactor client and make it support non id PKs return self.name <commit_msg>Fix it so it still works with a setter.<commit_after>
# Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging from st2client.models import core LOG = logging.getLogger(__name__) class KeyValuePair(core.Resource): _alias = 'Key' _display_name = 'Key Value Pair' _plural = 'Keys' _plural_display_name = 'Key Value Pairs' _repr_attributes = ['name', 'value'] # Note: This is a temporary hack until we refactor client and make it support non id PKs def get_id(self): return self.name def set_id(self, value): self.name = value id = property(get_id, set_id)
# Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging from st2client.models import core LOG = logging.getLogger(__name__) class KeyValuePair(core.Resource): _alias = 'Key' _display_name = 'Key Value Pair' _plural = 'Keys' _plural_display_name = 'Key Value Pairs' _repr_attributes = ['name', 'value'] @property def id(self): # Note: This is a temporary hack until we refactor client and make it support non id PKs return self.name Fix it so it still works with a setter.# Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging from st2client.models import core LOG = logging.getLogger(__name__) class KeyValuePair(core.Resource): _alias = 'Key' _display_name = 'Key Value Pair' _plural = 'Keys' _plural_display_name = 'Key Value Pairs' _repr_attributes = ['name', 'value'] # Note: This is a temporary hack until we refactor client and make it support non id PKs def get_id(self): return self.name def set_id(self, value): self.name = value id = property(get_id, set_id)
<commit_before># Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging from st2client.models import core LOG = logging.getLogger(__name__) class KeyValuePair(core.Resource): _alias = 'Key' _display_name = 'Key Value Pair' _plural = 'Keys' _plural_display_name = 'Key Value Pairs' _repr_attributes = ['name', 'value'] @property def id(self): # Note: This is a temporary hack until we refactor client and make it support non id PKs return self.name <commit_msg>Fix it so it still works with a setter.<commit_after># Licensed to the StackStorm, Inc ('StackStorm') under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import logging from st2client.models import core LOG = logging.getLogger(__name__) class KeyValuePair(core.Resource): _alias = 'Key' _display_name = 'Key Value Pair' _plural = 'Keys' _plural_display_name = 'Key Value Pairs' _repr_attributes = ['name', 'value'] # Note: This is a temporary hack until we refactor client and make it support non id PKs def get_id(self): return self.name def set_id(self, value): self.name = value id = property(get_id, set_id)
bc7bf2a09fe430bb2048842626ecbb476bc6b40c
script/generate_amalgamation.py
script/generate_amalgamation.py
#!/usr/bin/env python import sys from os.path import basename, dirname, join import re INCLUDE_PATTERN = re.compile(r'^\s*#include "([\w.]+)"') seen_files = set() out = sys.stdout def add_file(filename): bname = basename(filename) # Only include each file at most once. if bname in seen_files: return seen_files.add(bname) path = dirname(filename) out.write('// Begin file "{0}"\n'.format(filename)) with open(filename, 'r') as f: for line in f: m = INCLUDE_PATTERN.match(line) if m: add_file(join(path, m.group(1))) else: out.write(line) out.write('// End file "{0}"\n'.format(filename)) for f in sys.argv[1:]: add_file(f)
#!/usr/bin/env python import sys from os.path import basename, dirname, join import re INCLUDE_PATTERN = re.compile(r'^\s*#include "([\w.]+)"') WREN_DIR = dirname(dirname(realpath(__file__))) seen_files = set() out = sys.stdout # Prints a plain text file, adding comment markers. def add_comment_file(filename): with open(filename, 'r') as f: for line in f: out.write('// ') out.write(line) # Prints the given C source file, recursively resolving local #includes. def add_file(filename): bname = basename(filename) # Only include each file at most once. if bname in seen_files: return seen_files.add(bname) path = dirname(filename) out.write('// Begin file "{0}"\n'.format(filename)) with open(filename, 'r') as f: for line in f: m = INCLUDE_PATTERN.match(line) if m: add_file(join(path, m.group(1))) else: out.write(line) out.write('// End file "{0}"\n'.format(filename)) # Print license on top. add_comment_file(join(WREN_DIR, 'LICENSE')) out.write('\n') # Source files. for f in sys.argv[1:]: add_file(f)
Print LICENSE on top of the amalgamation
Print LICENSE on top of the amalgamation
Python
mit
Rohansi/wren,Nelarius/wren,minirop/wren,foresterre/wren,munificent/wren,Nave-Neel/wren,foresterre/wren,Nave-Neel/wren,Nelarius/wren,minirop/wren,Nelarius/wren,Nelarius/wren,foresterre/wren,foresterre/wren,bigdimboom/wren,minirop/wren,bigdimboom/wren,munificent/wren,Rohansi/wren,munificent/wren,bigdimboom/wren,munificent/wren,Rohansi/wren,Nave-Neel/wren,Rohansi/wren,bigdimboom/wren,foresterre/wren,Nelarius/wren,minirop/wren,minirop/wren,munificent/wren,munificent/wren,Nave-Neel/wren
#!/usr/bin/env python import sys from os.path import basename, dirname, join import re INCLUDE_PATTERN = re.compile(r'^\s*#include "([\w.]+)"') seen_files = set() out = sys.stdout def add_file(filename): bname = basename(filename) # Only include each file at most once. if bname in seen_files: return seen_files.add(bname) path = dirname(filename) out.write('// Begin file "{0}"\n'.format(filename)) with open(filename, 'r') as f: for line in f: m = INCLUDE_PATTERN.match(line) if m: add_file(join(path, m.group(1))) else: out.write(line) out.write('// End file "{0}"\n'.format(filename)) for f in sys.argv[1:]: add_file(f) Print LICENSE on top of the amalgamation
#!/usr/bin/env python import sys from os.path import basename, dirname, join import re INCLUDE_PATTERN = re.compile(r'^\s*#include "([\w.]+)"') WREN_DIR = dirname(dirname(realpath(__file__))) seen_files = set() out = sys.stdout # Prints a plain text file, adding comment markers. def add_comment_file(filename): with open(filename, 'r') as f: for line in f: out.write('// ') out.write(line) # Prints the given C source file, recursively resolving local #includes. def add_file(filename): bname = basename(filename) # Only include each file at most once. if bname in seen_files: return seen_files.add(bname) path = dirname(filename) out.write('// Begin file "{0}"\n'.format(filename)) with open(filename, 'r') as f: for line in f: m = INCLUDE_PATTERN.match(line) if m: add_file(join(path, m.group(1))) else: out.write(line) out.write('// End file "{0}"\n'.format(filename)) # Print license on top. add_comment_file(join(WREN_DIR, 'LICENSE')) out.write('\n') # Source files. for f in sys.argv[1:]: add_file(f)
<commit_before>#!/usr/bin/env python import sys from os.path import basename, dirname, join import re INCLUDE_PATTERN = re.compile(r'^\s*#include "([\w.]+)"') seen_files = set() out = sys.stdout def add_file(filename): bname = basename(filename) # Only include each file at most once. if bname in seen_files: return seen_files.add(bname) path = dirname(filename) out.write('// Begin file "{0}"\n'.format(filename)) with open(filename, 'r') as f: for line in f: m = INCLUDE_PATTERN.match(line) if m: add_file(join(path, m.group(1))) else: out.write(line) out.write('// End file "{0}"\n'.format(filename)) for f in sys.argv[1:]: add_file(f) <commit_msg>Print LICENSE on top of the amalgamation<commit_after>
#!/usr/bin/env python import sys from os.path import basename, dirname, join import re INCLUDE_PATTERN = re.compile(r'^\s*#include "([\w.]+)"') WREN_DIR = dirname(dirname(realpath(__file__))) seen_files = set() out = sys.stdout # Prints a plain text file, adding comment markers. def add_comment_file(filename): with open(filename, 'r') as f: for line in f: out.write('// ') out.write(line) # Prints the given C source file, recursively resolving local #includes. def add_file(filename): bname = basename(filename) # Only include each file at most once. if bname in seen_files: return seen_files.add(bname) path = dirname(filename) out.write('// Begin file "{0}"\n'.format(filename)) with open(filename, 'r') as f: for line in f: m = INCLUDE_PATTERN.match(line) if m: add_file(join(path, m.group(1))) else: out.write(line) out.write('// End file "{0}"\n'.format(filename)) # Print license on top. add_comment_file(join(WREN_DIR, 'LICENSE')) out.write('\n') # Source files. for f in sys.argv[1:]: add_file(f)
#!/usr/bin/env python import sys from os.path import basename, dirname, join import re INCLUDE_PATTERN = re.compile(r'^\s*#include "([\w.]+)"') seen_files = set() out = sys.stdout def add_file(filename): bname = basename(filename) # Only include each file at most once. if bname in seen_files: return seen_files.add(bname) path = dirname(filename) out.write('// Begin file "{0}"\n'.format(filename)) with open(filename, 'r') as f: for line in f: m = INCLUDE_PATTERN.match(line) if m: add_file(join(path, m.group(1))) else: out.write(line) out.write('// End file "{0}"\n'.format(filename)) for f in sys.argv[1:]: add_file(f) Print LICENSE on top of the amalgamation#!/usr/bin/env python import sys from os.path import basename, dirname, join import re INCLUDE_PATTERN = re.compile(r'^\s*#include "([\w.]+)"') WREN_DIR = dirname(dirname(realpath(__file__))) seen_files = set() out = sys.stdout # Prints a plain text file, adding comment markers. def add_comment_file(filename): with open(filename, 'r') as f: for line in f: out.write('// ') out.write(line) # Prints the given C source file, recursively resolving local #includes. def add_file(filename): bname = basename(filename) # Only include each file at most once. if bname in seen_files: return seen_files.add(bname) path = dirname(filename) out.write('// Begin file "{0}"\n'.format(filename)) with open(filename, 'r') as f: for line in f: m = INCLUDE_PATTERN.match(line) if m: add_file(join(path, m.group(1))) else: out.write(line) out.write('// End file "{0}"\n'.format(filename)) # Print license on top. add_comment_file(join(WREN_DIR, 'LICENSE')) out.write('\n') # Source files. for f in sys.argv[1:]: add_file(f)
<commit_before>#!/usr/bin/env python import sys from os.path import basename, dirname, join import re INCLUDE_PATTERN = re.compile(r'^\s*#include "([\w.]+)"') seen_files = set() out = sys.stdout def add_file(filename): bname = basename(filename) # Only include each file at most once. if bname in seen_files: return seen_files.add(bname) path = dirname(filename) out.write('// Begin file "{0}"\n'.format(filename)) with open(filename, 'r') as f: for line in f: m = INCLUDE_PATTERN.match(line) if m: add_file(join(path, m.group(1))) else: out.write(line) out.write('// End file "{0}"\n'.format(filename)) for f in sys.argv[1:]: add_file(f) <commit_msg>Print LICENSE on top of the amalgamation<commit_after>#!/usr/bin/env python import sys from os.path import basename, dirname, join import re INCLUDE_PATTERN = re.compile(r'^\s*#include "([\w.]+)"') WREN_DIR = dirname(dirname(realpath(__file__))) seen_files = set() out = sys.stdout # Prints a plain text file, adding comment markers. def add_comment_file(filename): with open(filename, 'r') as f: for line in f: out.write('// ') out.write(line) # Prints the given C source file, recursively resolving local #includes. def add_file(filename): bname = basename(filename) # Only include each file at most once. if bname in seen_files: return seen_files.add(bname) path = dirname(filename) out.write('// Begin file "{0}"\n'.format(filename)) with open(filename, 'r') as f: for line in f: m = INCLUDE_PATTERN.match(line) if m: add_file(join(path, m.group(1))) else: out.write(line) out.write('// End file "{0}"\n'.format(filename)) # Print license on top. add_comment_file(join(WREN_DIR, 'LICENSE')) out.write('\n') # Source files. for f in sys.argv[1:]: add_file(f)
fc3589210e7244239acbc053d7788dc0cd264b88
app/models.py
app/models.py
from app import db class Sprinkler(db.Model): id = db.Column(db.Integer, primary_key=True) name = db.Column(db.Text(25)) status = db.Column(db.Text(25)) flow = db.Column(db.Integer) moisture = db.Column(db.Integer) def __init__(self, name, status, flow, moisture): self.name = name self.status = status self.flow = flow self.moisture = moisture def __repr__(self): return '<Sprinkler#%r %r, Status=%r>' % (self.id, self.name, self.status) def turn_on(self): self.status = 'ON' db.session.commit() def turn_off(self): self.status = 'OFF' db.session.commit()
from app import db class Sprinkler(db.Model): id = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25)) status = db.Column(db.String(25)) flow = db.Column(db.Integer) moisture = db.Column(db.Integer) def __init__(self, name, status, flow, moisture): self.name = name self.status = status self.flow = flow self.moisture = moisture def __repr__(self): return '<Sprinkler#%r %r, Status=%r>' % (self.id, self.name, self.status) def turn_on(self): self.status = 'ON' db.session.commit() def turn_off(self): self.status = 'OFF' db.session.commit()
Fix bug with SQLAlchemy, change TEXT to STRING
Fix bug with SQLAlchemy, change TEXT to STRING
Python
mit
jaredculp/sprinkler-flask-server,jaredculp/sprinkler-flask-server
from app import db class Sprinkler(db.Model): id = db.Column(db.Integer, primary_key=True) name = db.Column(db.Text(25)) status = db.Column(db.Text(25)) flow = db.Column(db.Integer) moisture = db.Column(db.Integer) def __init__(self, name, status, flow, moisture): self.name = name self.status = status self.flow = flow self.moisture = moisture def __repr__(self): return '<Sprinkler#%r %r, Status=%r>' % (self.id, self.name, self.status) def turn_on(self): self.status = 'ON' db.session.commit() def turn_off(self): self.status = 'OFF' db.session.commit() Fix bug with SQLAlchemy, change TEXT to STRING
from app import db class Sprinkler(db.Model): id = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25)) status = db.Column(db.String(25)) flow = db.Column(db.Integer) moisture = db.Column(db.Integer) def __init__(self, name, status, flow, moisture): self.name = name self.status = status self.flow = flow self.moisture = moisture def __repr__(self): return '<Sprinkler#%r %r, Status=%r>' % (self.id, self.name, self.status) def turn_on(self): self.status = 'ON' db.session.commit() def turn_off(self): self.status = 'OFF' db.session.commit()
<commit_before>from app import db class Sprinkler(db.Model): id = db.Column(db.Integer, primary_key=True) name = db.Column(db.Text(25)) status = db.Column(db.Text(25)) flow = db.Column(db.Integer) moisture = db.Column(db.Integer) def __init__(self, name, status, flow, moisture): self.name = name self.status = status self.flow = flow self.moisture = moisture def __repr__(self): return '<Sprinkler#%r %r, Status=%r>' % (self.id, self.name, self.status) def turn_on(self): self.status = 'ON' db.session.commit() def turn_off(self): self.status = 'OFF' db.session.commit() <commit_msg>Fix bug with SQLAlchemy, change TEXT to STRING<commit_after>
from app import db class Sprinkler(db.Model): id = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25)) status = db.Column(db.String(25)) flow = db.Column(db.Integer) moisture = db.Column(db.Integer) def __init__(self, name, status, flow, moisture): self.name = name self.status = status self.flow = flow self.moisture = moisture def __repr__(self): return '<Sprinkler#%r %r, Status=%r>' % (self.id, self.name, self.status) def turn_on(self): self.status = 'ON' db.session.commit() def turn_off(self): self.status = 'OFF' db.session.commit()
from app import db class Sprinkler(db.Model): id = db.Column(db.Integer, primary_key=True) name = db.Column(db.Text(25)) status = db.Column(db.Text(25)) flow = db.Column(db.Integer) moisture = db.Column(db.Integer) def __init__(self, name, status, flow, moisture): self.name = name self.status = status self.flow = flow self.moisture = moisture def __repr__(self): return '<Sprinkler#%r %r, Status=%r>' % (self.id, self.name, self.status) def turn_on(self): self.status = 'ON' db.session.commit() def turn_off(self): self.status = 'OFF' db.session.commit() Fix bug with SQLAlchemy, change TEXT to STRINGfrom app import db class Sprinkler(db.Model): id = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25)) status = db.Column(db.String(25)) flow = db.Column(db.Integer) moisture = db.Column(db.Integer) def __init__(self, name, status, flow, moisture): self.name = name self.status = status self.flow = flow self.moisture = moisture def __repr__(self): return '<Sprinkler#%r %r, Status=%r>' % (self.id, self.name, self.status) def turn_on(self): self.status = 'ON' db.session.commit() def turn_off(self): self.status = 'OFF' db.session.commit()
<commit_before>from app import db class Sprinkler(db.Model): id = db.Column(db.Integer, primary_key=True) name = db.Column(db.Text(25)) status = db.Column(db.Text(25)) flow = db.Column(db.Integer) moisture = db.Column(db.Integer) def __init__(self, name, status, flow, moisture): self.name = name self.status = status self.flow = flow self.moisture = moisture def __repr__(self): return '<Sprinkler#%r %r, Status=%r>' % (self.id, self.name, self.status) def turn_on(self): self.status = 'ON' db.session.commit() def turn_off(self): self.status = 'OFF' db.session.commit() <commit_msg>Fix bug with SQLAlchemy, change TEXT to STRING<commit_after>from app import db class Sprinkler(db.Model): id = db.Column(db.Integer, primary_key=True) name = db.Column(db.String(25)) status = db.Column(db.String(25)) flow = db.Column(db.Integer) moisture = db.Column(db.Integer) def __init__(self, name, status, flow, moisture): self.name = name self.status = status self.flow = flow self.moisture = moisture def __repr__(self): return '<Sprinkler#%r %r, Status=%r>' % (self.id, self.name, self.status) def turn_on(self): self.status = 'ON' db.session.commit() def turn_off(self): self.status = 'OFF' db.session.commit()
4c949cd171d50211ec8ebb95be423293ccb6f917
blog/admin.py
blog/admin.py
from django.contrib import admin from .models import Post @admin.register(Post) class PostAdmin(admin.ModelAdmin): # list view date_hierarchy = 'pub_date' list_display = ( 'title', 'pub_date', 'tag_count') list_filter = ('pub_date',) search_fields = ('title', 'text') # form view fieldsets = ( (None, { 'fields': ( 'title', 'slug', 'author', 'text', )}), ('Related', { 'fields': ( 'tags', 'startups')}), ) filter_horizontal = ('tags', 'startups',) prepopulated_fields = {"slug": ("title",)} def tag_count(self, post): return post.tags.count() tag_count.short_description = 'Number of Tags'
from django.contrib import admin from django.db.models import Count from .models import Post @admin.register(Post) class PostAdmin(admin.ModelAdmin): # list view date_hierarchy = 'pub_date' list_display = ( 'title', 'pub_date', 'tag_count') list_filter = ('pub_date',) search_fields = ('title', 'text') # form view fieldsets = ( (None, { 'fields': ( 'title', 'slug', 'author', 'text', )}), ('Related', { 'fields': ( 'tags', 'startups')}), ) filter_horizontal = ('tags', 'startups',) prepopulated_fields = {"slug": ("title",)} def get_queryset(self, request): queryset = super().get_queryset(request) return queryset.annotate( tag_number=Count('tags')) def tag_count(self, post): return post.tag_number tag_count.short_description = 'Number of Tags' tag_count.admin_order_field = 'tag_number'
Enable sorting of number of Post tags.
Ch23: Enable sorting of number of Post tags.
Python
bsd-2-clause
jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8
from django.contrib import admin from .models import Post @admin.register(Post) class PostAdmin(admin.ModelAdmin): # list view date_hierarchy = 'pub_date' list_display = ( 'title', 'pub_date', 'tag_count') list_filter = ('pub_date',) search_fields = ('title', 'text') # form view fieldsets = ( (None, { 'fields': ( 'title', 'slug', 'author', 'text', )}), ('Related', { 'fields': ( 'tags', 'startups')}), ) filter_horizontal = ('tags', 'startups',) prepopulated_fields = {"slug": ("title",)} def tag_count(self, post): return post.tags.count() tag_count.short_description = 'Number of Tags' Ch23: Enable sorting of number of Post tags.
from django.contrib import admin from django.db.models import Count from .models import Post @admin.register(Post) class PostAdmin(admin.ModelAdmin): # list view date_hierarchy = 'pub_date' list_display = ( 'title', 'pub_date', 'tag_count') list_filter = ('pub_date',) search_fields = ('title', 'text') # form view fieldsets = ( (None, { 'fields': ( 'title', 'slug', 'author', 'text', )}), ('Related', { 'fields': ( 'tags', 'startups')}), ) filter_horizontal = ('tags', 'startups',) prepopulated_fields = {"slug": ("title",)} def get_queryset(self, request): queryset = super().get_queryset(request) return queryset.annotate( tag_number=Count('tags')) def tag_count(self, post): return post.tag_number tag_count.short_description = 'Number of Tags' tag_count.admin_order_field = 'tag_number'
<commit_before>from django.contrib import admin from .models import Post @admin.register(Post) class PostAdmin(admin.ModelAdmin): # list view date_hierarchy = 'pub_date' list_display = ( 'title', 'pub_date', 'tag_count') list_filter = ('pub_date',) search_fields = ('title', 'text') # form view fieldsets = ( (None, { 'fields': ( 'title', 'slug', 'author', 'text', )}), ('Related', { 'fields': ( 'tags', 'startups')}), ) filter_horizontal = ('tags', 'startups',) prepopulated_fields = {"slug": ("title",)} def tag_count(self, post): return post.tags.count() tag_count.short_description = 'Number of Tags' <commit_msg>Ch23: Enable sorting of number of Post tags.<commit_after>
from django.contrib import admin from django.db.models import Count from .models import Post @admin.register(Post) class PostAdmin(admin.ModelAdmin): # list view date_hierarchy = 'pub_date' list_display = ( 'title', 'pub_date', 'tag_count') list_filter = ('pub_date',) search_fields = ('title', 'text') # form view fieldsets = ( (None, { 'fields': ( 'title', 'slug', 'author', 'text', )}), ('Related', { 'fields': ( 'tags', 'startups')}), ) filter_horizontal = ('tags', 'startups',) prepopulated_fields = {"slug": ("title",)} def get_queryset(self, request): queryset = super().get_queryset(request) return queryset.annotate( tag_number=Count('tags')) def tag_count(self, post): return post.tag_number tag_count.short_description = 'Number of Tags' tag_count.admin_order_field = 'tag_number'
from django.contrib import admin from .models import Post @admin.register(Post) class PostAdmin(admin.ModelAdmin): # list view date_hierarchy = 'pub_date' list_display = ( 'title', 'pub_date', 'tag_count') list_filter = ('pub_date',) search_fields = ('title', 'text') # form view fieldsets = ( (None, { 'fields': ( 'title', 'slug', 'author', 'text', )}), ('Related', { 'fields': ( 'tags', 'startups')}), ) filter_horizontal = ('tags', 'startups',) prepopulated_fields = {"slug": ("title",)} def tag_count(self, post): return post.tags.count() tag_count.short_description = 'Number of Tags' Ch23: Enable sorting of number of Post tags.from django.contrib import admin from django.db.models import Count from .models import Post @admin.register(Post) class PostAdmin(admin.ModelAdmin): # list view date_hierarchy = 'pub_date' list_display = ( 'title', 'pub_date', 'tag_count') list_filter = ('pub_date',) search_fields = ('title', 'text') # form view fieldsets = ( (None, { 'fields': ( 'title', 'slug', 'author', 'text', )}), ('Related', { 'fields': ( 'tags', 'startups')}), ) filter_horizontal = ('tags', 'startups',) prepopulated_fields = {"slug": ("title",)} def get_queryset(self, request): queryset = super().get_queryset(request) return queryset.annotate( tag_number=Count('tags')) def tag_count(self, post): return post.tag_number tag_count.short_description = 'Number of Tags' tag_count.admin_order_field = 'tag_number'
<commit_before>from django.contrib import admin from .models import Post @admin.register(Post) class PostAdmin(admin.ModelAdmin): # list view date_hierarchy = 'pub_date' list_display = ( 'title', 'pub_date', 'tag_count') list_filter = ('pub_date',) search_fields = ('title', 'text') # form view fieldsets = ( (None, { 'fields': ( 'title', 'slug', 'author', 'text', )}), ('Related', { 'fields': ( 'tags', 'startups')}), ) filter_horizontal = ('tags', 'startups',) prepopulated_fields = {"slug": ("title",)} def tag_count(self, post): return post.tags.count() tag_count.short_description = 'Number of Tags' <commit_msg>Ch23: Enable sorting of number of Post tags.<commit_after>from django.contrib import admin from django.db.models import Count from .models import Post @admin.register(Post) class PostAdmin(admin.ModelAdmin): # list view date_hierarchy = 'pub_date' list_display = ( 'title', 'pub_date', 'tag_count') list_filter = ('pub_date',) search_fields = ('title', 'text') # form view fieldsets = ( (None, { 'fields': ( 'title', 'slug', 'author', 'text', )}), ('Related', { 'fields': ( 'tags', 'startups')}), ) filter_horizontal = ('tags', 'startups',) prepopulated_fields = {"slug": ("title",)} def get_queryset(self, request): queryset = super().get_queryset(request) return queryset.annotate( tag_number=Count('tags')) def tag_count(self, post): return post.tag_number tag_count.short_description = 'Number of Tags' tag_count.admin_order_field = 'tag_number'
072d1cd283794fe0e6471237d818504168de4695
setup.py
setup.py
#!/usr/bin/env python from setuptools import setup, find_packages setup( name='django-paginationlinks', version='0.1', description='Django Pagination Links', long_description=open('README.rst').read(), url='https://github.com/blancltd/django-paginationlinks', maintainer='Alex Tomkins', maintainer_email='alex@blanc.ltd.uk', platforms=['any'], packages=find_packages(), classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', ], license='BSD', )
#!/usr/bin/env python from codecs import open from setuptools import find_packages, setup with open('README.rst', 'r', 'utf-8') as f: readme = f.read() setup( name='django-paginationlinks', version='0.1', description='Django Pagination Links', long_description=readme, url='https://github.com/blancltd/django-paginationlinks', maintainer='Alex Tomkins', maintainer_email='alex@blanc.ltd.uk', platforms=['any'], packages=find_packages(), classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', ], license='BSD', )
Fix problems with UTF-8 README.rst
Fix problems with UTF-8 README.rst
Python
bsd-3-clause
blancltd/django-paginationlinks
#!/usr/bin/env python from setuptools import setup, find_packages setup( name='django-paginationlinks', version='0.1', description='Django Pagination Links', long_description=open('README.rst').read(), url='https://github.com/blancltd/django-paginationlinks', maintainer='Alex Tomkins', maintainer_email='alex@blanc.ltd.uk', platforms=['any'], packages=find_packages(), classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', ], license='BSD', ) Fix problems with UTF-8 README.rst
#!/usr/bin/env python from codecs import open from setuptools import find_packages, setup with open('README.rst', 'r', 'utf-8') as f: readme = f.read() setup( name='django-paginationlinks', version='0.1', description='Django Pagination Links', long_description=readme, url='https://github.com/blancltd/django-paginationlinks', maintainer='Alex Tomkins', maintainer_email='alex@blanc.ltd.uk', platforms=['any'], packages=find_packages(), classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', ], license='BSD', )
<commit_before>#!/usr/bin/env python from setuptools import setup, find_packages setup( name='django-paginationlinks', version='0.1', description='Django Pagination Links', long_description=open('README.rst').read(), url='https://github.com/blancltd/django-paginationlinks', maintainer='Alex Tomkins', maintainer_email='alex@blanc.ltd.uk', platforms=['any'], packages=find_packages(), classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', ], license='BSD', ) <commit_msg>Fix problems with UTF-8 README.rst<commit_after>
#!/usr/bin/env python from codecs import open from setuptools import find_packages, setup with open('README.rst', 'r', 'utf-8') as f: readme = f.read() setup( name='django-paginationlinks', version='0.1', description='Django Pagination Links', long_description=readme, url='https://github.com/blancltd/django-paginationlinks', maintainer='Alex Tomkins', maintainer_email='alex@blanc.ltd.uk', platforms=['any'], packages=find_packages(), classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', ], license='BSD', )
#!/usr/bin/env python from setuptools import setup, find_packages setup( name='django-paginationlinks', version='0.1', description='Django Pagination Links', long_description=open('README.rst').read(), url='https://github.com/blancltd/django-paginationlinks', maintainer='Alex Tomkins', maintainer_email='alex@blanc.ltd.uk', platforms=['any'], packages=find_packages(), classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', ], license='BSD', ) Fix problems with UTF-8 README.rst#!/usr/bin/env python from codecs import open from setuptools import find_packages, setup with open('README.rst', 'r', 'utf-8') as f: readme = f.read() setup( name='django-paginationlinks', version='0.1', description='Django Pagination Links', long_description=readme, url='https://github.com/blancltd/django-paginationlinks', maintainer='Alex Tomkins', maintainer_email='alex@blanc.ltd.uk', platforms=['any'], packages=find_packages(), classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', ], license='BSD', )
<commit_before>#!/usr/bin/env python from setuptools import setup, find_packages setup( name='django-paginationlinks', version='0.1', description='Django Pagination Links', long_description=open('README.rst').read(), url='https://github.com/blancltd/django-paginationlinks', maintainer='Alex Tomkins', maintainer_email='alex@blanc.ltd.uk', platforms=['any'], packages=find_packages(), classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', ], license='BSD', ) <commit_msg>Fix problems with UTF-8 README.rst<commit_after>#!/usr/bin/env python from codecs import open from setuptools import find_packages, setup with open('README.rst', 'r', 'utf-8') as f: readme = f.read() setup( name='django-paginationlinks', version='0.1', description='Django Pagination Links', long_description=readme, url='https://github.com/blancltd/django-paginationlinks', maintainer='Alex Tomkins', maintainer_email='alex@blanc.ltd.uk', platforms=['any'], packages=find_packages(), classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', ], license='BSD', )
49fa2f7d94a0da3957764280ee8697a867bcd1ec
setup.py
setup.py
from setuptools import setup

setup(
    name = 'sphinx-csharp',
    version = '0.1.0',
    author = 'djungelorm',
    author_email = 'djungelorm@users.noreply.github.com',
    packages = ['sphinx_csharp'],
    url = 'https://github.com/djungelorm/sphinx-csharp',
    license = 'MIT',
    description = 'C# domain for Sphinx',
    install_requires = ['Sphinx'],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Natural Language :: English',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Operating System :: OS Independent',
        'Topic :: Documentation :: Sphinx'
    ]
)
from setuptools import setup

setup(
    name = 'sphinx-csharp',
    version = '0.1.0',
    author = 'djungelorm',
    author_email = 'djungelorm@users.noreply.github.com',
    packages = ['sphinx_csharp'],
    url = 'https://github.com/djungelorm/sphinx-csharp',
    license = 'MIT',
    description = 'C# domain for Sphinx',
    install_requires = ['Sphinx'],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Natural Language :: English',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Operating System :: OS Independent',
        'Topic :: Documentation :: Sphinx'
    ]
)
Remove python 3.2 from trove classifiers
Remove python 3.2 from trove classifiers
Python
mit
djungelorm/sphinx-csharp
from setuptools import setup setup( name = 'sphinx-csharp', version = '0.1.0', author = 'djungelorm', author_email = 'djungelorm@users.noreply.github.com', packages = ['sphinx_csharp'], url = 'https://github.com/djungelorm/sphinx-csharp', license = 'MIT', description = 'C# domain for Sphinx', install_requires = ['Sphinx'], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Operating System :: OS Independent', 'Topic :: Documentation :: Sphinx' ] ) Remove python 3.2 from trove classifiers
from setuptools import setup setup( name = 'sphinx-csharp', version = '0.1.0', author = 'djungelorm', author_email = 'djungelorm@users.noreply.github.com', packages = ['sphinx_csharp'], url = 'https://github.com/djungelorm/sphinx-csharp', license = 'MIT', description = 'C# domain for Sphinx', install_requires = ['Sphinx'], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Operating System :: OS Independent', 'Topic :: Documentation :: Sphinx' ] )
<commit_before>from setuptools import setup setup( name = 'sphinx-csharp', version = '0.1.0', author = 'djungelorm', author_email = 'djungelorm@users.noreply.github.com', packages = ['sphinx_csharp'], url = 'https://github.com/djungelorm/sphinx-csharp', license = 'MIT', description = 'C# domain for Sphinx', install_requires = ['Sphinx'], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Operating System :: OS Independent', 'Topic :: Documentation :: Sphinx' ] ) <commit_msg>Remove python 3.2 from trove classifiers<commit_after>
from setuptools import setup setup( name = 'sphinx-csharp', version = '0.1.0', author = 'djungelorm', author_email = 'djungelorm@users.noreply.github.com', packages = ['sphinx_csharp'], url = 'https://github.com/djungelorm/sphinx-csharp', license = 'MIT', description = 'C# domain for Sphinx', install_requires = ['Sphinx'], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Operating System :: OS Independent', 'Topic :: Documentation :: Sphinx' ] )
from setuptools import setup setup( name = 'sphinx-csharp', version = '0.1.0', author = 'djungelorm', author_email = 'djungelorm@users.noreply.github.com', packages = ['sphinx_csharp'], url = 'https://github.com/djungelorm/sphinx-csharp', license = 'MIT', description = 'C# domain for Sphinx', install_requires = ['Sphinx'], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Operating System :: OS Independent', 'Topic :: Documentation :: Sphinx' ] ) Remove python 3.2 from trove classifiersfrom setuptools import setup setup( name = 'sphinx-csharp', version = '0.1.0', author = 'djungelorm', author_email = 'djungelorm@users.noreply.github.com', packages = ['sphinx_csharp'], url = 'https://github.com/djungelorm/sphinx-csharp', license = 'MIT', description = 'C# domain for Sphinx', install_requires = ['Sphinx'], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Operating System :: OS Independent', 'Topic :: Documentation :: Sphinx' ] )
<commit_before>from setuptools import setup setup( name = 'sphinx-csharp', version = '0.1.0', author = 'djungelorm', author_email = 'djungelorm@users.noreply.github.com', packages = ['sphinx_csharp'], url = 'https://github.com/djungelorm/sphinx-csharp', license = 'MIT', description = 'C# domain for Sphinx', install_requires = ['Sphinx'], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Operating System :: OS Independent', 'Topic :: Documentation :: Sphinx' ] ) <commit_msg>Remove python 3.2 from trove classifiers<commit_after>from setuptools import setup setup( name = 'sphinx-csharp', version = '0.1.0', author = 'djungelorm', author_email = 'djungelorm@users.noreply.github.com', packages = ['sphinx_csharp'], url = 'https://github.com/djungelorm/sphinx-csharp', license = 'MIT', description = 'C# domain for Sphinx', install_requires = ['Sphinx'], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Operating System :: OS Independent', 'Topic :: Documentation :: Sphinx' ] )
661040496ebb67cb0f8d8e49d734cfa96f14b0c4
setup.py
setup.py
from distutils.core import setup
import os, glob, string, shutil

# Packages
packages = ['neuroimaging',
            'neuroimaging.statistics',
            'neuroimaging.image',
            'neuroimaging.reference',
            'neuroimaging.data',
            'neuroimaging.image.formats',
            'neuroimaging.image.formats.analyze',
            'neuroimaging.fmri',
            'neuroimaging.fmri.fmristat',
            'neuroimaging.visualization',
            'neuroimaging.visualization.cmap']

def main():
    setup (name = 'neuroimaging',
           version = '0.01a',
           description = 'This is a neuroimaging python package',
           author = 'Various, one of whom is Jonathan Taylor',
           author_email = 'jonathan.taylor@stanford.edu',
           ext_package = 'neuroimaging',
           packages=packages,
           package_dir = {'neuroimaging': 'lib'},
           url = 'http://neuroimaging.scipy.org',
           long_description = '''
           ''')

if __name__ == "__main__":
    main()
import os, glob, string, shutil

from distutils.core import setup

# Packages
packages = ['neuroimaging',
            'neuroimaging.statistics',
            'neuroimaging.image',
            'neuroimaging.reference',
            'neuroimaging.data',
            'neuroimaging.image.formats',
            'neuroimaging.image.formats.analyze',
            'neuroimaging.fmri',
            'neuroimaging.fmri.fmristat',
            'neuroimaging.visualization',
            'neuroimaging.visualization.cmap']

def main():
    setup (name = 'neuroimaging',
           version = '0.01a',
           description = 'This is a neuroimaging python package',
           author = 'Various, one of whom is Jonathan Taylor',
           author_email = 'jonathan.taylor@stanford.edu',
           ext_package = 'neuroimaging',
           packages=packages,
           package_dir = {'neuroimaging': 'lib'},
           url = 'http://neuroimaging.scipy.org',
           long_description = '''
           ''')

if __name__ == "__main__":
    main()
Test edit - to check svn email hook
Test edit - to check svn email hook
Python
bsd-3-clause
bthirion/nipy,alexis-roche/nireg,alexis-roche/niseg,arokem/nipy,alexis-roche/nipy,bthirion/nipy,bthirion/nipy,arokem/nipy,nipy/nireg,alexis-roche/nipy,arokem/nipy,bthirion/nipy,alexis-roche/nipy,nipy/nipy-labs,alexis-roche/nireg,alexis-roche/niseg,alexis-roche/register,nipy/nireg,nipy/nipy-labs,alexis-roche/register,alexis-roche/nipy,arokem/nipy,alexis-roche/register
from distutils.core import setup import os, glob, string, shutil # Packages packages = ['neuroimaging', 'neuroimaging.statistics', 'neuroimaging.image', 'neuroimaging.reference', 'neuroimaging.data', 'neuroimaging.image.formats', 'neuroimaging.image.formats.analyze', 'neuroimaging.fmri', 'neuroimaging.fmri.fmristat', 'neuroimaging.visualization', 'neuroimaging.visualization.cmap'] def main(): setup (name = 'neuroimaging', version = '0.01a', description = 'This is a neuroimaging python package', author = 'Various, one of whom is Jonathan Taylor', author_email = 'jonathan.taylor@stanford.edu', ext_package = 'neuroimaging', packages=packages, package_dir = {'neuroimaging': 'lib'}, url = 'http://neuroimaging.scipy.org', long_description = ''' ''') if __name__ == "__main__": main() Test edit - to check svn email hook
import os, glob, string, shutil from distutils.core import setup # Packages packages = ['neuroimaging', 'neuroimaging.statistics', 'neuroimaging.image', 'neuroimaging.reference', 'neuroimaging.data', 'neuroimaging.image.formats', 'neuroimaging.image.formats.analyze', 'neuroimaging.fmri', 'neuroimaging.fmri.fmristat', 'neuroimaging.visualization', 'neuroimaging.visualization.cmap'] def main(): setup (name = 'neuroimaging', version = '0.01a', description = 'This is a neuroimaging python package', author = 'Various, one of whom is Jonathan Taylor', author_email = 'jonathan.taylor@stanford.edu', ext_package = 'neuroimaging', packages=packages, package_dir = {'neuroimaging': 'lib'}, url = 'http://neuroimaging.scipy.org', long_description = ''' ''') if __name__ == "__main__": main()
<commit_before>from distutils.core import setup import os, glob, string, shutil # Packages packages = ['neuroimaging', 'neuroimaging.statistics', 'neuroimaging.image', 'neuroimaging.reference', 'neuroimaging.data', 'neuroimaging.image.formats', 'neuroimaging.image.formats.analyze', 'neuroimaging.fmri', 'neuroimaging.fmri.fmristat', 'neuroimaging.visualization', 'neuroimaging.visualization.cmap'] def main(): setup (name = 'neuroimaging', version = '0.01a', description = 'This is a neuroimaging python package', author = 'Various, one of whom is Jonathan Taylor', author_email = 'jonathan.taylor@stanford.edu', ext_package = 'neuroimaging', packages=packages, package_dir = {'neuroimaging': 'lib'}, url = 'http://neuroimaging.scipy.org', long_description = ''' ''') if __name__ == "__main__": main() <commit_msg>Test edit - to check svn email hook<commit_after>
import os, glob, string, shutil from distutils.core import setup # Packages packages = ['neuroimaging', 'neuroimaging.statistics', 'neuroimaging.image', 'neuroimaging.reference', 'neuroimaging.data', 'neuroimaging.image.formats', 'neuroimaging.image.formats.analyze', 'neuroimaging.fmri', 'neuroimaging.fmri.fmristat', 'neuroimaging.visualization', 'neuroimaging.visualization.cmap'] def main(): setup (name = 'neuroimaging', version = '0.01a', description = 'This is a neuroimaging python package', author = 'Various, one of whom is Jonathan Taylor', author_email = 'jonathan.taylor@stanford.edu', ext_package = 'neuroimaging', packages=packages, package_dir = {'neuroimaging': 'lib'}, url = 'http://neuroimaging.scipy.org', long_description = ''' ''') if __name__ == "__main__": main()
from distutils.core import setup import os, glob, string, shutil # Packages packages = ['neuroimaging', 'neuroimaging.statistics', 'neuroimaging.image', 'neuroimaging.reference', 'neuroimaging.data', 'neuroimaging.image.formats', 'neuroimaging.image.formats.analyze', 'neuroimaging.fmri', 'neuroimaging.fmri.fmristat', 'neuroimaging.visualization', 'neuroimaging.visualization.cmap'] def main(): setup (name = 'neuroimaging', version = '0.01a', description = 'This is a neuroimaging python package', author = 'Various, one of whom is Jonathan Taylor', author_email = 'jonathan.taylor@stanford.edu', ext_package = 'neuroimaging', packages=packages, package_dir = {'neuroimaging': 'lib'}, url = 'http://neuroimaging.scipy.org', long_description = ''' ''') if __name__ == "__main__": main() Test edit - to check svn email hookimport os, glob, string, shutil from distutils.core import setup # Packages packages = ['neuroimaging', 'neuroimaging.statistics', 'neuroimaging.image', 'neuroimaging.reference', 'neuroimaging.data', 'neuroimaging.image.formats', 'neuroimaging.image.formats.analyze', 'neuroimaging.fmri', 'neuroimaging.fmri.fmristat', 'neuroimaging.visualization', 'neuroimaging.visualization.cmap'] def main(): setup (name = 'neuroimaging', version = '0.01a', description = 'This is a neuroimaging python package', author = 'Various, one of whom is Jonathan Taylor', author_email = 'jonathan.taylor@stanford.edu', ext_package = 'neuroimaging', packages=packages, package_dir = {'neuroimaging': 'lib'}, url = 'http://neuroimaging.scipy.org', long_description = ''' ''') if __name__ == "__main__": main()
<commit_before>from distutils.core import setup import os, glob, string, shutil # Packages packages = ['neuroimaging', 'neuroimaging.statistics', 'neuroimaging.image', 'neuroimaging.reference', 'neuroimaging.data', 'neuroimaging.image.formats', 'neuroimaging.image.formats.analyze', 'neuroimaging.fmri', 'neuroimaging.fmri.fmristat', 'neuroimaging.visualization', 'neuroimaging.visualization.cmap'] def main(): setup (name = 'neuroimaging', version = '0.01a', description = 'This is a neuroimaging python package', author = 'Various, one of whom is Jonathan Taylor', author_email = 'jonathan.taylor@stanford.edu', ext_package = 'neuroimaging', packages=packages, package_dir = {'neuroimaging': 'lib'}, url = 'http://neuroimaging.scipy.org', long_description = ''' ''') if __name__ == "__main__": main() <commit_msg>Test edit - to check svn email hook<commit_after>import os, glob, string, shutil from distutils.core import setup # Packages packages = ['neuroimaging', 'neuroimaging.statistics', 'neuroimaging.image', 'neuroimaging.reference', 'neuroimaging.data', 'neuroimaging.image.formats', 'neuroimaging.image.formats.analyze', 'neuroimaging.fmri', 'neuroimaging.fmri.fmristat', 'neuroimaging.visualization', 'neuroimaging.visualization.cmap'] def main(): setup (name = 'neuroimaging', version = '0.01a', description = 'This is a neuroimaging python package', author = 'Various, one of whom is Jonathan Taylor', author_email = 'jonathan.taylor@stanford.edu', ext_package = 'neuroimaging', packages=packages, package_dir = {'neuroimaging': 'lib'}, url = 'http://neuroimaging.scipy.org', long_description = ''' ''') if __name__ == "__main__": main()
fad6bf214e3b148fc37a154ccf2f56f347e686a4
setup.py
setup.py
# coding=utf-8
from distutils.core import setup

__version__ = 'unknown'
with open('po_localization/version.py') as version_file:
    exec(version_file.read())

setup(
    name='po_localization',
    packages=['po_localization'],
    version=__version__,
    description='Localize Django applications without compiling .po files',
    author='Kevin Michel',
    author_email='kmichel.info@gmail.com',
    url='https://github.com/kmichel/po-localization',
    download_url='https://github.com/kmichel/po-localization/archive/v{}.tar.gz'.format(__version__),
    keywords=['django', 'localization'],
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Software Development :: Localization',
    ],
    requires=['django'],
)
# coding=utf-8
from distutils.core import setup

__version__ = 'unknown'
with open('po_localization/version.py') as version_file:
    exec(version_file.read())

setup(
    name='po_localization',
    packages=['po_localization', 'po_localization.tests'],
    version=__version__,
    description='Localize Django applications without compiling .po files',
    author='Kevin Michel',
    author_email='kmichel.info@gmail.com',
    url='https://github.com/kmichel/po-localization',
    download_url='https://github.com/kmichel/po-localization/archive/v{}.tar.gz'.format(__version__),
    keywords=['django', 'localization'],
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Software Development :: Localization',
    ],
    requires=['django'],
)
Install tests along with the rest of the code
Install tests along with the rest of the code
Python
mit
kmichel/po-localization
# coding=utf-8 from distutils.core import setup __version__ = 'unknown' with open('po_localization/version.py') as version_file: exec(version_file.read()) setup( name='po_localization', packages=['po_localization'], version=__version__, description='Localize Django applications without compiling .po files', author='Kevin Michel', author_email='kmichel.info@gmail.com', url='https://github.com/kmichel/po-localization', download_url='https://github.com/kmichel/po-localization/archive/v{}.tar.gz'.format(__version__), keywords=['django', 'localization'], classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Localization', ], requires=['django'], ) Install tests along with the rest of the code
# coding=utf-8 from distutils.core import setup __version__ = 'unknown' with open('po_localization/version.py') as version_file: exec(version_file.read()) setup( name='po_localization', packages=['po_localization', 'po_localization.tests'], version=__version__, description='Localize Django applications without compiling .po files', author='Kevin Michel', author_email='kmichel.info@gmail.com', url='https://github.com/kmichel/po-localization', download_url='https://github.com/kmichel/po-localization/archive/v{}.tar.gz'.format(__version__), keywords=['django', 'localization'], classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Localization', ], requires=['django'], )
<commit_before># coding=utf-8 from distutils.core import setup __version__ = 'unknown' with open('po_localization/version.py') as version_file: exec(version_file.read()) setup( name='po_localization', packages=['po_localization'], version=__version__, description='Localize Django applications without compiling .po files', author='Kevin Michel', author_email='kmichel.info@gmail.com', url='https://github.com/kmichel/po-localization', download_url='https://github.com/kmichel/po-localization/archive/v{}.tar.gz'.format(__version__), keywords=['django', 'localization'], classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Localization', ], requires=['django'], ) <commit_msg>Install tests along with the rest of the code<commit_after>
# coding=utf-8 from distutils.core import setup __version__ = 'unknown' with open('po_localization/version.py') as version_file: exec(version_file.read()) setup( name='po_localization', packages=['po_localization', 'po_localization.tests'], version=__version__, description='Localize Django applications without compiling .po files', author='Kevin Michel', author_email='kmichel.info@gmail.com', url='https://github.com/kmichel/po-localization', download_url='https://github.com/kmichel/po-localization/archive/v{}.tar.gz'.format(__version__), keywords=['django', 'localization'], classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Localization', ], requires=['django'], )
# coding=utf-8 from distutils.core import setup __version__ = 'unknown' with open('po_localization/version.py') as version_file: exec(version_file.read()) setup( name='po_localization', packages=['po_localization'], version=__version__, description='Localize Django applications without compiling .po files', author='Kevin Michel', author_email='kmichel.info@gmail.com', url='https://github.com/kmichel/po-localization', download_url='https://github.com/kmichel/po-localization/archive/v{}.tar.gz'.format(__version__), keywords=['django', 'localization'], classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Localization', ], requires=['django'], ) Install tests along with the rest of the code# coding=utf-8 from distutils.core import setup __version__ = 'unknown' with open('po_localization/version.py') as version_file: exec(version_file.read()) setup( name='po_localization', packages=['po_localization', 'po_localization.tests'], version=__version__, description='Localize Django applications without compiling .po files', author='Kevin Michel', author_email='kmichel.info@gmail.com', url='https://github.com/kmichel/po-localization', download_url='https://github.com/kmichel/po-localization/archive/v{}.tar.gz'.format(__version__), keywords=['django', 'localization'], classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Localization', ], requires=['django'], )
<commit_before># coding=utf-8 from distutils.core import setup __version__ = 'unknown' with open('po_localization/version.py') as version_file: exec(version_file.read()) setup( name='po_localization', packages=['po_localization'], version=__version__, description='Localize Django applications without compiling .po files', author='Kevin Michel', author_email='kmichel.info@gmail.com', url='https://github.com/kmichel/po-localization', download_url='https://github.com/kmichel/po-localization/archive/v{}.tar.gz'.format(__version__), keywords=['django', 'localization'], classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Localization', ], requires=['django'], ) <commit_msg>Install tests along with the rest of the code<commit_after># coding=utf-8 from distutils.core import setup __version__ = 'unknown' with open('po_localization/version.py') as version_file: exec(version_file.read()) setup( name='po_localization', packages=['po_localization', 'po_localization.tests'], version=__version__, description='Localize Django applications without compiling .po files', author='Kevin Michel', author_email='kmichel.info@gmail.com', url='https://github.com/kmichel/po-localization', download_url='https://github.com/kmichel/po-localization/archive/v{}.tar.gz'.format(__version__), keywords=['django', 'localization'], classifiers=[ 'Development Status :: 3 - Alpha', 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Localization', ], requires=['django'], )
26220ab6aa96b796a1250def81ae701dc8cf5a49
setup.py
setup.py
from os.path import dirname, abspath, join, exists
from setuptools import setup

long_description = None
if exists("README.md"):
    long_description = open("README.md").read()

setup(
    name="mpegdash",
    packages=["mpegdash"],
    description="MPEG-DASH MPD(Media Presentation Description) Parser",
    long_description=long_description,
    author="supercast",
    author_email="gamzabaw@gmail.com",
    version="0.1.6",
    license="MIT",
    zip_safe=False,
    include_package_data=True,
    install_requires=["future"],
    url="https://github.com/caststack/python-mpegdash",
    tests_require=["unittest2"],
    test_suite="tests.my_module_suite",
    classifiers = [
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Environment :: Other Environment",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        "Topic :: Software Development :: Libraries :: Python Modules",
    ],
)
from os.path import dirname, abspath, join, exists
from setuptools import setup

long_description = None
if exists("README.md"):
    long_description = open("README.md").read()

setup(
    name="mpegdash",
    packages=["mpegdash"],
    description="MPEG-DASH MPD(Media Presentation Description) Parser",
    long_description=long_description,
    author="sangwonl",
    author_email="gamzabaw@gmail.com",
    version="0.2.0",
    license="MIT",
    zip_safe=False,
    include_package_data=True,
    install_requires=["future"],
    url="https://github.com/sangwonl/python-mpegdash",
    tests_require=["unittest2"],
    test_suite="tests.my_module_suite",
    classifiers = [
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Environment :: Other Environment",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        "Topic :: Software Development :: Libraries :: Python Modules",
    ],
)
Set release version to 0.2.0
Set release version to 0.2.0 change repository for maintenance
Python
mit
caststack/python-mpd-parser,supercast-tv/python-mpd-parser
from os.path import dirname, abspath, join, exists from setuptools import setup long_description = None if exists("README.md"): long_description = open("README.md").read() setup( name="mpegdash", packages=["mpegdash"], description="MPEG-DASH MPD(Media Presentation Description) Parser", long_description=long_description, author="supercast", author_email="gamzabaw@gmail.com", version="0.1.6", license="MIT", zip_safe=False, include_package_data=True, install_requires=["future"], url="https://github.com/caststack/python-mpegdash", tests_require=["unittest2"], test_suite="tests.my_module_suite", classifiers = [ "Programming Language :: Python", "Programming Language :: Python :: 3", "Environment :: Other Environment", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Topic :: Software Development :: Libraries :: Python Modules", ], ) Set release version to 0.2.0 change repository for maintenance
from os.path import dirname, abspath, join, exists from setuptools import setup long_description = None if exists("README.md"): long_description = open("README.md").read() setup( name="mpegdash", packages=["mpegdash"], description="MPEG-DASH MPD(Media Presentation Description) Parser", long_description=long_description, author="sangwonl", author_email="gamzabaw@gmail.com", version="0.2.0", license="MIT", zip_safe=False, include_package_data=True, install_requires=["future"], url="https://github.com/sangwonl/python-mpegdash", tests_require=["unittest2"], test_suite="tests.my_module_suite", classifiers = [ "Programming Language :: Python", "Programming Language :: Python :: 3", "Environment :: Other Environment", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Topic :: Software Development :: Libraries :: Python Modules", ], )
<commit_before>from os.path import dirname, abspath, join, exists from setuptools import setup long_description = None if exists("README.md"): long_description = open("README.md").read() setup( name="mpegdash", packages=["mpegdash"], description="MPEG-DASH MPD(Media Presentation Description) Parser", long_description=long_description, author="supercast", author_email="gamzabaw@gmail.com", version="0.1.6", license="MIT", zip_safe=False, include_package_data=True, install_requires=["future"], url="https://github.com/caststack/python-mpegdash", tests_require=["unittest2"], test_suite="tests.my_module_suite", classifiers = [ "Programming Language :: Python", "Programming Language :: Python :: 3", "Environment :: Other Environment", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Topic :: Software Development :: Libraries :: Python Modules", ], ) <commit_msg>Set release version to 0.2.0 change repository for maintenance<commit_after>
from os.path import dirname, abspath, join, exists from setuptools import setup long_description = None if exists("README.md"): long_description = open("README.md").read() setup( name="mpegdash", packages=["mpegdash"], description="MPEG-DASH MPD(Media Presentation Description) Parser", long_description=long_description, author="sangwonl", author_email="gamzabaw@gmail.com", version="0.2.0", license="MIT", zip_safe=False, include_package_data=True, install_requires=["future"], url="https://github.com/sangwonl/python-mpegdash", tests_require=["unittest2"], test_suite="tests.my_module_suite", classifiers = [ "Programming Language :: Python", "Programming Language :: Python :: 3", "Environment :: Other Environment", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Topic :: Software Development :: Libraries :: Python Modules", ], )
from os.path import dirname, abspath, join, exists from setuptools import setup long_description = None if exists("README.md"): long_description = open("README.md").read() setup( name="mpegdash", packages=["mpegdash"], description="MPEG-DASH MPD(Media Presentation Description) Parser", long_description=long_description, author="supercast", author_email="gamzabaw@gmail.com", version="0.1.6", license="MIT", zip_safe=False, include_package_data=True, install_requires=["future"], url="https://github.com/caststack/python-mpegdash", tests_require=["unittest2"], test_suite="tests.my_module_suite", classifiers = [ "Programming Language :: Python", "Programming Language :: Python :: 3", "Environment :: Other Environment", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Topic :: Software Development :: Libraries :: Python Modules", ], ) Set release version to 0.2.0 change repository for maintenancefrom os.path import dirname, abspath, join, exists from setuptools import setup long_description = None if exists("README.md"): long_description = open("README.md").read() setup( name="mpegdash", packages=["mpegdash"], description="MPEG-DASH MPD(Media Presentation Description) Parser", long_description=long_description, author="sangwonl", author_email="gamzabaw@gmail.com", version="0.2.0", license="MIT", zip_safe=False, include_package_data=True, install_requires=["future"], url="https://github.com/sangwonl/python-mpegdash", tests_require=["unittest2"], test_suite="tests.my_module_suite", classifiers = [ "Programming Language :: Python", "Programming Language :: Python :: 3", "Environment :: Other Environment", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Topic :: Software Development :: Libraries :: Python Modules", ], )
<commit_before>from os.path import dirname, abspath, join, exists from setuptools import setup long_description = None if exists("README.md"): long_description = open("README.md").read() setup( name="mpegdash", packages=["mpegdash"], description="MPEG-DASH MPD(Media Presentation Description) Parser", long_description=long_description, author="supercast", author_email="gamzabaw@gmail.com", version="0.1.6", license="MIT", zip_safe=False, include_package_data=True, install_requires=["future"], url="https://github.com/caststack/python-mpegdash", tests_require=["unittest2"], test_suite="tests.my_module_suite", classifiers = [ "Programming Language :: Python", "Programming Language :: Python :: 3", "Environment :: Other Environment", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Topic :: Software Development :: Libraries :: Python Modules", ], ) <commit_msg>Set release version to 0.2.0 change repository for maintenance<commit_after>from os.path import dirname, abspath, join, exists from setuptools import setup long_description = None if exists("README.md"): long_description = open("README.md").read() setup( name="mpegdash", packages=["mpegdash"], description="MPEG-DASH MPD(Media Presentation Description) Parser", long_description=long_description, author="sangwonl", author_email="gamzabaw@gmail.com", version="0.2.0", license="MIT", zip_safe=False, include_package_data=True, install_requires=["future"], url="https://github.com/sangwonl/python-mpegdash", tests_require=["unittest2"], test_suite="tests.my_module_suite", classifiers = [ "Programming Language :: Python", "Programming Language :: Python :: 3", "Environment :: Other Environment", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Topic :: Software Development :: Libraries :: Python Modules", ], )
319ccff1aa72185e5c02c8094bd1ce5118d94ccd
setup.py
setup.py
"""Installation script for proselint.""" from setuptools import setup setup( name='proselint', version='0.1', description='Making your writing better', url='http://github.com/suchow/proselint', author='Jordan Suchow', author_email='suchow@post.harvard.edu', license='MIT', packages=['proselint', 'proselint.checks'], package_data={'': ['demo.md', '.proselintrc']}, zip_safe=False, entry_points={ 'console_scripts': [ 'proselint = proselint.command_line:proselint', ], })
"""Installation script for proselint.""" from setuptools import setup setup( name='proselint', version='0.1', description='Making your writing better', url='http://github.com/suchow/proselint', author='Jordan Suchow', author_email='suchow@post.harvard.edu', license='MIT', packages=[ 'proselint', 'proselint.checks', 'proselint.checks.misc', 'proselint.checks.consistency', 'proselint.checks.pinker', 'proselint.checks.strunkwhite', 'proselint.checks.garner', 'proselint.checks.writegood', 'proselint.checks.wallace', 'proselint.checks.wallstreetjournal', 'proselint.checks.butterick'], package_data={'': ['demo.md', '.proselintrc']}, zip_safe=False, entry_points={ 'console_scripts': [ 'proselint = proselint.command_line:proselint', ], })
Improve importing for install mode
Improve importing for install mode
Python
bsd-3-clause
amperser/proselint,amperser/proselint,amperser/proselint,amperser/proselint,amperser/proselint,jstewmon/proselint,jstewmon/proselint,jstewmon/proselint
"""Installation script for proselint.""" from setuptools import setup setup( name='proselint', version='0.1', description='Making your writing better', url='http://github.com/suchow/proselint', author='Jordan Suchow', author_email='suchow@post.harvard.edu', license='MIT', packages=['proselint', 'proselint.checks'], package_data={'': ['demo.md', '.proselintrc']}, zip_safe=False, entry_points={ 'console_scripts': [ 'proselint = proselint.command_line:proselint', ], }) Improve importing for install mode
"""Installation script for proselint.""" from setuptools import setup setup( name='proselint', version='0.1', description='Making your writing better', url='http://github.com/suchow/proselint', author='Jordan Suchow', author_email='suchow@post.harvard.edu', license='MIT', packages=[ 'proselint', 'proselint.checks', 'proselint.checks.misc', 'proselint.checks.consistency', 'proselint.checks.pinker', 'proselint.checks.strunkwhite', 'proselint.checks.garner', 'proselint.checks.writegood', 'proselint.checks.wallace', 'proselint.checks.wallstreetjournal', 'proselint.checks.butterick'], package_data={'': ['demo.md', '.proselintrc']}, zip_safe=False, entry_points={ 'console_scripts': [ 'proselint = proselint.command_line:proselint', ], })
<commit_before>"""Installation script for proselint.""" from setuptools import setup setup( name='proselint', version='0.1', description='Making your writing better', url='http://github.com/suchow/proselint', author='Jordan Suchow', author_email='suchow@post.harvard.edu', license='MIT', packages=['proselint', 'proselint.checks'], package_data={'': ['demo.md', '.proselintrc']}, zip_safe=False, entry_points={ 'console_scripts': [ 'proselint = proselint.command_line:proselint', ], }) <commit_msg>Improve importing for install mode<commit_after>
"""Installation script for proselint.""" from setuptools import setup setup( name='proselint', version='0.1', description='Making your writing better', url='http://github.com/suchow/proselint', author='Jordan Suchow', author_email='suchow@post.harvard.edu', license='MIT', packages=[ 'proselint', 'proselint.checks', 'proselint.checks.misc', 'proselint.checks.consistency', 'proselint.checks.pinker', 'proselint.checks.strunkwhite', 'proselint.checks.garner', 'proselint.checks.writegood', 'proselint.checks.wallace', 'proselint.checks.wallstreetjournal', 'proselint.checks.butterick'], package_data={'': ['demo.md', '.proselintrc']}, zip_safe=False, entry_points={ 'console_scripts': [ 'proselint = proselint.command_line:proselint', ], })
"""Installation script for proselint.""" from setuptools import setup setup( name='proselint', version='0.1', description='Making your writing better', url='http://github.com/suchow/proselint', author='Jordan Suchow', author_email='suchow@post.harvard.edu', license='MIT', packages=['proselint', 'proselint.checks'], package_data={'': ['demo.md', '.proselintrc']}, zip_safe=False, entry_points={ 'console_scripts': [ 'proselint = proselint.command_line:proselint', ], }) Improve importing for install mode"""Installation script for proselint.""" from setuptools import setup setup( name='proselint', version='0.1', description='Making your writing better', url='http://github.com/suchow/proselint', author='Jordan Suchow', author_email='suchow@post.harvard.edu', license='MIT', packages=[ 'proselint', 'proselint.checks', 'proselint.checks.misc', 'proselint.checks.consistency', 'proselint.checks.pinker', 'proselint.checks.strunkwhite', 'proselint.checks.garner', 'proselint.checks.writegood', 'proselint.checks.wallace', 'proselint.checks.wallstreetjournal', 'proselint.checks.butterick'], package_data={'': ['demo.md', '.proselintrc']}, zip_safe=False, entry_points={ 'console_scripts': [ 'proselint = proselint.command_line:proselint', ], })
<commit_before>"""Installation script for proselint.""" from setuptools import setup setup( name='proselint', version='0.1', description='Making your writing better', url='http://github.com/suchow/proselint', author='Jordan Suchow', author_email='suchow@post.harvard.edu', license='MIT', packages=['proselint', 'proselint.checks'], package_data={'': ['demo.md', '.proselintrc']}, zip_safe=False, entry_points={ 'console_scripts': [ 'proselint = proselint.command_line:proselint', ], }) <commit_msg>Improve importing for install mode<commit_after>"""Installation script for proselint.""" from setuptools import setup setup( name='proselint', version='0.1', description='Making your writing better', url='http://github.com/suchow/proselint', author='Jordan Suchow', author_email='suchow@post.harvard.edu', license='MIT', packages=[ 'proselint', 'proselint.checks', 'proselint.checks.misc', 'proselint.checks.consistency', 'proselint.checks.pinker', 'proselint.checks.strunkwhite', 'proselint.checks.garner', 'proselint.checks.writegood', 'proselint.checks.wallace', 'proselint.checks.wallstreetjournal', 'proselint.checks.butterick'], package_data={'': ['demo.md', '.proselintrc']}, zip_safe=False, entry_points={ 'console_scripts': [ 'proselint = proselint.command_line:proselint', ], })
6611fa93e3d534d9ccd3e954d07d1b12e170b3ff
setup.py
setup.py
#!/usr/bin/env python
# Generated by jaraco.develop (https://bitbucket.org/jaraco/jaraco.develop)
import setuptools

with open('README.txt') as readme:
    long_description = readme.read()
with open('CHANGES.txt') as changes:
    long_description += '\n\n' + changes.read()

setup_params = dict(
    name='jaraco.timing',
    use_hg_version=True,
    author="Jason R. Coombs",
    author_email="jaraco@jaraco.com",
    description="jaraco.timing",
    long_description=long_description,
    url="https://bitbucket.org/jaraco/jaraco.timing",
    packages=setuptools.find_packages(),
    namespace_packages=['jaraco'],
    setup_requires=[
        'hgtools',
        'pytest-runner',
    ],
    tests_require=[
        'pytest',
    ],
    classifiers = [
        "Development Status :: 5 - Production/Stable",
        "Intended Audience :: Developers",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
    ],
)

if __name__ == '__main__':
    setuptools.setup(**setup_params)
#!/usr/bin/env python
# Generated by jaraco.develop (https://bitbucket.org/jaraco/jaraco.develop)
import setuptools

with open('README.txt') as readme:
    long_description = readme.read()
with open('CHANGES.txt') as changes:
    long_description += '\n\n' + changes.read()

setup_params = dict(
    name='jaraco.timing',
    use_hg_version=True,
    author="Jason R. Coombs",
    author_email="jaraco@jaraco.com",
    description="jaraco.timing",
    long_description=long_description,
    url="https://bitbucket.org/jaraco/jaraco.timing",
    packages=setuptools.find_packages(),
    namespace_packages=['jaraco'],
    license="MIT",
    setup_requires=[
        'hgtools',
        'pytest-runner',
    ],
    tests_require=[
        'pytest',
    ],
    classifiers = [
        "Development Status :: 5 - Production/Stable",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
    ],
)

if __name__ == '__main__':
    setuptools.setup(**setup_params)
Make explicit that the project is released under the MIT license.
Make explicit that the project is released under the MIT license.
Python
mit
jaraco/tempora
#!/usr/bin/env python # Generated by jaraco.develop (https://bitbucket.org/jaraco/jaraco.develop) import setuptools with open('README.txt') as readme: long_description = readme.read() with open('CHANGES.txt') as changes: long_description += '\n\n' + changes.read() setup_params = dict( name='jaraco.timing', use_hg_version=True, author="Jason R. Coombs", author_email="jaraco@jaraco.com", description="jaraco.timing", long_description=long_description, url="https://bitbucket.org/jaraco/jaraco.timing", packages=setuptools.find_packages(), namespace_packages=['jaraco'], setup_requires=[ 'hgtools', 'pytest-runner', ], tests_require=[ 'pytest', ], classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", ], ) if __name__ == '__main__': setuptools.setup(**setup_params) Make explicit that the project is released under the MIT license.
#!/usr/bin/env python # Generated by jaraco.develop (https://bitbucket.org/jaraco/jaraco.develop) import setuptools with open('README.txt') as readme: long_description = readme.read() with open('CHANGES.txt') as changes: long_description += '\n\n' + changes.read() setup_params = dict( name='jaraco.timing', use_hg_version=True, author="Jason R. Coombs", author_email="jaraco@jaraco.com", description="jaraco.timing", long_description=long_description, url="https://bitbucket.org/jaraco/jaraco.timing", packages=setuptools.find_packages(), namespace_packages=['jaraco'], license="MIT", setup_requires=[ 'hgtools', 'pytest-runner', ], tests_require=[ 'pytest', ], classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", ], ) if __name__ == '__main__': setuptools.setup(**setup_params)
<commit_before>#!/usr/bin/env python # Generated by jaraco.develop (https://bitbucket.org/jaraco/jaraco.develop) import setuptools with open('README.txt') as readme: long_description = readme.read() with open('CHANGES.txt') as changes: long_description += '\n\n' + changes.read() setup_params = dict( name='jaraco.timing', use_hg_version=True, author="Jason R. Coombs", author_email="jaraco@jaraco.com", description="jaraco.timing", long_description=long_description, url="https://bitbucket.org/jaraco/jaraco.timing", packages=setuptools.find_packages(), namespace_packages=['jaraco'], setup_requires=[ 'hgtools', 'pytest-runner', ], tests_require=[ 'pytest', ], classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", ], ) if __name__ == '__main__': setuptools.setup(**setup_params) <commit_msg>Make explicit that the project is released under the MIT license.<commit_after>
#!/usr/bin/env python # Generated by jaraco.develop (https://bitbucket.org/jaraco/jaraco.develop) import setuptools with open('README.txt') as readme: long_description = readme.read() with open('CHANGES.txt') as changes: long_description += '\n\n' + changes.read() setup_params = dict( name='jaraco.timing', use_hg_version=True, author="Jason R. Coombs", author_email="jaraco@jaraco.com", description="jaraco.timing", long_description=long_description, url="https://bitbucket.org/jaraco/jaraco.timing", packages=setuptools.find_packages(), namespace_packages=['jaraco'], license="MIT", setup_requires=[ 'hgtools', 'pytest-runner', ], tests_require=[ 'pytest', ], classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", ], ) if __name__ == '__main__': setuptools.setup(**setup_params)
#!/usr/bin/env python # Generated by jaraco.develop (https://bitbucket.org/jaraco/jaraco.develop) import setuptools with open('README.txt') as readme: long_description = readme.read() with open('CHANGES.txt') as changes: long_description += '\n\n' + changes.read() setup_params = dict( name='jaraco.timing', use_hg_version=True, author="Jason R. Coombs", author_email="jaraco@jaraco.com", description="jaraco.timing", long_description=long_description, url="https://bitbucket.org/jaraco/jaraco.timing", packages=setuptools.find_packages(), namespace_packages=['jaraco'], setup_requires=[ 'hgtools', 'pytest-runner', ], tests_require=[ 'pytest', ], classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", ], ) if __name__ == '__main__': setuptools.setup(**setup_params) Make explicit that the project is released under the MIT license.#!/usr/bin/env python # Generated by jaraco.develop (https://bitbucket.org/jaraco/jaraco.develop) import setuptools with open('README.txt') as readme: long_description = readme.read() with open('CHANGES.txt') as changes: long_description += '\n\n' + changes.read() setup_params = dict( name='jaraco.timing', use_hg_version=True, author="Jason R. Coombs", author_email="jaraco@jaraco.com", description="jaraco.timing", long_description=long_description, url="https://bitbucket.org/jaraco/jaraco.timing", packages=setuptools.find_packages(), namespace_packages=['jaraco'], license="MIT", setup_requires=[ 'hgtools', 'pytest-runner', ], tests_require=[ 'pytest', ], classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", ], ) if __name__ == '__main__': setuptools.setup(**setup_params)
<commit_before>#!/usr/bin/env python # Generated by jaraco.develop (https://bitbucket.org/jaraco/jaraco.develop) import setuptools with open('README.txt') as readme: long_description = readme.read() with open('CHANGES.txt') as changes: long_description += '\n\n' + changes.read() setup_params = dict( name='jaraco.timing', use_hg_version=True, author="Jason R. Coombs", author_email="jaraco@jaraco.com", description="jaraco.timing", long_description=long_description, url="https://bitbucket.org/jaraco/jaraco.timing", packages=setuptools.find_packages(), namespace_packages=['jaraco'], setup_requires=[ 'hgtools', 'pytest-runner', ], tests_require=[ 'pytest', ], classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", ], ) if __name__ == '__main__': setuptools.setup(**setup_params) <commit_msg>Make explicit that the project is released under the MIT license.<commit_after>#!/usr/bin/env python # Generated by jaraco.develop (https://bitbucket.org/jaraco/jaraco.develop) import setuptools with open('README.txt') as readme: long_description = readme.read() with open('CHANGES.txt') as changes: long_description += '\n\n' + changes.read() setup_params = dict( name='jaraco.timing', use_hg_version=True, author="Jason R. Coombs", author_email="jaraco@jaraco.com", description="jaraco.timing", long_description=long_description, url="https://bitbucket.org/jaraco/jaraco.timing", packages=setuptools.find_packages(), namespace_packages=['jaraco'], license="MIT", setup_requires=[ 'hgtools', 'pytest-runner', ], tests_require=[ 'pytest', ], classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", ], ) if __name__ == '__main__': setuptools.setup(**setup_params)
9100a0a1106ef4419c9b6a25898b83f80afbaecf
setup.py
setup.py
from setuptools import setup, find_packages setup( name='zeit.wysiwyg', version='2.0.8.dev0', author='gocept, Zeit Online', author_email='zon-backend@zeit.de', url='http://www.zeit.de/', description="vivi legacy WYSIWYG editor", packages=find_packages('src'), package_dir={'': 'src'}, include_package_data=True, zip_safe=False, license='BSD', namespace_packages=['zeit'], install_requires=[ 'gocept.fckeditor[fanstatic]>=2.6.4.1-2', 'lxml', 'pytz', 'rwproperty', 'setuptools', 'zc.iso8601', 'zc.resourcelibrary', 'zeit.cms>=2.93.dev0', 'zeit.content.image>=2.13.6.dev0', 'zope.app.pagetemplate', 'zope.app.testing', 'zope.cachedescriptors', 'zope.component', 'zope.formlib', 'zope.interface', 'zope.security', 'zope.testing', 'zope.traversing', ], extras_require=dict(test=[ 'zeit.content.gallery', 'zeit.content.infobox', 'zeit.content.portraitbox', ]), entry_points={ 'fanstatic.libraries': [ 'zeit_wysiwyg=zeit.wysiwyg.browser.resources:lib', ], }, )
from setuptools import setup, find_packages setup( name='zeit.wysiwyg', version='2.0.8.dev0', author='gocept, Zeit Online', author_email='zon-backend@zeit.de', url='http://www.zeit.de/', description="vivi legacy WYSIWYG editor", packages=find_packages('src'), package_dir={'': 'src'}, include_package_data=True, zip_safe=False, license='BSD', namespace_packages=['zeit'], install_requires=[ 'gocept.fckeditor[fanstatic]>=2.6.4.1-2', 'lxml', 'pytz', 'rwproperty', 'setuptools', 'zc.iso8601', 'zc.resourcelibrary', 'zeit.cms >= 3.0.dev0', 'zeit.content.image>=2.13.6.dev0', 'zope.app.pagetemplate', 'zope.app.testing', 'zope.cachedescriptors', 'zope.component', 'zope.formlib', 'zope.interface', 'zope.security', 'zope.testing', 'zope.traversing', ], extras_require=dict(test=[ 'zeit.content.gallery', 'zeit.content.infobox', 'zeit.content.portraitbox', ]), entry_points={ 'fanstatic.libraries': [ 'zeit_wysiwyg=zeit.wysiwyg.browser.resources:lib', ], }, )
Update to version with celery.
ZON-3409: Update to version with celery.
Python
bsd-3-clause
ZeitOnline/zeit.wysiwyg,ZeitOnline/zeit.wysiwyg
from setuptools import setup, find_packages setup( name='zeit.wysiwyg', version='2.0.8.dev0', author='gocept, Zeit Online', author_email='zon-backend@zeit.de', url='http://www.zeit.de/', description="vivi legacy WYSIWYG editor", packages=find_packages('src'), package_dir={'': 'src'}, include_package_data=True, zip_safe=False, license='BSD', namespace_packages=['zeit'], install_requires=[ 'gocept.fckeditor[fanstatic]>=2.6.4.1-2', 'lxml', 'pytz', 'rwproperty', 'setuptools', 'zc.iso8601', 'zc.resourcelibrary', 'zeit.cms>=2.93.dev0', 'zeit.content.image>=2.13.6.dev0', 'zope.app.pagetemplate', 'zope.app.testing', 'zope.cachedescriptors', 'zope.component', 'zope.formlib', 'zope.interface', 'zope.security', 'zope.testing', 'zope.traversing', ], extras_require=dict(test=[ 'zeit.content.gallery', 'zeit.content.infobox', 'zeit.content.portraitbox', ]), entry_points={ 'fanstatic.libraries': [ 'zeit_wysiwyg=zeit.wysiwyg.browser.resources:lib', ], }, ) ZON-3409: Update to version with celery.
from setuptools import setup, find_packages setup( name='zeit.wysiwyg', version='2.0.8.dev0', author='gocept, Zeit Online', author_email='zon-backend@zeit.de', url='http://www.zeit.de/', description="vivi legacy WYSIWYG editor", packages=find_packages('src'), package_dir={'': 'src'}, include_package_data=True, zip_safe=False, license='BSD', namespace_packages=['zeit'], install_requires=[ 'gocept.fckeditor[fanstatic]>=2.6.4.1-2', 'lxml', 'pytz', 'rwproperty', 'setuptools', 'zc.iso8601', 'zc.resourcelibrary', 'zeit.cms >= 3.0.dev0', 'zeit.content.image>=2.13.6.dev0', 'zope.app.pagetemplate', 'zope.app.testing', 'zope.cachedescriptors', 'zope.component', 'zope.formlib', 'zope.interface', 'zope.security', 'zope.testing', 'zope.traversing', ], extras_require=dict(test=[ 'zeit.content.gallery', 'zeit.content.infobox', 'zeit.content.portraitbox', ]), entry_points={ 'fanstatic.libraries': [ 'zeit_wysiwyg=zeit.wysiwyg.browser.resources:lib', ], }, )
<commit_before>from setuptools import setup, find_packages setup( name='zeit.wysiwyg', version='2.0.8.dev0', author='gocept, Zeit Online', author_email='zon-backend@zeit.de', url='http://www.zeit.de/', description="vivi legacy WYSIWYG editor", packages=find_packages('src'), package_dir={'': 'src'}, include_package_data=True, zip_safe=False, license='BSD', namespace_packages=['zeit'], install_requires=[ 'gocept.fckeditor[fanstatic]>=2.6.4.1-2', 'lxml', 'pytz', 'rwproperty', 'setuptools', 'zc.iso8601', 'zc.resourcelibrary', 'zeit.cms>=2.93.dev0', 'zeit.content.image>=2.13.6.dev0', 'zope.app.pagetemplate', 'zope.app.testing', 'zope.cachedescriptors', 'zope.component', 'zope.formlib', 'zope.interface', 'zope.security', 'zope.testing', 'zope.traversing', ], extras_require=dict(test=[ 'zeit.content.gallery', 'zeit.content.infobox', 'zeit.content.portraitbox', ]), entry_points={ 'fanstatic.libraries': [ 'zeit_wysiwyg=zeit.wysiwyg.browser.resources:lib', ], }, ) <commit_msg>ZON-3409: Update to version with celery.<commit_after>
from setuptools import setup, find_packages setup( name='zeit.wysiwyg', version='2.0.8.dev0', author='gocept, Zeit Online', author_email='zon-backend@zeit.de', url='http://www.zeit.de/', description="vivi legacy WYSIWYG editor", packages=find_packages('src'), package_dir={'': 'src'}, include_package_data=True, zip_safe=False, license='BSD', namespace_packages=['zeit'], install_requires=[ 'gocept.fckeditor[fanstatic]>=2.6.4.1-2', 'lxml', 'pytz', 'rwproperty', 'setuptools', 'zc.iso8601', 'zc.resourcelibrary', 'zeit.cms >= 3.0.dev0', 'zeit.content.image>=2.13.6.dev0', 'zope.app.pagetemplate', 'zope.app.testing', 'zope.cachedescriptors', 'zope.component', 'zope.formlib', 'zope.interface', 'zope.security', 'zope.testing', 'zope.traversing', ], extras_require=dict(test=[ 'zeit.content.gallery', 'zeit.content.infobox', 'zeit.content.portraitbox', ]), entry_points={ 'fanstatic.libraries': [ 'zeit_wysiwyg=zeit.wysiwyg.browser.resources:lib', ], }, )
from setuptools import setup, find_packages setup( name='zeit.wysiwyg', version='2.0.8.dev0', author='gocept, Zeit Online', author_email='zon-backend@zeit.de', url='http://www.zeit.de/', description="vivi legacy WYSIWYG editor", packages=find_packages('src'), package_dir={'': 'src'}, include_package_data=True, zip_safe=False, license='BSD', namespace_packages=['zeit'], install_requires=[ 'gocept.fckeditor[fanstatic]>=2.6.4.1-2', 'lxml', 'pytz', 'rwproperty', 'setuptools', 'zc.iso8601', 'zc.resourcelibrary', 'zeit.cms>=2.93.dev0', 'zeit.content.image>=2.13.6.dev0', 'zope.app.pagetemplate', 'zope.app.testing', 'zope.cachedescriptors', 'zope.component', 'zope.formlib', 'zope.interface', 'zope.security', 'zope.testing', 'zope.traversing', ], extras_require=dict(test=[ 'zeit.content.gallery', 'zeit.content.infobox', 'zeit.content.portraitbox', ]), entry_points={ 'fanstatic.libraries': [ 'zeit_wysiwyg=zeit.wysiwyg.browser.resources:lib', ], }, ) ZON-3409: Update to version with celery.from setuptools import setup, find_packages setup( name='zeit.wysiwyg', version='2.0.8.dev0', author='gocept, Zeit Online', author_email='zon-backend@zeit.de', url='http://www.zeit.de/', description="vivi legacy WYSIWYG editor", packages=find_packages('src'), package_dir={'': 'src'}, include_package_data=True, zip_safe=False, license='BSD', namespace_packages=['zeit'], install_requires=[ 'gocept.fckeditor[fanstatic]>=2.6.4.1-2', 'lxml', 'pytz', 'rwproperty', 'setuptools', 'zc.iso8601', 'zc.resourcelibrary', 'zeit.cms >= 3.0.dev0', 'zeit.content.image>=2.13.6.dev0', 'zope.app.pagetemplate', 'zope.app.testing', 'zope.cachedescriptors', 'zope.component', 'zope.formlib', 'zope.interface', 'zope.security', 'zope.testing', 'zope.traversing', ], extras_require=dict(test=[ 'zeit.content.gallery', 'zeit.content.infobox', 'zeit.content.portraitbox', ]), entry_points={ 'fanstatic.libraries': [ 'zeit_wysiwyg=zeit.wysiwyg.browser.resources:lib', ], }, )
<commit_before>from setuptools import setup, find_packages setup( name='zeit.wysiwyg', version='2.0.8.dev0', author='gocept, Zeit Online', author_email='zon-backend@zeit.de', url='http://www.zeit.de/', description="vivi legacy WYSIWYG editor", packages=find_packages('src'), package_dir={'': 'src'}, include_package_data=True, zip_safe=False, license='BSD', namespace_packages=['zeit'], install_requires=[ 'gocept.fckeditor[fanstatic]>=2.6.4.1-2', 'lxml', 'pytz', 'rwproperty', 'setuptools', 'zc.iso8601', 'zc.resourcelibrary', 'zeit.cms>=2.93.dev0', 'zeit.content.image>=2.13.6.dev0', 'zope.app.pagetemplate', 'zope.app.testing', 'zope.cachedescriptors', 'zope.component', 'zope.formlib', 'zope.interface', 'zope.security', 'zope.testing', 'zope.traversing', ], extras_require=dict(test=[ 'zeit.content.gallery', 'zeit.content.infobox', 'zeit.content.portraitbox', ]), entry_points={ 'fanstatic.libraries': [ 'zeit_wysiwyg=zeit.wysiwyg.browser.resources:lib', ], }, ) <commit_msg>ZON-3409: Update to version with celery.<commit_after>from setuptools import setup, find_packages setup( name='zeit.wysiwyg', version='2.0.8.dev0', author='gocept, Zeit Online', author_email='zon-backend@zeit.de', url='http://www.zeit.de/', description="vivi legacy WYSIWYG editor", packages=find_packages('src'), package_dir={'': 'src'}, include_package_data=True, zip_safe=False, license='BSD', namespace_packages=['zeit'], install_requires=[ 'gocept.fckeditor[fanstatic]>=2.6.4.1-2', 'lxml', 'pytz', 'rwproperty', 'setuptools', 'zc.iso8601', 'zc.resourcelibrary', 'zeit.cms >= 3.0.dev0', 'zeit.content.image>=2.13.6.dev0', 'zope.app.pagetemplate', 'zope.app.testing', 'zope.cachedescriptors', 'zope.component', 'zope.formlib', 'zope.interface', 'zope.security', 'zope.testing', 'zope.traversing', ], extras_require=dict(test=[ 'zeit.content.gallery', 'zeit.content.infobox', 'zeit.content.portraitbox', ]), entry_points={ 'fanstatic.libraries': [ 'zeit_wysiwyg=zeit.wysiwyg.browser.resources:lib', ], }, )
7d214c5b9d013c95547d07aed79d02e454abee5d
setup.py
setup.py
from setuptools import setup, find_packages from os.path import join, dirname setup( name='pandas-validation', version='0.3.0', description=( 'A Python package for validating data with pandas'), long_description=open( join(dirname(__file__), 'README.rst'), encoding='utf-8').read(), packages=find_packages(exclude=['docs', 'tests*']), py_modules=['pandasvalidation'], install_requires=['pandas>=0.16'], author='Markus Englund', author_email='jan.markus.englund@gmail.com', url='https://github.com/jmenglund/pandas-validation', license='MIT', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6'], keywords=['pandas', 'validation'], )
from setuptools import setup, find_packages from os.path import join, dirname setup( name='pandas-validation', version='0.3.1', description=( 'A Python package for validating data with pandas'), long_description=open( join(dirname(__file__), 'README.rst'), encoding='utf-8').read(), packages=find_packages(exclude=['docs', 'tests*']), py_modules=['pandasvalidation'], install_requires=['pandas>=0.16'], author='Markus Englund', author_email='jan.markus.englund@gmail.com', url='https://github.com/jmenglund/pandas-validation', license='MIT', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6'], keywords=['pandas', 'validation'], )
Update release version to 0.3.1
Update release version to 0.3.1
Python
mit
jmenglund/pandas-validation
from setuptools import setup, find_packages from os.path import join, dirname setup( name='pandas-validation', version='0.3.0', description=( 'A Python package for validating data with pandas'), long_description=open( join(dirname(__file__), 'README.rst'), encoding='utf-8').read(), packages=find_packages(exclude=['docs', 'tests*']), py_modules=['pandasvalidation'], install_requires=['pandas>=0.16'], author='Markus Englund', author_email='jan.markus.englund@gmail.com', url='https://github.com/jmenglund/pandas-validation', license='MIT', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6'], keywords=['pandas', 'validation'], ) Update release version to 0.3.1
from setuptools import setup, find_packages from os.path import join, dirname setup( name='pandas-validation', version='0.3.1', description=( 'A Python package for validating data with pandas'), long_description=open( join(dirname(__file__), 'README.rst'), encoding='utf-8').read(), packages=find_packages(exclude=['docs', 'tests*']), py_modules=['pandasvalidation'], install_requires=['pandas>=0.16'], author='Markus Englund', author_email='jan.markus.englund@gmail.com', url='https://github.com/jmenglund/pandas-validation', license='MIT', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6'], keywords=['pandas', 'validation'], )
<commit_before>from setuptools import setup, find_packages from os.path import join, dirname setup( name='pandas-validation', version='0.3.0', description=( 'A Python package for validating data with pandas'), long_description=open( join(dirname(__file__), 'README.rst'), encoding='utf-8').read(), packages=find_packages(exclude=['docs', 'tests*']), py_modules=['pandasvalidation'], install_requires=['pandas>=0.16'], author='Markus Englund', author_email='jan.markus.englund@gmail.com', url='https://github.com/jmenglund/pandas-validation', license='MIT', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6'], keywords=['pandas', 'validation'], ) <commit_msg>Update release version to 0.3.1<commit_after>
from setuptools import setup, find_packages from os.path import join, dirname setup( name='pandas-validation', version='0.3.1', description=( 'A Python package for validating data with pandas'), long_description=open( join(dirname(__file__), 'README.rst'), encoding='utf-8').read(), packages=find_packages(exclude=['docs', 'tests*']), py_modules=['pandasvalidation'], install_requires=['pandas>=0.16'], author='Markus Englund', author_email='jan.markus.englund@gmail.com', url='https://github.com/jmenglund/pandas-validation', license='MIT', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6'], keywords=['pandas', 'validation'], )
from setuptools import setup, find_packages from os.path import join, dirname setup( name='pandas-validation', version='0.3.0', description=( 'A Python package for validating data with pandas'), long_description=open( join(dirname(__file__), 'README.rst'), encoding='utf-8').read(), packages=find_packages(exclude=['docs', 'tests*']), py_modules=['pandasvalidation'], install_requires=['pandas>=0.16'], author='Markus Englund', author_email='jan.markus.englund@gmail.com', url='https://github.com/jmenglund/pandas-validation', license='MIT', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6'], keywords=['pandas', 'validation'], ) Update release version to 0.3.1from setuptools import setup, find_packages from os.path import join, dirname setup( name='pandas-validation', version='0.3.1', description=( 'A Python package for validating data with pandas'), long_description=open( join(dirname(__file__), 'README.rst'), encoding='utf-8').read(), packages=find_packages(exclude=['docs', 'tests*']), py_modules=['pandasvalidation'], install_requires=['pandas>=0.16'], author='Markus Englund', author_email='jan.markus.englund@gmail.com', url='https://github.com/jmenglund/pandas-validation', license='MIT', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6'], keywords=['pandas', 'validation'], )
<commit_before>from setuptools import setup, find_packages from os.path import join, dirname setup( name='pandas-validation', version='0.3.0', description=( 'A Python package for validating data with pandas'), long_description=open( join(dirname(__file__), 'README.rst'), encoding='utf-8').read(), packages=find_packages(exclude=['docs', 'tests*']), py_modules=['pandasvalidation'], install_requires=['pandas>=0.16'], author='Markus Englund', author_email='jan.markus.englund@gmail.com', url='https://github.com/jmenglund/pandas-validation', license='MIT', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6'], keywords=['pandas', 'validation'], ) <commit_msg>Update release version to 0.3.1<commit_after>from setuptools import setup, find_packages from os.path import join, dirname setup( name='pandas-validation', version='0.3.1', description=( 'A Python package for validating data with pandas'), long_description=open( join(dirname(__file__), 'README.rst'), encoding='utf-8').read(), packages=find_packages(exclude=['docs', 'tests*']), py_modules=['pandasvalidation'], install_requires=['pandas>=0.16'], author='Markus Englund', author_email='jan.markus.englund@gmail.com', url='https://github.com/jmenglund/pandas-validation', license='MIT', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6'], keywords=['pandas', 'validation'], )
22476ba2fcdded7e3ee7d3f1ed323229d9a308ce
setup.py
setup.py
try: from setuptools import setup from setuptools.extension import Extension except ImportError: from distutils.core import setup, Extension def main(): module = Extension('rrdtool', sources=['rrdtoolmodule.c'], include_dirs=['/usr/local/include'], library_dirs=['/usr/local/lib'], libraries=['rrd']) kwargs = dict( name='rrdtool', version='0.1.7', description='Python bindings for rrdtool', keywords=['rrdtool'], author='Christian Kroeger, Hye-Shik Chang', author_email='commx@commx.ws', license='LGPL', url='https://github.com/commx/python-rrdtool', ext_modules=[module], test_suite="tests" ) setup(**kwargs) if __name__ == '__main__': main()
try: from setuptools import setup from setuptools.extension import Extension except ImportError: from distutils.core import setup, Extension def main(): module = Extension('rrdtool', sources=['rrdtoolmodule.h', 'rrdtoolmodule.c'], include_dirs=['/usr/local/include'], library_dirs=['/usr/local/lib'], libraries=['rrd']) kwargs = dict( name='rrdtool', version='0.1.7', description='Python bindings for rrdtool', keywords=['rrdtool'], author='Christian Kroeger, Hye-Shik Chang', author_email='commx@commx.ws', license='LGPL', url='https://github.com/commx/python-rrdtool', ext_modules=[module], test_suite="tests" ) setup(**kwargs) if __name__ == '__main__': main()
Add missing header file to the list of sources
Add missing header file to the list of sources
Python
lgpl-2.1
commx/python-rrdtool,commx/python-rrdtool
try: from setuptools import setup from setuptools.extension import Extension except ImportError: from distutils.core import setup, Extension def main(): module = Extension('rrdtool', sources=['rrdtoolmodule.c'], include_dirs=['/usr/local/include'], library_dirs=['/usr/local/lib'], libraries=['rrd']) kwargs = dict( name='rrdtool', version='0.1.7', description='Python bindings for rrdtool', keywords=['rrdtool'], author='Christian Kroeger, Hye-Shik Chang', author_email='commx@commx.ws', license='LGPL', url='https://github.com/commx/python-rrdtool', ext_modules=[module], test_suite="tests" ) setup(**kwargs) if __name__ == '__main__': main() Add missing header file to the list of sources
try: from setuptools import setup from setuptools.extension import Extension except ImportError: from distutils.core import setup, Extension def main(): module = Extension('rrdtool', sources=['rrdtoolmodule.h', 'rrdtoolmodule.c'], include_dirs=['/usr/local/include'], library_dirs=['/usr/local/lib'], libraries=['rrd']) kwargs = dict( name='rrdtool', version='0.1.7', description='Python bindings for rrdtool', keywords=['rrdtool'], author='Christian Kroeger, Hye-Shik Chang', author_email='commx@commx.ws', license='LGPL', url='https://github.com/commx/python-rrdtool', ext_modules=[module], test_suite="tests" ) setup(**kwargs) if __name__ == '__main__': main()
<commit_before>try: from setuptools import setup from setuptools.extension import Extension except ImportError: from distutils.core import setup, Extension def main(): module = Extension('rrdtool', sources=['rrdtoolmodule.c'], include_dirs=['/usr/local/include'], library_dirs=['/usr/local/lib'], libraries=['rrd']) kwargs = dict( name='rrdtool', version='0.1.7', description='Python bindings for rrdtool', keywords=['rrdtool'], author='Christian Kroeger, Hye-Shik Chang', author_email='commx@commx.ws', license='LGPL', url='https://github.com/commx/python-rrdtool', ext_modules=[module], test_suite="tests" ) setup(**kwargs) if __name__ == '__main__': main() <commit_msg>Add missing header file to the list of sources<commit_after>
try: from setuptools import setup from setuptools.extension import Extension except ImportError: from distutils.core import setup, Extension def main(): module = Extension('rrdtool', sources=['rrdtoolmodule.h', 'rrdtoolmodule.c'], include_dirs=['/usr/local/include'], library_dirs=['/usr/local/lib'], libraries=['rrd']) kwargs = dict( name='rrdtool', version='0.1.7', description='Python bindings for rrdtool', keywords=['rrdtool'], author='Christian Kroeger, Hye-Shik Chang', author_email='commx@commx.ws', license='LGPL', url='https://github.com/commx/python-rrdtool', ext_modules=[module], test_suite="tests" ) setup(**kwargs) if __name__ == '__main__': main()
try: from setuptools import setup from setuptools.extension import Extension except ImportError: from distutils.core import setup, Extension def main(): module = Extension('rrdtool', sources=['rrdtoolmodule.c'], include_dirs=['/usr/local/include'], library_dirs=['/usr/local/lib'], libraries=['rrd']) kwargs = dict( name='rrdtool', version='0.1.7', description='Python bindings for rrdtool', keywords=['rrdtool'], author='Christian Kroeger, Hye-Shik Chang', author_email='commx@commx.ws', license='LGPL', url='https://github.com/commx/python-rrdtool', ext_modules=[module], test_suite="tests" ) setup(**kwargs) if __name__ == '__main__': main() Add missing header file to the list of sourcestry: from setuptools import setup from setuptools.extension import Extension except ImportError: from distutils.core import setup, Extension def main(): module = Extension('rrdtool', sources=['rrdtoolmodule.h', 'rrdtoolmodule.c'], include_dirs=['/usr/local/include'], library_dirs=['/usr/local/lib'], libraries=['rrd']) kwargs = dict( name='rrdtool', version='0.1.7', description='Python bindings for rrdtool', keywords=['rrdtool'], author='Christian Kroeger, Hye-Shik Chang', author_email='commx@commx.ws', license='LGPL', url='https://github.com/commx/python-rrdtool', ext_modules=[module], test_suite="tests" ) setup(**kwargs) if __name__ == '__main__': main()
<commit_before>try: from setuptools import setup from setuptools.extension import Extension except ImportError: from distutils.core import setup, Extension def main(): module = Extension('rrdtool', sources=['rrdtoolmodule.c'], include_dirs=['/usr/local/include'], library_dirs=['/usr/local/lib'], libraries=['rrd']) kwargs = dict( name='rrdtool', version='0.1.7', description='Python bindings for rrdtool', keywords=['rrdtool'], author='Christian Kroeger, Hye-Shik Chang', author_email='commx@commx.ws', license='LGPL', url='https://github.com/commx/python-rrdtool', ext_modules=[module], test_suite="tests" ) setup(**kwargs) if __name__ == '__main__': main() <commit_msg>Add missing header file to the list of sources<commit_after>try: from setuptools import setup from setuptools.extension import Extension except ImportError: from distutils.core import setup, Extension def main(): module = Extension('rrdtool', sources=['rrdtoolmodule.h', 'rrdtoolmodule.c'], include_dirs=['/usr/local/include'], library_dirs=['/usr/local/lib'], libraries=['rrd']) kwargs = dict( name='rrdtool', version='0.1.7', description='Python bindings for rrdtool', keywords=['rrdtool'], author='Christian Kroeger, Hye-Shik Chang', author_email='commx@commx.ws', license='LGPL', url='https://github.com/commx/python-rrdtool', ext_modules=[module], test_suite="tests" ) setup(**kwargs) if __name__ == '__main__': main()
3010973a9afc53842ee0f145a156b2083425cc2f
setup.py
setup.py
import os from setuptools import find_packages, setup with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme: README = readme.read() # allow setup.py to be run from any path os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) setup( name='django-etesync-journal', version='1.0.2', packages=find_packages(exclude=['tests*']), include_package_data=True, license='AGPL-3.0-only', description='The server side implementation of the EteSync protocol.', long_description=README, url='https://www.etesync.com/', author='EteSync', author_email='development@etesync.com', classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: GNU Affero General Public License v3', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', ], )
import os from setuptools import find_packages, setup with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme: README = readme.read() # allow setup.py to be run from any path os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) setup( name='django-etesync-journal', version='1.0.2', packages=find_packages(exclude=['tests*']), include_package_data=True, license='AGPL-3.0-only', description='The server side implementation of the EteSync protocol.', long_description=README, long_description_content_type='text/markdown', url='https://www.etesync.com/', author='EteSync', author_email='development@etesync.com', classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: GNU Affero General Public License v3', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', ], )
Set the description's content type.
Set the description's content type.
Python
agpl-3.0
etesync/journal-manager
import os from setuptools import find_packages, setup with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme: README = readme.read() # allow setup.py to be run from any path os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) setup( name='django-etesync-journal', version='1.0.2', packages=find_packages(exclude=['tests*']), include_package_data=True, license='AGPL-3.0-only', description='The server side implementation of the EteSync protocol.', long_description=README, url='https://www.etesync.com/', author='EteSync', author_email='development@etesync.com', classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: GNU Affero General Public License v3', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', ], ) Set the description's content type.
import os from setuptools import find_packages, setup with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme: README = readme.read() # allow setup.py to be run from any path os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) setup( name='django-etesync-journal', version='1.0.2', packages=find_packages(exclude=['tests*']), include_package_data=True, license='AGPL-3.0-only', description='The server side implementation of the EteSync protocol.', long_description=README, long_description_content_type='text/markdown', url='https://www.etesync.com/', author='EteSync', author_email='development@etesync.com', classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: GNU Affero General Public License v3', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', ], )
<commit_before>import os from setuptools import find_packages, setup with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme: README = readme.read() # allow setup.py to be run from any path os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) setup( name='django-etesync-journal', version='1.0.2', packages=find_packages(exclude=['tests*']), include_package_data=True, license='AGPL-3.0-only', description='The server side implementation of the EteSync protocol.', long_description=README, url='https://www.etesync.com/', author='EteSync', author_email='development@etesync.com', classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: GNU Affero General Public License v3', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', ], ) <commit_msg>Set the description's content type.<commit_after>
import os from setuptools import find_packages, setup with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme: README = readme.read() # allow setup.py to be run from any path os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) setup( name='django-etesync-journal', version='1.0.2', packages=find_packages(exclude=['tests*']), include_package_data=True, license='AGPL-3.0-only', description='The server side implementation of the EteSync protocol.', long_description=README, long_description_content_type='text/markdown', url='https://www.etesync.com/', author='EteSync', author_email='development@etesync.com', classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: GNU Affero General Public License v3', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', ], )
import os from setuptools import find_packages, setup with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme: README = readme.read() # allow setup.py to be run from any path os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) setup( name='django-etesync-journal', version='1.0.2', packages=find_packages(exclude=['tests*']), include_package_data=True, license='AGPL-3.0-only', description='The server side implementation of the EteSync protocol.', long_description=README, url='https://www.etesync.com/', author='EteSync', author_email='development@etesync.com', classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: GNU Affero General Public License v3', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', ], ) Set the description's content type.import os from setuptools import find_packages, setup with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme: README = readme.read() # allow setup.py to be run from any path os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) setup( name='django-etesync-journal', version='1.0.2', packages=find_packages(exclude=['tests*']), include_package_data=True, license='AGPL-3.0-only', description='The server side implementation of the EteSync protocol.', long_description=README, long_description_content_type='text/markdown', url='https://www.etesync.com/', author='EteSync', author_email='development@etesync.com', classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: GNU Affero General Public License v3', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', ], )
<commit_before>import os from setuptools import find_packages, setup with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme: README = readme.read() # allow setup.py to be run from any path os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) setup( name='django-etesync-journal', version='1.0.2', packages=find_packages(exclude=['tests*']), include_package_data=True, license='AGPL-3.0-only', description='The server side implementation of the EteSync protocol.', long_description=README, url='https://www.etesync.com/', author='EteSync', author_email='development@etesync.com', classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: GNU Affero General Public License v3', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', ], ) <commit_msg>Set the description's content type.<commit_after>import os from setuptools import find_packages, setup with open(os.path.join(os.path.dirname(__file__), 'README.md')) as readme: README = readme.read() # allow setup.py to be run from any path os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), os.pardir))) setup( name='django-etesync-journal', version='1.0.2', packages=find_packages(exclude=['tests*']), include_package_data=True, license='AGPL-3.0-only', description='The server side implementation of the EteSync protocol.', long_description=README, long_description_content_type='text/markdown', url='https://www.etesync.com/', author='EteSync', author_email='development@etesync.com', classifiers=[ 'Environment :: Web Environment', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: GNU Affero General Public License v3', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', ], )
20c44c97bb79e69ed91a125b9e550ebd29818a64
setup.py
setup.py
from setuptools import setup, find_packages requirements = [ 'boto>=2.8.0,<3.0', ] setup( name='celery-s3', version='0.1', description='An S3 result store backend for Celery', long_description=open('README.md').read(), author='Rob Golding', author_email='rob@robgolding.com', license='BSD', url='https://github.com/robgolding63/celery-s3', download_url='https://github.com/robgolding63/celery-s3/downloads', packages=find_packages(), include_package_data=True, install_requires=requirements, tests_require=requirements + [ 'celery==4.1.0', ], test_suite='celery_s3.tests', classifiers=[ 'Development Status :: 3 - Alpha', 'License :: OSI Approved :: BSD License', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: System :: Distributed Computing', 'Programming Language :: Python', 'Operating System :: OS Independent', ], )
from setuptools import setup, find_packages requirements = [ 'boto>=2.8.0,<3.0', ] setup( name='celery-s3', version='0.1', description='An S3 result store backend for Celery', long_description=open('README.md').read(), author='Rob Golding', author_email='rob@robgolding.com', license='BSD', url='https://github.com/robgolding63/celery-s3', download_url='https://github.com/robgolding63/celery-s3/downloads', packages=find_packages(), include_package_data=True, install_requires=requirements, tests_require=requirements + [ 'celery==4.1.0', ], test_suite='celery_s3.tests', classifiers=[ 'Development Status :: 3 - Alpha', 'Programming Language :: Python', "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.6", 'License :: OSI Approved :: BSD License', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: System :: Distributed Computing', 'Programming Language :: Python', 'Operating System :: OS Independent', ], )
Add python compatibility tags for 2.7 and 3.6
Add python compatibility tags for 2.7 and 3.6
Python
bsd-3-clause
robgolding63/celery-s3,robgolding/celery-s3
from setuptools import setup, find_packages requirements = [ 'boto>=2.8.0,<3.0', ] setup( name='celery-s3', version='0.1', description='An S3 result store backend for Celery', long_description=open('README.md').read(), author='Rob Golding', author_email='rob@robgolding.com', license='BSD', url='https://github.com/robgolding63/celery-s3', download_url='https://github.com/robgolding63/celery-s3/downloads', packages=find_packages(), include_package_data=True, install_requires=requirements, tests_require=requirements + [ 'celery==4.1.0', ], test_suite='celery_s3.tests', classifiers=[ 'Development Status :: 3 - Alpha', 'License :: OSI Approved :: BSD License', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: System :: Distributed Computing', 'Programming Language :: Python', 'Operating System :: OS Independent', ], ) Add python compatibility tags for 2.7 and 3.6
from setuptools import setup, find_packages requirements = [ 'boto>=2.8.0,<3.0', ] setup( name='celery-s3', version='0.1', description='An S3 result store backend for Celery', long_description=open('README.md').read(), author='Rob Golding', author_email='rob@robgolding.com', license='BSD', url='https://github.com/robgolding63/celery-s3', download_url='https://github.com/robgolding63/celery-s3/downloads', packages=find_packages(), include_package_data=True, install_requires=requirements, tests_require=requirements + [ 'celery==4.1.0', ], test_suite='celery_s3.tests', classifiers=[ 'Development Status :: 3 - Alpha', 'Programming Language :: Python', "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.6", 'License :: OSI Approved :: BSD License', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: System :: Distributed Computing', 'Programming Language :: Python', 'Operating System :: OS Independent', ], )
<commit_before>from setuptools import setup, find_packages requirements = [ 'boto>=2.8.0,<3.0', ] setup( name='celery-s3', version='0.1', description='An S3 result store backend for Celery', long_description=open('README.md').read(), author='Rob Golding', author_email='rob@robgolding.com', license='BSD', url='https://github.com/robgolding63/celery-s3', download_url='https://github.com/robgolding63/celery-s3/downloads', packages=find_packages(), include_package_data=True, install_requires=requirements, tests_require=requirements + [ 'celery==4.1.0', ], test_suite='celery_s3.tests', classifiers=[ 'Development Status :: 3 - Alpha', 'License :: OSI Approved :: BSD License', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: System :: Distributed Computing', 'Programming Language :: Python', 'Operating System :: OS Independent', ], ) <commit_msg>Add python compatibility tags for 2.7 and 3.6<commit_after>
from setuptools import setup, find_packages requirements = [ 'boto>=2.8.0,<3.0', ] setup( name='celery-s3', version='0.1', description='An S3 result store backend for Celery', long_description=open('README.md').read(), author='Rob Golding', author_email='rob@robgolding.com', license='BSD', url='https://github.com/robgolding63/celery-s3', download_url='https://github.com/robgolding63/celery-s3/downloads', packages=find_packages(), include_package_data=True, install_requires=requirements, tests_require=requirements + [ 'celery==4.1.0', ], test_suite='celery_s3.tests', classifiers=[ 'Development Status :: 3 - Alpha', 'Programming Language :: Python', "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.6", 'License :: OSI Approved :: BSD License', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: System :: Distributed Computing', 'Programming Language :: Python', 'Operating System :: OS Independent', ], )
from setuptools import setup, find_packages requirements = [ 'boto>=2.8.0,<3.0', ] setup( name='celery-s3', version='0.1', description='An S3 result store backend for Celery', long_description=open('README.md').read(), author='Rob Golding', author_email='rob@robgolding.com', license='BSD', url='https://github.com/robgolding63/celery-s3', download_url='https://github.com/robgolding63/celery-s3/downloads', packages=find_packages(), include_package_data=True, install_requires=requirements, tests_require=requirements + [ 'celery==4.1.0', ], test_suite='celery_s3.tests', classifiers=[ 'Development Status :: 3 - Alpha', 'License :: OSI Approved :: BSD License', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: System :: Distributed Computing', 'Programming Language :: Python', 'Operating System :: OS Independent', ], ) Add python compatibility tags for 2.7 and 3.6from setuptools import setup, find_packages requirements = [ 'boto>=2.8.0,<3.0', ] setup( name='celery-s3', version='0.1', description='An S3 result store backend for Celery', long_description=open('README.md').read(), author='Rob Golding', author_email='rob@robgolding.com', license='BSD', url='https://github.com/robgolding63/celery-s3', download_url='https://github.com/robgolding63/celery-s3/downloads', packages=find_packages(), include_package_data=True, install_requires=requirements, tests_require=requirements + [ 'celery==4.1.0', ], test_suite='celery_s3.tests', classifiers=[ 'Development Status :: 3 - Alpha', 'Programming Language :: Python', "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.6", 'License :: OSI Approved :: BSD License', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: System :: Distributed Computing', 'Programming Language :: Python', 'Operating System :: OS Independent', ], )
<commit_before>from setuptools import setup, find_packages requirements = [ 'boto>=2.8.0,<3.0', ] setup( name='celery-s3', version='0.1', description='An S3 result store backend for Celery', long_description=open('README.md').read(), author='Rob Golding', author_email='rob@robgolding.com', license='BSD', url='https://github.com/robgolding63/celery-s3', download_url='https://github.com/robgolding63/celery-s3/downloads', packages=find_packages(), include_package_data=True, install_requires=requirements, tests_require=requirements + [ 'celery==4.1.0', ], test_suite='celery_s3.tests', classifiers=[ 'Development Status :: 3 - Alpha', 'License :: OSI Approved :: BSD License', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: System :: Distributed Computing', 'Programming Language :: Python', 'Operating System :: OS Independent', ], ) <commit_msg>Add python compatibility tags for 2.7 and 3.6<commit_after>from setuptools import setup, find_packages requirements = [ 'boto>=2.8.0,<3.0', ] setup( name='celery-s3', version='0.1', description='An S3 result store backend for Celery', long_description=open('README.md').read(), author='Rob Golding', author_email='rob@robgolding.com', license='BSD', url='https://github.com/robgolding63/celery-s3', download_url='https://github.com/robgolding63/celery-s3/downloads', packages=find_packages(), include_package_data=True, install_requires=requirements, tests_require=requirements + [ 'celery==4.1.0', ], test_suite='celery_s3.tests', classifiers=[ 'Development Status :: 3 - Alpha', 'Programming Language :: Python', "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.6", 'License :: OSI Approved :: BSD License', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: System :: Distributed Computing', 'Programming Language :: Python', 'Operating System :: OS Independent', ], )
83fda990174df238496b833f13c8dab32ee19f05
setup.py
setup.py
from setuptools import setup setup( name='tangled', version='0.1a8.dev0', description='Tangled namespace and utilities', long_description=open('README.rst').read(), url='http://tangledframework.org/', download_url='https://github.com/TangledWeb/tangled/tags', author='Wyatt Baldwin', author_email='self@wyattbaldwin.com', packages=[ 'tangled', 'tangled.scripts', 'tangled.tests', 'tangled.tests.dummy_package', ], extras_require={ 'dev': ( 'coverage>=3.7.1', 'nose>=1.3.1', 'pep8>=1.4.6', 'pyflakes>=0.7.3', 'Sphinx>=1.2.2', 'sphinx_rtd_theme>=0.1.5', ) }, entry_points=""" [console_scripts] tangled = tangled.__main__:main [tangled.scripts] release = tangled.scripts:ReleaseCommand scaffold = tangled.scripts:ScaffoldCommand python = tangled.scripts:ShellCommand test = tangled.scripts:TestCommand """, classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', ], )
from setuptools import setup setup( name='tangled', version='0.1a8.dev0', description='Tangled namespace and utilities', long_description=open('README.rst').read(), url='http://tangledframework.org/', download_url='https://github.com/TangledWeb/tangled/tags', author='Wyatt Baldwin', author_email='self@wyattbaldwin.com', packages=[ 'tangled', 'tangled.scripts', 'tangled.tests', 'tangled.tests.dummy_package', ], extras_require={ 'dev': ( 'coverage>=3.7.1', 'nose>=1.3.1', 'pep8>=1.4.6', 'pyflakes>=0.8', 'Sphinx>=1.2.2', 'sphinx_rtd_theme>=0.1.5', ) }, entry_points=""" [console_scripts] tangled = tangled.__main__:main [tangled.scripts] release = tangled.scripts:ReleaseCommand scaffold = tangled.scripts:ScaffoldCommand python = tangled.scripts:ShellCommand test = tangled.scripts:TestCommand """, classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', ], )
Upgrade pyflakes from 0.7.3 to 0.8
Upgrade pyflakes from 0.7.3 to 0.8
Python
mit
TangledWeb/tangled
from setuptools import setup setup( name='tangled', version='0.1a8.dev0', description='Tangled namespace and utilities', long_description=open('README.rst').read(), url='http://tangledframework.org/', download_url='https://github.com/TangledWeb/tangled/tags', author='Wyatt Baldwin', author_email='self@wyattbaldwin.com', packages=[ 'tangled', 'tangled.scripts', 'tangled.tests', 'tangled.tests.dummy_package', ], extras_require={ 'dev': ( 'coverage>=3.7.1', 'nose>=1.3.1', 'pep8>=1.4.6', 'pyflakes>=0.7.3', 'Sphinx>=1.2.2', 'sphinx_rtd_theme>=0.1.5', ) }, entry_points=""" [console_scripts] tangled = tangled.__main__:main [tangled.scripts] release = tangled.scripts:ReleaseCommand scaffold = tangled.scripts:ScaffoldCommand python = tangled.scripts:ShellCommand test = tangled.scripts:TestCommand """, classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', ], ) Upgrade pyflakes from 0.7.3 to 0.8
from setuptools import setup setup( name='tangled', version='0.1a8.dev0', description='Tangled namespace and utilities', long_description=open('README.rst').read(), url='http://tangledframework.org/', download_url='https://github.com/TangledWeb/tangled/tags', author='Wyatt Baldwin', author_email='self@wyattbaldwin.com', packages=[ 'tangled', 'tangled.scripts', 'tangled.tests', 'tangled.tests.dummy_package', ], extras_require={ 'dev': ( 'coverage>=3.7.1', 'nose>=1.3.1', 'pep8>=1.4.6', 'pyflakes>=0.8', 'Sphinx>=1.2.2', 'sphinx_rtd_theme>=0.1.5', ) }, entry_points=""" [console_scripts] tangled = tangled.__main__:main [tangled.scripts] release = tangled.scripts:ReleaseCommand scaffold = tangled.scripts:ScaffoldCommand python = tangled.scripts:ShellCommand test = tangled.scripts:TestCommand """, classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', ], )
<commit_before>from setuptools import setup setup( name='tangled', version='0.1a8.dev0', description='Tangled namespace and utilities', long_description=open('README.rst').read(), url='http://tangledframework.org/', download_url='https://github.com/TangledWeb/tangled/tags', author='Wyatt Baldwin', author_email='self@wyattbaldwin.com', packages=[ 'tangled', 'tangled.scripts', 'tangled.tests', 'tangled.tests.dummy_package', ], extras_require={ 'dev': ( 'coverage>=3.7.1', 'nose>=1.3.1', 'pep8>=1.4.6', 'pyflakes>=0.7.3', 'Sphinx>=1.2.2', 'sphinx_rtd_theme>=0.1.5', ) }, entry_points=""" [console_scripts] tangled = tangled.__main__:main [tangled.scripts] release = tangled.scripts:ReleaseCommand scaffold = tangled.scripts:ScaffoldCommand python = tangled.scripts:ShellCommand test = tangled.scripts:TestCommand """, classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', ], ) <commit_msg>Upgrade pyflakes from 0.7.3 to 0.8<commit_after>
from setuptools import setup setup( name='tangled', version='0.1a8.dev0', description='Tangled namespace and utilities', long_description=open('README.rst').read(), url='http://tangledframework.org/', download_url='https://github.com/TangledWeb/tangled/tags', author='Wyatt Baldwin', author_email='self@wyattbaldwin.com', packages=[ 'tangled', 'tangled.scripts', 'tangled.tests', 'tangled.tests.dummy_package', ], extras_require={ 'dev': ( 'coverage>=3.7.1', 'nose>=1.3.1', 'pep8>=1.4.6', 'pyflakes>=0.8', 'Sphinx>=1.2.2', 'sphinx_rtd_theme>=0.1.5', ) }, entry_points=""" [console_scripts] tangled = tangled.__main__:main [tangled.scripts] release = tangled.scripts:ReleaseCommand scaffold = tangled.scripts:ScaffoldCommand python = tangled.scripts:ShellCommand test = tangled.scripts:TestCommand """, classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', ], )
from setuptools import setup setup( name='tangled', version='0.1a8.dev0', description='Tangled namespace and utilities', long_description=open('README.rst').read(), url='http://tangledframework.org/', download_url='https://github.com/TangledWeb/tangled/tags', author='Wyatt Baldwin', author_email='self@wyattbaldwin.com', packages=[ 'tangled', 'tangled.scripts', 'tangled.tests', 'tangled.tests.dummy_package', ], extras_require={ 'dev': ( 'coverage>=3.7.1', 'nose>=1.3.1', 'pep8>=1.4.6', 'pyflakes>=0.7.3', 'Sphinx>=1.2.2', 'sphinx_rtd_theme>=0.1.5', ) }, entry_points=""" [console_scripts] tangled = tangled.__main__:main [tangled.scripts] release = tangled.scripts:ReleaseCommand scaffold = tangled.scripts:ScaffoldCommand python = tangled.scripts:ShellCommand test = tangled.scripts:TestCommand """, classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', ], ) Upgrade pyflakes from 0.7.3 to 0.8from setuptools import setup setup( name='tangled', version='0.1a8.dev0', description='Tangled namespace and utilities', long_description=open('README.rst').read(), url='http://tangledframework.org/', download_url='https://github.com/TangledWeb/tangled/tags', author='Wyatt Baldwin', author_email='self@wyattbaldwin.com', packages=[ 'tangled', 'tangled.scripts', 'tangled.tests', 'tangled.tests.dummy_package', ], extras_require={ 'dev': ( 'coverage>=3.7.1', 'nose>=1.3.1', 'pep8>=1.4.6', 'pyflakes>=0.8', 'Sphinx>=1.2.2', 'sphinx_rtd_theme>=0.1.5', ) }, entry_points=""" [console_scripts] tangled = tangled.__main__:main [tangled.scripts] release = tangled.scripts:ReleaseCommand scaffold = tangled.scripts:ScaffoldCommand python = tangled.scripts:ShellCommand test = tangled.scripts:TestCommand """, classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', ], )
<commit_before>from setuptools import setup setup( name='tangled', version='0.1a8.dev0', description='Tangled namespace and utilities', long_description=open('README.rst').read(), url='http://tangledframework.org/', download_url='https://github.com/TangledWeb/tangled/tags', author='Wyatt Baldwin', author_email='self@wyattbaldwin.com', packages=[ 'tangled', 'tangled.scripts', 'tangled.tests', 'tangled.tests.dummy_package', ], extras_require={ 'dev': ( 'coverage>=3.7.1', 'nose>=1.3.1', 'pep8>=1.4.6', 'pyflakes>=0.7.3', 'Sphinx>=1.2.2', 'sphinx_rtd_theme>=0.1.5', ) }, entry_points=""" [console_scripts] tangled = tangled.__main__:main [tangled.scripts] release = tangled.scripts:ReleaseCommand scaffold = tangled.scripts:ScaffoldCommand python = tangled.scripts:ShellCommand test = tangled.scripts:TestCommand """, classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', ], ) <commit_msg>Upgrade pyflakes from 0.7.3 to 0.8<commit_after>from setuptools import setup setup( name='tangled', version='0.1a8.dev0', description='Tangled namespace and utilities', long_description=open('README.rst').read(), url='http://tangledframework.org/', download_url='https://github.com/TangledWeb/tangled/tags', author='Wyatt Baldwin', author_email='self@wyattbaldwin.com', packages=[ 'tangled', 'tangled.scripts', 'tangled.tests', 'tangled.tests.dummy_package', ], extras_require={ 'dev': ( 'coverage>=3.7.1', 'nose>=1.3.1', 'pep8>=1.4.6', 'pyflakes>=0.8', 'Sphinx>=1.2.2', 'sphinx_rtd_theme>=0.1.5', ) }, entry_points=""" [console_scripts] tangled = tangled.__main__:main [tangled.scripts] release = tangled.scripts:ReleaseCommand scaffold = tangled.scripts:ScaffoldCommand python = tangled.scripts:ShellCommand test = tangled.scripts:TestCommand """, classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', ], )
7e8f5707a864e5ee115b46384b6206cc87dffe72
setup.py
setup.py
#!/usr/bin/env python
# vim: set sts=4 sw=4 et:

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

VERSION = "0.0.4"

setup (name = "tupelo",
       description = "Random code around a card game called Tuppi",
       version = VERSION,
       author = "Jari Tenhunen",
       author_email = "jari.tenhunen@iki.fi",
       license = "BSD",
       packages = ['tupelo'],
       scripts = ['scripts/tupelo', 'scripts/tupelo-server'],
       data_files = [('share/tupelo/www',
                      ['www/index.html', 'www/tupelo.js', 'www/tupelo-main.js'])],
       platforms="Python 2.5 and later.",
       test_suite = "nose.collector"
       )

#!/usr/bin/env python
# vim: set sts=4 sw=4 et:

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

VERSION = "0.0.4"

setup (name = "tupelo",
       description = "Random code around a card game called Tuppi",
       version = VERSION,
       author = "Jari Tenhunen",
       author_email = "jari.tenhunen@iki.fi",
       license = "BSD",
       packages = ['tupelo'],
       scripts = ['scripts/tupelo', 'scripts/tupelo-server'],
       data_files = [('share/tupelo/www',
                      ['www/index.html', 'www/tupelo.js', 'www/tupelo-main.js',
                       'www/tupelo.css', 'www/buttons.css']),
                     ('share/tupelo/www/img',
                      ['www/img/bg.png', 'www/img/bg-button.gif'])],
       platforms="Python 2.5 and later.",
       test_suite = "nose.collector"
       )
Add css and img files to bdist
Add css and img files to bdist
Python
bsd-3-clause
jait/tupelo
eccc88e8fc336b18348b3f7369a538fcc7d07c1a
setup.py
setup.py
#!/usr/bin/env python

from setuptools import setup

setup(
    name='upcloud-api',
    version='0.4.5',
    description='UpCloud API Client',
    author='Elias Nygren',
    maintainer='Mika Lackman',
    maintainer_email='mika.lackman@upcloud.com',
    url='https://github.com/UpCloudLtd/upcloud-python-api',
    packages=['upcloud_api', 'upcloud_api.cloud_manager'],
    download='https://github.com/UpCloudLtd/upcloud-python-api/archive/0.4.5.tar.gz',
    license='MIT',
    install_requires=[
        'requests>=2.6.0',
        'six>=1.9.0'
    ]
)

#!/usr/bin/env python

from setuptools import setup

setup(
    name='upcloud-api',
    version='0.4.5',
    description='UpCloud API Client',
    author='Elias Nygren',
    maintainer='Mika Lackman',
    maintainer_email='mika.lackman@upcloud.com',
    url='https://github.com/UpCloudLtd/upcloud-python-api',
    packages=['upcloud_api', 'upcloud_api.cloud_manager'],
    download_url='https://github.com/UpCloudLtd/upcloud-python-api/archive/0.4.5.tar.gz',
    license='MIT',
    install_requires=[
        'requests>=2.6.0',
        'six>=1.9.0'
    ]
)
Correct parameter name is download_url.
Correct parameter name is download_url.
Python
mit
UpCloudLtd/upcloud-python-api
773c0f6a5b94d502881880c922c4b2ad19b60953
setup.py
setup.py
import os
from setuptools import setup

setup(
    name = "django-jsonfield",
    version = open(os.path.join(os.path.dirname(__file__), 'jsonfield', 'VERSION')).read().strip(),
    description = "JSONField for django models",
    long_description = open("README.rst").read(),
    url = "http://bitbucket.org/schinckel/django-jsonfield/",
    author = "Matthew Schinckel",
    author_email = "matt@schinckel.net",
    packages = [
        "jsonfield",
    ],
    classifiers = [
        'Programming Language :: Python',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Framework :: Django',
    ],
    test_suite='tests.main',
    include_package_data=True,
)

import os
from setuptools import setup

setup(
    name = "django-jsonfield",
    version = open(os.path.join(os.path.dirname(__file__), 'jsonfield', 'VERSION')).read().strip(),
    description = "JSONField for django models",
    long_description = open("README.rst").read(),
    url = "http://bitbucket.org/schinckel/django-jsonfield/",
    author = "Matthew Schinckel",
    author_email = "matt@schinckel.net",
    packages = [
        "jsonfield",
    ],
    classifiers = [
        'Programming Language :: Python',
        'Programming Language :: Python :: 3',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Framework :: Django',
    ],
    test_suite='tests.main',
    include_package_data=True,
)
Add Python 3 classifier trove
Add Python 3 classifier trove
Python
bsd-3-clause
chrismeyersfsu/django-jsonfield
6802be4498bb1143f4ce4c024a3fd82921eeb937
setup.py
setup.py
# -*- coding: utf-8 -*-
#! /usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
import os

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'calibre_books.settings')

setup(
    name='calibre-books',
    author='Adam Bogdał',
    author_email='adam@bogdal.pl',
    description="Calibre server in Django",
    license='BSD',
    version='0.0.1',
    packages=find_packages(),
    include_package_data=True,
    install_requires=[
        'Django>=1.6',
        'django-bootstrap3>=4.8.2',
        'django-dropbox==0.0.2',
        'dj_database_url>=0.2.2',
        'python-memcached==1.53',
        'PIL==1.1.7',
        'gunicorn==19.1.0',
        'psycopg2==2.5.3',
    ],
    entry_points={
        'console_scripts': ['manage.py = calibre_books:manage']},
)

#! /usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
import os

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'calibre_books.settings')

setup(
    name='calibre-books',
    author='Adam Bogdał',
    author_email='adam@bogdal.pl',
    description="Calibre server in Django",
    license='BSD',
    version='0.0.1',
    packages=find_packages(),
    include_package_data=True,
    install_requires=[
        'Django>=1.6',
        'django-bootstrap3>=4.8.2',
        'django-dropbox==0.0.2',
        'dj_database_url>=0.2.2',
        'python-memcached==1.53',
        'Pillow==2.5.2',
        'gunicorn==19.1.0',
        'psycopg2==2.5.3',
    ],
    entry_points={
        'console_scripts': ['manage.py = calibre_books:manage']},
)
Use pillow instead of ordinary pil
Use pillow instead of ordinary pil
Python
bsd-2-clause
bogdal/calibre-books,bogdal/calibre-books
273786a0e830bd582294419c5a93211552e692ba
setup.py
setup.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

with open('README.rst') as readme_file:
    readme = readme_file.read()

with open('HISTORY.rst') as history_file:
    history = history_file.read().replace('.. :changelog:', '')

requirements = [
    # TODO: put package requirements here
]

test_requirements = [
    # TODO: put package test requirements here
]

setup(
    name='pash',
    version='1.1.1',
    description="Module for interacting with os.subprocess easily.",
    long_description=readme + '\n\n' + history,
    author="Ian McFarlane",
    author_email='iansmcfarlane@gmail.com',
    url='https://github.com/iansmcf/pash',
    py_modules = ['pash'],
    include_package_data=True,
    install_requires=requirements,
    license="BSD",
    zip_safe=False,
    keywords='pash',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Natural Language :: English',
        'Programming Language :: Python :: 2.7',
    ],
    test_suite='tests',
    tests_require=test_requirements
)

#!/usr/bin/env python
# -*- coding: utf-8 -*-

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

with open('README.rst') as readme_file:
    readme = readme_file.read()

with open('HISTORY.rst') as history_file:
    history = history_file.read().replace('.. :changelog:', '')

requirements = [
    # TODO: put package requirements here
]

test_requirements = [
    # TODO: put package test requirements here
]

setup(
    name='pash',
    version='1.1.2',
    description="Module for interacting with os.subprocess easily.",
    long_description=readme + '\n\n' + history,
    author="Ian McFarlane",
    author_email='iansmcfarlane@gmail.com',
    url='https://github.com/iansmcf/pash',
    py_modules = ['pash'],
    include_package_data=True,
    install_requires=requirements,
    license="BSD",
    zip_safe=False,
    keywords='pash',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Natural Language :: English',
        'Programming Language :: Python :: 2.7',
    ],
    test_suite='tests',
    tests_require=test_requirements
)
Add int detection to interpreting results
Add int detection to interpreting results
Python
bsd-3-clause
iansmcf/pash
6524d4711e5fa03b1f11979fd3d0319cd268d116
setup.py
setup.py
# -*- coding: utf-8 -*-
from distutils.core import setup
import refmanage

setup(name="refmanage",
      version=refmanage.__version__,
      author="Joshua Ryan Smith",
      author_email="joshua.r.smith@gmail.com",
      packages=["refmanage"],
      url="https://github.com/jrsmith3/refmanage",
      description="Manage a BibTeX database",
      classifiers=["Programming Language :: Python",
                   "License :: OSI Approved :: MIT License",
                   "Operating System :: OS Independent",
                   "Development Status :: 3 - Alpha",
                   "Intended Audience :: Science/Research",
                   "Topic :: Text Processing",
                   "Natural Language :: English",
                   ],
      install_requires=["pybtex"],
      )

# -*- coding: utf-8 -*-
from distutils.core import setup
import refmanage

setup(name="refmanage",
      version=refmanage.__version__,
      author="Joshua Ryan Smith",
      author_email="joshua.r.smith@gmail.com",
      packages=["refmanage"],
      url="https://github.com/jrsmith3/refmanage",
      description="Manage a BibTeX database",
      classifiers=["Programming Language :: Python",
                   "License :: OSI Approved :: MIT License",
                   "Operating System :: OS Independent",
                   "Development Status :: 3 - Alpha",
                   "Intended Audience :: Science/Research",
                   "Topic :: Text Processing",
                   "Natural Language :: English",
                   ],
      install_requires=["pybtex"],
      entry_points={"console_scripts":"ref=refmanage.refmanage:main"},
      )
Add entry_point for command-line application
Add entry_point for command-line application Closes #31.
Python
mit
jrsmith3/refmanage
e4d4e1b79bea641c66dfafe486d94a87c63e6edb
setup.py
setup.py
#!/usr/bin/env python
from codecs import open

from setuptools import find_packages, setup

with open('README.rst', 'r', 'utf-8') as f:
    readme = f.read()

setup(
    name='django-latest-tweets',
    version='0.4.5',
    description='Latest Tweets for Django',
    long_description=readme,
    url='https://github.com/blancltd/django-latest-tweets',
    maintainer='Blanc Ltd',
    maintainer_email='studio@blanc.ltd.uk',
    platforms=['any'],
    install_requires=[
        'twitter>=1.9.1',
        'requests>=2.0',
    ],
    packages=find_packages(),
    include_package_data=True,
    classifiers=[
        'Environment :: Web Environment',
        'Framework :: Django',
        'Framework :: Django :: 1.8',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
    ],
    license='BSD',
)

#!/usr/bin/env python
from codecs import open

from setuptools import find_packages, setup

with open('README.rst', 'r', 'utf-8') as f:
    readme = f.read()

setup(
    name='django-latest-tweets',
    version='0.4.5',
    description='Latest Tweets for Django',
    long_description=readme,
    url='https://github.com/developersociety/django-latest-tweets',
    maintainer='Blanc Ltd',
    maintainer_email='studio@blanc.ltd.uk',
    platforms=['any'],
    install_requires=[
        'twitter>=1.9.1',
        'requests>=2.0',
    ],
    packages=find_packages(),
    include_package_data=True,
    classifiers=[
        'Environment :: Web Environment',
        'Framework :: Django',
        'Framework :: Django :: 1.8',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
    ],
    license='BSD',
)
Update GitHub repos from blancltd to developersociety
Update GitHub repos from blancltd to developersociety
Python
bsd-3-clause
blancltd/django-latest-tweets