| column | type | min length | max length |
|---|---|---|---|
| commit | string | 40 | 40 |
| old_file | string | 4 | 118 |
| new_file | string | 4 | 118 |
| old_contents | string | 0 | 2.94k |
| new_contents | string | 1 | 4.43k |
| subject | string | 15 | 444 |
| message | string | 16 | 3.45k |
| lang | categorical (1 class) | n/a | n/a |
| license | categorical (13 classes) | n/a | n/a |
| repos | string | 5 | 43.2k |
| prompt | string | 17 | 4.58k |
| response | string | 1 | 4.43k |
| prompt_tagged | string | 58 | 4.62k |
| response_tagged | string | 1 | 4.43k |
| text | string | 132 | 7.29k |
| text_tagged | string | 173 | 7.33k |
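The `prompt`, `response`, and `text` columns are deterministic concatenations of the fields above (`old_contents`, `message`, `new_contents`), and the `*_tagged` variants wrap the same pieces in `<commit_before>`, `<commit_msg>`, and `<commit_after>` markers. Assuming the rows are published as a Hugging Face dataset, a minimal loading sketch (the dataset path below is a placeholder, not the real repository name):

```python
from datasets import load_dataset

# Placeholder path -- substitute the dataset's actual hub name.
ds = load_dataset("user/python-commit-messages", split="train")

row = ds[0]
print(row["subject"])             # commit subject line
print(row["old_file"])            # path of the file before the commit
print(row["new_contents"][:200])  # file contents after the commit
```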
commit: 8c950b364cc22d800cfac7af347b6bed3d012d6b
old_file: pyseeta/config.py
new_file: pyseeta/config.py

old_contents:

```python
import os
import sys

config = {
    'win32': {
        'detector': 'seeta_fd_lib.dll',
        'aligner': 'seeta_fa_lib.dll',
        'identifier': 'seeta_fi_lib.dll'
    },
    'darwin': {
        'detector': 'libseeta_fd_lib.dylib',
        'aligner': 'libseeta_fa_lib.dylib',
        'identifier': 'libseeta_fi_lib.dylib'
    },
    'linux': {
        'detector': 'libseeta_fd_lib.so',
        'aligner': 'libseeta_fa_lib.so',
        'identifier': 'libseeta_fi_lib.so'
    }
}


def get_detector_library():
    dir = os.path.dirname(__file__)
    return os.path.join(dir, '../SeetaFaceEngine/Release', config[sys.platform]['detector'])


def get_aligner_library():
    dir = os.path.dirname(__file__)
    return os.path.join(dir, '../SeetaFaceEngine/Release', config[sys.platform]['aligner'])


def get_identifier_library():
    dir = os.path.dirname(__file__)
    return os.path.join(dir, '../SeetaFaceEngine/Release', config[sys.platform]['identifier'])
```

new_contents:

```python
import os
import sys

config = {
    'win32': {
        'detector': 'seeta_fd_lib.dll',
        'aligner': 'seeta_fa_lib.dll',
        'identifier': 'seeta_fi_lib.dll'
    },
    'darwin': {
        'detector': 'libseeta_fd_lib.dylib',
        'aligner': 'libseeta_fa_lib.dylib',
        'identifier': 'libseeta_fi_lib.dylib'
    },
    'linux': {
        'detector': 'libseeta_fd_lib.so',
        'aligner': 'libseeta_fa_lib.so',
        'identifier': 'libseeta_fi_lib.so'
    },
    # Ubuntu 16.04 x64 Python 2.7.12 (default, Nov 19 2016, 06:48:10) sys.platform return 'linux2'
    'linux2': {
        'detector': 'libseeta_fd_lib.so',
        'aligner': 'libseeta_fa_lib.so',
        'identifier': 'libseeta_fi_lib.so'
    }
}


def get_detector_library():
    dir = os.path.dirname(__file__)
    return os.path.join(dir, '../SeetaFaceEngine/Release', config[sys.platform]['detector'])


def get_aligner_library():
    dir = os.path.dirname(__file__)
    return os.path.join(dir, '../SeetaFaceEngine/Release', config[sys.platform]['aligner'])


def get_identifier_library():
    dir = os.path.dirname(__file__)
    return os.path.join(dir, '../SeetaFaceEngine/Release', config[sys.platform]['identifier'])
```
subject: Fix crash when Ubuntu 16.04 x64 Python 2.7.12 (default, Nov 19 2016, 06:48:10) sys.platform returns 'linux2'
message: Fix crash when Ubuntu 16.04 x64 Python 2.7.12 (default, Nov 19 2016, 06:48:10) sys.platform returns 'linux2'
lang: Python
license: mit
repos: TuXiaokang/pyseeta
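A side note on the fix above, as an illustration rather than repository code: instead of duplicating the 'linux' entry, the platform key could be normalized once, since Python 2 on some distributions reports 'linux2':

```python
import sys

# Normalize 'linux2' (Python 2 on e.g. Ubuntu 16.04) to 'linux' so a single
# config entry covers both interpreters; `config` is the mapping defined above.
platform = 'linux' if sys.platform.startswith('linux') else sys.platform
detector_library = config[platform]['detector']
```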
commit: 21a4c6c5cdf3461ef2bd6048a7399044e8b1a0e8
old_file: spyder_unittest/backend/pytestworker.py
new_file: spyder_unittest/backend/pytestworker.py

old_contents:

```python
# -*- coding: utf-8 -*-
#
# Copyright © 2017 Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
"""
Script for running py.test tests.

This script is meant to be run in a separate process by a PyTestRunner.
It runs tests via the py.test framework and prints the results so that
the PyTestRunner can read them.
"""

# Standard library imports
import sys

# Third party imports
import pytest

pytest.main(sys.argv[1:])
```

new_contents:

```python
# -*- coding: utf-8 -*-
#
# Copyright © 2017 Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
"""
Script for running py.test tests.

This script is meant to be run in a separate process by a PyTestRunner.
It runs tests via the py.test framework and prints the results so that
the PyTestRunner can read them.
"""

# Standard library imports
import sys

# Third party imports
import pytest


class SpyderPlugin():
    """Pytest plugin which reports in format suitable for Spyder."""

    def pytest_itemcollected(self, item):
        """Called by py.test when a test item is collected."""
        name = item.name
        module = item.parent.name
        module = module.replace('/', '.')  # convert path to dotted path
        if module.endswith('.py'):
            module = module[:-3]
        print('pytest_item_collected(name={}, module={})'.format(name, module))


pytest.main(sys.argv[1:], plugins=[SpyderPlugin()])
```
subject: Add py.test plugin which prints out test names as they are collected
message: Add py.test plugin which prints out test names as they are collected
lang: Python
license: mit
repos: jitseniesen/spyder-unittest
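As an illustration (not code from the repository): a runner consuming this worker's stdout could recover the name/module pairs with a small regex over the printed lines, e.g.:

```python
import re

# Matches the lines emitted by SpyderPlugin.pytest_itemcollected above.
PATTERN = re.compile(r'pytest_item_collected\(name=(.+?), module=(.+?)\)')

def parse_collected(lines):
    """Yield (test name, dotted module path) pairs from worker output."""
    for line in lines:
        match = PATTERN.match(line)
        if match:
            yield match.group(1), match.group(2)
```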
commit: 2fdc5943bc6f23c6d56d6bb86e6a5bf33338237e
old_file: digikey/admin.py
new_file: digikey/admin.py

old_contents:

```python
from django.contrib import admin
from digikey.models import Components, Orders, Order_Details, Groups


class ComponentInline(admin.TabularInline):
    model = Order_Details
    extra = 1


class OrderInline(admin.TabularInline):
    model = Orders
    extra = 1
    fieldsets = [
        ('Payment information', {'fields': ['paid', 'paid_account', 'paid_date']}),
        ('Additional Shipping information', {'fields': ['receiver', 'shipping_address', 'phone_number']}),
    ]


@admin.register(Orders)
class OrderAdmin(admin.ModelAdmin):
    list_display = ('pk', 'group_id', 'paid', 'sent')
    fieldsets = [
        (None, {'fields': ['Orderer', 'group_id']}),
        ('Payment information', {'fields': ['paid', 'paid_account', 'paid_date']}),
        ('Uplink information', {'fields': ['sent', 'sent_date']}),
        ('Additional Shipping information', {'fields': ['receiver', 'shipping_address', 'phone_number']}),
    ]
    inlines = [ComponentInline]


@admin.register(Groups)
class GroupAdmin(admin.ModelAdmin):
    fieldsets = [
        (None, {'fields': ['ordered', 'orderdate']}),
    ]
    inlines = [OrderInline]


@admin.register(Components)
class ComponentAdmin(admin.ModelAdmin):
    list_display = ('part_number', 'unit_price')
```

new_contents:

```python
from django.contrib import admin
from digikey.models import Components, Orders, Order_Details, Groups


class ComponentInline(admin.TabularInline):
    model = Order_Details
    extra = 1


class OrderInline(admin.TabularInline):
    model = Orders
    extra = 1
    fieldsets = [
        ('Payment information', {'fields': ['paid', 'paid_account', 'paid_date']}),
        ('Additional Shipping information', {'fields': ['receiver', 'shipping_address', 'phone_number']}),
    ]


@admin.register(Orders)
class OrderAdmin(admin.ModelAdmin):
    list_filter = ['sent', 'paid', 'sent_date']
    list_display = ('pk', 'group_id', 'paid', 'sent')
    fieldsets = [
        (None, {'fields': ['Orderer', 'group_id']}),
        ('Payment information', {'fields': ['paid', 'paid_account', 'paid_date']}),
        ('Uplink information', {'fields': ['sent', 'sent_date']}),
        ('Additional Shipping information', {'fields': ['receiver', 'shipping_address', 'phone_number']}),
    ]
    inlines = [ComponentInline]


@admin.register(Groups)
class GroupAdmin(admin.ModelAdmin):
    list_filter = ['ordered', 'orderdate']
    fieldsets = [
        (None, {'fields': ['ordered', 'orderdate']}),
    ]
    inlines = [OrderInline]


@admin.register(Components)
class ComponentAdmin(admin.ModelAdmin):
    list_display = ('part_number', 'unit_price')
```
subject: Add filter option for order and order group
message: Add filter option for order and order group
lang: Python
license: mit
repos: sonicyang/chiphub,sonicyang/chiphub,sonicyang/chiphub
commit: de2568dd1feec001098574e28848ca0ef8bca475
old_file: search/sorting.py
new_file: search/sorting.py

old_contents:

```python
def sort_by_popularity(items):
    return sorted(items, key=lambda product: product.popularity, reverse=True)
```

new_contents:

```python
def sort_by_property(prop):
    def _sort_by_property(items):
        return sorted(items, key=lambda item: getattr(item, prop), reverse=True)
    return _sort_by_property


sort_by_popularity = sort_by_property('popularity')
```
subject: Redefine sort function as a generic one. Some overhead, but it can be used for any property
message: Redefine sort function as a generic one. Some overhead, but it can be used for any property
lang: Python
license: mit
repos: vanng822/geosearch,vanng822/geosearch,vanng822/geosearch
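A quick usage illustration of the factory above (the `Product` type is hypothetical, not from the repository):

```python
from collections import namedtuple

Product = namedtuple('Product', ['name', 'popularity', 'price'])
items = [Product('a', 3, 9.0), Product('b', 7, 2.5)]

# Build descending sorters for any attribute without writing new functions.
sort_by_price = sort_by_property('price')
print(sort_by_popularity(items)[0].name)  # 'b': highest popularity first
print(sort_by_price(items)[0].name)       # 'a': highest price first
```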
commit: 729de81f7d76b50d1d6810bc23fab45c60020091
old_file: hel/utils/tests/__init__.py
new_file: hel/utils/tests/__init__.py

old_contents:

```python
# -*- coding: utf-8 -*-

# http://stackoverflow.com/a/8866661
def are_equal(a, b):
    unmatched = list(b)
    for element in a:
        try:
            unmatched.remove(element)
        except ValueError:
            return False
    return not unmatched
```

new_contents:

```python
# -*- coding: utf-8 -*-

# http://stackoverflow.com/a/8866661
def are_equal(a, b):
    unmatched = list(b)
    for element in a:
        try:
            unmatched.remove(element)
        except ValueError:
            return False
    return not unmatched
```
subject: Add blank lines the PEP0008 check missed so much
message: Add blank lines the PEP0008 check missed so much
lang: Python
license: mit
repos:
hel-repo/hel,MoonlightOwl/hel,MoonlightOwl/hel,hel-repo/hel,MoonlightOwl/hel
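For illustration (not repository code): `are_equal` is an order-insensitive but multiplicity-sensitive comparison, i.e. multiset equality over two iterables:

```python
# Order does not matter, but element counts do.
assert are_equal([1, 2, 2, 3], [3, 2, 1, 2])   # same multiset
assert not are_equal([1, 2, 2], [1, 2])        # extra 2 in a has no match
assert not are_equal([1, 2], [1, 2, 3])        # leftover 3 in b
```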
commit: 05001e4a60d7fe0babf8f66ba9a5cecd2ffc4e85
old_file: geotrek/trekking/templatetags/trekking_tags.py
new_file: geotrek/trekking/templatetags/trekking_tags.py

old_contents:

```python
from datetime import datetime, timedelta

from django import template
from django.conf import settings
from django.utils.translation import ugettext_lazy as _

register = template.Library()


@register.filter
def duration(value):
    """ Returns a duration in hours to a human readable version (minutes, days, ...) """
    seconds = timedelta(minutes=float(value) * 60)
    duration = datetime(1, 1, 1) + seconds
    days = duration.day - 1

    if days >= 8:
        return _("More than %s days") % 8
    if days > 1:
        return _("%s days") % days
    if days <= 1:
        hours = (settings.TREK_DAY_DURATION * days) + duration.hour
        if hours > settings.TREK_DAY_DURATION:
            return _("%s days") % 2
    if duration.hour > 0 and duration.minute > 0:
        return _("%(hour)s h %(min)s") % {'hour': duration.hour, 'min': duration.minute}
    if duration.hour > 0:
        return _("%(hour)s h") % {'hour': duration.hour}
    return _("%s min") % duration.minute
```

new_contents:

```python
from datetime import datetime, timedelta

from django import template
from django.conf import settings
from django.utils.translation import ugettext_lazy as _

register = template.Library()


@register.filter
def duration(value):
    """ Returns a duration in hours to a human readable version (minutes, days, ...) """
    if value is None:
        return u""
    seconds = timedelta(minutes=float(value) * 60)
    duration = datetime(1, 1, 1) + seconds
    days = duration.day - 1

    if days >= 8:
        return _("More than %s days") % 8
    if days > 1:
        return _("%s days") % days
    if days <= 1:
        hours = (settings.TREK_DAY_DURATION * days) + duration.hour
        if hours > settings.TREK_DAY_DURATION:
            return _("%s days") % 2
    if duration.hour > 0 and duration.minute > 0:
        return _("%(hour)s h %(min)s") % {'hour': duration.hour, 'min': duration.minute}
    if duration.hour > 0:
        return _("%(hour)s h") % {'hour': duration.hour}
    return _("%s min") % duration.minute
```
subject: Fix pretty duration for null value
message: Fix pretty duration for null value
lang: Python
license: bsd-2-clause
repos:
mabhub/Geotrek,mabhub/Geotrek,makinacorpus/Geotrek,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,Anaethelion/Geotrek,Anaethelion/Geotrek,GeotrekCE/Geotrek-admin,johan--/Geotrek,Anaethelion/Geotrek,mabhub/Geotrek,mabhub/Geotrek,johan--/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,johan--/Geotrek,makinacorpus/Geotrek,Anaethelion/Geotrek,johan--/Geotrek,GeotrekCE/Geotrek-admin
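For illustration (not repository code): in a template the filter is applied after loading this tag library; calling the function directly shows the same behavior, including the new None handling. The variable name and outputs below are examples, assuming the TREK_DAY_DURATION setting exceeds one hour:

```python
# In a template: {% load trekking_tags %} ... {{ trek.duration|duration }}
# Calling the filter function directly:
print(duration(1.5))   # "1 h 30" (1.5 hours)
print(duration(None))  # "" -- previously float(None) raised a TypeError
```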
commit: d5817c04a57d8c593335450b0613fd683fdf5aec
old_file: pycalphad/__init__.py
new_file: pycalphad/__init__.py

old_contents:

```python
#first import dill, which populates itself into pickle's dispatch
import dill
import pickle
# save the MethodDescriptorType from dill
MethodDescriptorType = type(type.__dict__['mro'])
if pickle.__dict__.get('_Pickler', None):
    MethodDescriptorWrapper = pickle._Pickler.dispatch[MethodDescriptorType]
else:
    MethodDescriptorWrapper = pickle.Pickler.dispatch[MethodDescriptorType]
# cloudpickle does the same, so let it update the dispatch table
import cloudpickle
# now, put the saved MethodDescriptorType back in
if pickle.__dict__.get('_Pickler', None):
    pickle._Pickler.dispatch[MethodDescriptorType] = MethodDescriptorWrapper
else:
    pickle.Pickler.dispatch[MethodDescriptorType] = MethodDescriptorWrapper

import pycalphad.variables as v
from pycalphad.model import Model
from pycalphad.io.database import Database

# Trigger format extension hooks
import pycalphad.io.tdb

from pycalphad.core.calculate import calculate
from pycalphad.core.equilibrium import equilibrium
from pycalphad.core.equilibrium import EquilibriumError, ConditionError
from pycalphad.plot.binary import binplot
from pycalphad.plot.eqplot import eqplot

from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
```

new_contents:

```python
# This unfortunate monkey patch is necessary to make Py27, Py33 and Py34 work
# Source: http://stackoverflow.com/questions/34124270/pickling-method-descriptor-objects-in-python
# first import dill, which populates itself into pickle's dispatch
import dill
import pickle
# save the MethodDescriptorType from dill
MethodDescriptorType = type(type.__dict__['mro'])
if pickle.__dict__.get('_Pickler', None):
    MethodDescriptorWrapper = pickle._Pickler.dispatch[MethodDescriptorType]
else:
    MethodDescriptorWrapper = pickle.Pickler.dispatch[MethodDescriptorType]
# cloudpickle does the same, so let it update the dispatch table
import cloudpickle
# now, put the saved MethodDescriptorType back in
if pickle.__dict__.get('_Pickler', None):
    pickle._Pickler.dispatch[MethodDescriptorType] = MethodDescriptorWrapper
else:
    pickle.Pickler.dispatch[MethodDescriptorType] = MethodDescriptorWrapper

import pycalphad.variables as v
from pycalphad.model import Model
from pycalphad.io.database import Database

# Trigger format extension hooks
import pycalphad.io.tdb

from pycalphad.core.calculate import calculate
from pycalphad.core.equilibrium import equilibrium
from pycalphad.core.equilibrium import EquilibriumError, ConditionError
from pycalphad.plot.binary import binplot
from pycalphad.plot.eqplot import eqplot

from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
```
subject: Add source of pickle hack
message: DOC: Add source of pickle hack
lang: Python
license: mit
repos: tkphd/pycalphad,tkphd/pycalphad,tkphd/pycalphad
acec33cacd117b437dc417b1934c839ebec5d07a
compute_pi.py
compute_pi.py
"""Compute pi.""" from decimal import Decimal, getcontext import argparse import itertools class ComputePi: """Compute pi to a specific precision using multiple algorithms.""" @staticmethod def BBP(precision): """Compute pi using the Bailey–Borwein–Plouffe formula.""" getcontext().prec = precision + 20 pi = Decimal(0) for k in itertools.count(): term = (Decimal(4)/(8*k+1) - Decimal(2)/(8*k+4) - Decimal(1)/(8*k+5) - Decimal(1)/(8*k+6)) term /= Decimal(16)**k pi += term if term < Decimal(10)**(-precision-10): break pi = str(pi)[:-19] return pi if __name__ == '__main__': parser = argparse.ArgumentParser(description='Calculates pi.') parser.add_argument('--precision', type=int, default=100, help='The desired precision of pi (default: 100 digits)') args = parser.parse_args() pi_computer = ComputePi() print(pi_computer.BBP(args.precision))
"""Compute pi.""" from decimal import Decimal, getcontext import argparse import itertools class ComputePi: """Compute pi to a specific precision using multiple algorithms.""" @staticmethod def BBP(precision): """Compute pi using the Bailey-Borwein-Plouffe formula.""" getcontext().prec = precision + 20 pi = Decimal(0) for k in itertools.count(): term = (Decimal(4)/(8*k+1) - Decimal(2)/(8*k+4) - Decimal(1)/(8*k+5) - Decimal(1)/(8*k+6)) term /= Decimal(16)**k pi += term if term < Decimal(10)**(-precision-10): break pi = str(pi)[:-19] return pi if __name__ == '__main__': parser = argparse.ArgumentParser(description='Calculates pi.') parser.add_argument('--precision', type=int, default=100, help='The desired precision of pi (default: 100 digits)') args = parser.parse_args() pi_computer = ComputePi() print(pi_computer.BBP(args.precision))
Remove non-ascii-chars in BBP name
Remove non-ascii-chars in BBP name
Python
mit
jakobkogler/pi_memorize
"""Compute pi.""" from decimal import Decimal, getcontext import argparse import itertools class ComputePi: """Compute pi to a specific precision using multiple algorithms.""" @staticmethod def BBP(precision): """Compute pi using the Bailey–Borwein–Plouffe formula.""" getcontext().prec = precision + 20 pi = Decimal(0) for k in itertools.count(): term = (Decimal(4)/(8*k+1) - Decimal(2)/(8*k+4) - Decimal(1)/(8*k+5) - Decimal(1)/(8*k+6)) term /= Decimal(16)**k pi += term if term < Decimal(10)**(-precision-10): break pi = str(pi)[:-19] return pi if __name__ == '__main__': parser = argparse.ArgumentParser(description='Calculates pi.') parser.add_argument('--precision', type=int, default=100, help='The desired precision of pi (default: 100 digits)') args = parser.parse_args() pi_computer = ComputePi() print(pi_computer.BBP(args.precision)) Remove non-ascii-chars in BBP name
"""Compute pi.""" from decimal import Decimal, getcontext import argparse import itertools class ComputePi: """Compute pi to a specific precision using multiple algorithms.""" @staticmethod def BBP(precision): """Compute pi using the Bailey-Borwein-Plouffe formula.""" getcontext().prec = precision + 20 pi = Decimal(0) for k in itertools.count(): term = (Decimal(4)/(8*k+1) - Decimal(2)/(8*k+4) - Decimal(1)/(8*k+5) - Decimal(1)/(8*k+6)) term /= Decimal(16)**k pi += term if term < Decimal(10)**(-precision-10): break pi = str(pi)[:-19] return pi if __name__ == '__main__': parser = argparse.ArgumentParser(description='Calculates pi.') parser.add_argument('--precision', type=int, default=100, help='The desired precision of pi (default: 100 digits)') args = parser.parse_args() pi_computer = ComputePi() print(pi_computer.BBP(args.precision))
<commit_before>"""Compute pi.""" from decimal import Decimal, getcontext import argparse import itertools class ComputePi: """Compute pi to a specific precision using multiple algorithms.""" @staticmethod def BBP(precision): """Compute pi using the Bailey–Borwein–Plouffe formula.""" getcontext().prec = precision + 20 pi = Decimal(0) for k in itertools.count(): term = (Decimal(4)/(8*k+1) - Decimal(2)/(8*k+4) - Decimal(1)/(8*k+5) - Decimal(1)/(8*k+6)) term /= Decimal(16)**k pi += term if term < Decimal(10)**(-precision-10): break pi = str(pi)[:-19] return pi if __name__ == '__main__': parser = argparse.ArgumentParser(description='Calculates pi.') parser.add_argument('--precision', type=int, default=100, help='The desired precision of pi (default: 100 digits)') args = parser.parse_args() pi_computer = ComputePi() print(pi_computer.BBP(args.precision)) <commit_msg>Remove non-ascii-chars in BBP name<commit_after>
"""Compute pi.""" from decimal import Decimal, getcontext import argparse import itertools class ComputePi: """Compute pi to a specific precision using multiple algorithms.""" @staticmethod def BBP(precision): """Compute pi using the Bailey-Borwein-Plouffe formula.""" getcontext().prec = precision + 20 pi = Decimal(0) for k in itertools.count(): term = (Decimal(4)/(8*k+1) - Decimal(2)/(8*k+4) - Decimal(1)/(8*k+5) - Decimal(1)/(8*k+6)) term /= Decimal(16)**k pi += term if term < Decimal(10)**(-precision-10): break pi = str(pi)[:-19] return pi if __name__ == '__main__': parser = argparse.ArgumentParser(description='Calculates pi.') parser.add_argument('--precision', type=int, default=100, help='The desired precision of pi (default: 100 digits)') args = parser.parse_args() pi_computer = ComputePi() print(pi_computer.BBP(args.precision))
"""Compute pi.""" from decimal import Decimal, getcontext import argparse import itertools class ComputePi: """Compute pi to a specific precision using multiple algorithms.""" @staticmethod def BBP(precision): """Compute pi using the Bailey–Borwein–Plouffe formula.""" getcontext().prec = precision + 20 pi = Decimal(0) for k in itertools.count(): term = (Decimal(4)/(8*k+1) - Decimal(2)/(8*k+4) - Decimal(1)/(8*k+5) - Decimal(1)/(8*k+6)) term /= Decimal(16)**k pi += term if term < Decimal(10)**(-precision-10): break pi = str(pi)[:-19] return pi if __name__ == '__main__': parser = argparse.ArgumentParser(description='Calculates pi.') parser.add_argument('--precision', type=int, default=100, help='The desired precision of pi (default: 100 digits)') args = parser.parse_args() pi_computer = ComputePi() print(pi_computer.BBP(args.precision)) Remove non-ascii-chars in BBP name"""Compute pi.""" from decimal import Decimal, getcontext import argparse import itertools class ComputePi: """Compute pi to a specific precision using multiple algorithms.""" @staticmethod def BBP(precision): """Compute pi using the Bailey-Borwein-Plouffe formula.""" getcontext().prec = precision + 20 pi = Decimal(0) for k in itertools.count(): term = (Decimal(4)/(8*k+1) - Decimal(2)/(8*k+4) - Decimal(1)/(8*k+5) - Decimal(1)/(8*k+6)) term /= Decimal(16)**k pi += term if term < Decimal(10)**(-precision-10): break pi = str(pi)[:-19] return pi if __name__ == '__main__': parser = argparse.ArgumentParser(description='Calculates pi.') parser.add_argument('--precision', type=int, default=100, help='The desired precision of pi (default: 100 digits)') args = parser.parse_args() pi_computer = ComputePi() print(pi_computer.BBP(args.precision))
<commit_before>"""Compute pi.""" from decimal import Decimal, getcontext import argparse import itertools class ComputePi: """Compute pi to a specific precision using multiple algorithms.""" @staticmethod def BBP(precision): """Compute pi using the Bailey–Borwein–Plouffe formula.""" getcontext().prec = precision + 20 pi = Decimal(0) for k in itertools.count(): term = (Decimal(4)/(8*k+1) - Decimal(2)/(8*k+4) - Decimal(1)/(8*k+5) - Decimal(1)/(8*k+6)) term /= Decimal(16)**k pi += term if term < Decimal(10)**(-precision-10): break pi = str(pi)[:-19] return pi if __name__ == '__main__': parser = argparse.ArgumentParser(description='Calculates pi.') parser.add_argument('--precision', type=int, default=100, help='The desired precision of pi (default: 100 digits)') args = parser.parse_args() pi_computer = ComputePi() print(pi_computer.BBP(args.precision)) <commit_msg>Remove non-ascii-chars in BBP name<commit_after>"""Compute pi.""" from decimal import Decimal, getcontext import argparse import itertools class ComputePi: """Compute pi to a specific precision using multiple algorithms.""" @staticmethod def BBP(precision): """Compute pi using the Bailey-Borwein-Plouffe formula.""" getcontext().prec = precision + 20 pi = Decimal(0) for k in itertools.count(): term = (Decimal(4)/(8*k+1) - Decimal(2)/(8*k+4) - Decimal(1)/(8*k+5) - Decimal(1)/(8*k+6)) term /= Decimal(16)**k pi += term if term < Decimal(10)**(-precision-10): break pi = str(pi)[:-19] return pi if __name__ == '__main__': parser = argparse.ArgumentParser(description='Calculates pi.') parser.add_argument('--precision', type=int, default=100, help='The desired precision of pi (default: 100 digits)') args = parser.parse_args() pi_computer = ComputePi() print(pi_computer.BBP(args.precision))
27a33628310cbd68632f0e8b514de731a033f8e6
IPython/utils/tests/test_shimmodule.py
IPython/utils/tests/test_shimmodule.py
import sys
import warnings

from IPython.utils.shimmodule import ShimWarning


def test_shim_warning():
    sys.modules.pop('IPython.config', None)
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter("always")
        import IPython.config
        assert len(w) == 1
        assert issubclass(w[-1].category, ShimWarning)
import pytest
import sys

from IPython.utils.shimmodule import ShimWarning


def test_shim_warning():
    sys.modules.pop('IPython.config', None)
    with pytest.warns(ShimWarning):
        import IPython.config
Make test_shim_warning not fail on unrelated warnings
Make test_shim_warning not fail on unrelated warnings
Python
bsd-3-clause
ipython/ipython,ipython/ipython
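The rewrite in this record trades a hand-rolled warnings.catch_warnings harness, which breaks as soon as any unrelated warning fires, for pytest.warns, which only requires that the named warning occur. A self-contained sketch of the idiom with a stand-in warning class (UserWarning here, since ShimWarning needs IPython installed):

    import warnings

    import pytest

    def make_noise():
        warnings.warn('unrelated', DeprecationWarning)   # would break a len(w) == 1 check
        warnings.warn('the interesting one', UserWarning)

    def test_expected_warning():
        with pytest.warns(UserWarning):                  # unrelated warnings are ignored
            make_noise()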
ab1cb4b9a0bae34e0f573602b7d45a4a9e5c79a5
alexBot/cogs/fun.py
alexBot/cogs/fun.py
import logging
import re

from ..tools import Cog
from ..tools import get_guild_config

log = logging.getLogger(__name__)

ayygen = re.compile('[aA][yY][Yy][yY]*')


class Fun(Cog):
    """contains the on message for ayy"""

    async def on_message(self, message):
        if self.bot.location == 'laptop' or message.guild is None:
            return
        if (await get_guild_config(self.bot, message.guild.id))['ayy'] is False:
            return
        if ayygen.fullmatch(message.content):
            await message.channel.send("lmao")


def setup(bot):
    bot.add_cog(Fun(bot))
import logging
import re

from ..tools import Cog
from ..tools import get_guild_config

log = logging.getLogger(__name__)

ayygen = re.compile('[aA][yY][Yy][yY]*')


class Fun(Cog):
    @commands.command()
    async def cat(self, ctx):
        cat = await get_json(self.bot.session, 'http://random.cat/meow')
        ret = discord.Embed()
        ret.set_image(url=cat['file'])
        await ctx.send(embed=ret)

    @commands.command()
    async def dog(self, ctx):
        dog = None
        while dog is None or dog['url'][-3:].lower() == 'mp4':
            dog = await get_json(self.bot.session, 'https://random.dog/woof.json')
            log.debug(dog['url'])
        ret = discord.Embed()
        ret.set_image(url=dog['url'])
        await ctx.send(embed=ret)

    async def on_message(self, message):
        if self.bot.location == 'laptop' or message.guild is None:
            return
        if (await get_guild_config(self.bot, message.guild.id))['ayy'] is False:
            return
        if ayygen.fullmatch(message.content):
            await message.channel.send("lmao")


def setup(bot):
    bot.add_cog(Fun(bot))
Revert "remove dog and cat commands( see random.cat/help.html for some background on this)"
Revert "remove dog and cat commands( see random.cat/help.html for some background on this)" This reverts commit 2195ec2a3eea800201dc177fa674b39a440d4ac2.
Python
mit
mralext20/alex-bot
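Note that the revert above restores the cat and dog commands without restoring their imports: as written, the file references commands, discord, and get_json, none of which are defined in the new contents. The repository's real helper is not part of this record, so the following is only a plausible shape for it under discord.py/aiohttp conventions, not the project's actual code:

    import aiohttp

    async def get_json(session: aiohttp.ClientSession, url: str) -> dict:
        """Fetch a URL and decode its JSON body (assumed shape of the bot's helper)."""
        async with session.get(url) as resp:
            resp.raise_for_status()
            # content_type=None relaxes aiohttp's strict application/json check;
            # small meme APIs like random.dog did not always set the header.
            return await resp.json(content_type=None)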
8c01b3536026d56abb42daaf9d300e53e7c6dc18
detox/main.py
detox/main.py
import sys

import py

import detox
from detox.proc import Detox


def parse(args):
    from tox.session import prepare
    return prepare(args)


def main(args=None):
    if args is None:
        args = sys.argv[1:]
    config = parse(args)
    #now = py.std.time.time()
    detox = Detox(config)
    detox.startloopreport()
    retcode = detox.runtestsmulti(config.envlist)
    #elapsed = py.std.time.time() - now
    #cumulated = detox.toxsession.report.cumulated_time
    #detox.toxsession.report.line(
    #    "detox speed-up: %.2f (elapsed %.2f, cumulated %.2f)" % (
    #        cumulated / elapsed, elapsed, cumulated), bold=True)
    return retcode
import sys

import py

import detox
from detox.proc import Detox


def parse(args):
    from tox.session import prepare
    return prepare(args)


def main(args=None):
    if args is None:
        args = sys.argv[1:]
    config = parse(args)
    #now = py.std.time.time()
    detox = Detox(config)
    detox.startloopreport()
    retcode = detox.runtestsmulti(config.envlist)
    #elapsed = py.std.time.time() - now
    #cumulated = detox.toxsession.report.cumulated_time
    #detox.toxsession.report.line(
    #    "detox speed-up: %.2f (elapsed %.2f, cumulated %.2f)" % (
    #        cumulated / elapsed, elapsed, cumulated), bold=True)
    raise SystemExit(retcode)
Raise system code on exit from `python -m detox`
Raise system code on exit from `python -m detox`
Python
mit
tox-dev/detox
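The one-line change above matters because a value returned from main() is simply discarded unless some wrapper passes it to sys.exit(), whereas raise SystemExit(retcode) always becomes the process exit status, including under python -m detox. A tiny demonstration:

    import subprocess
    import sys

    RETURNS = 'def main():\n    return 3\n\nmain()\n'   # return value is dropped
    RAISES = 'raise SystemExit(3)\n'                     # becomes the exit status

    print(subprocess.run([sys.executable, '-c', RETURNS]).returncode)  # 0
    print(subprocess.run([sys.executable, '-c', RAISES]).returncode)   # 3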
97b7ba9d4d6bf948435ce58dd21b60d78d75fd29
lib-dynload/lzo/__init__.py
lib-dynload/lzo/__init__.py
import sys
import os

p1, p2 = sys.version_info[:2]

curpath = os.path.abspath( sys.argv[0] )
if os.path.islink(curpath):
    curpath = os.readlink(curpath)
currentdir = os.path.dirname( curpath )

build_dir = os.path.abspath( os.path.join(currentdir, "lib-dynload", "lzo", "build") )
if not os.path.isdir(build_dir):
    build_dir = os.path.abspath( os.path.join(currentdir, "..", "lib-dynload", "lzo", "build") )
if not os.path.isdir(build_dir):
    build_dir = os.path.abspath( os.path.join(currentdir, "..", "..", "lib-dynload", "lzo", "build") )

dirs = os.listdir(build_dir)
for d in dirs:
    if d.find("-%s.%s" % (p1, p2)) != -1 and d.find("lib.") != -1:
        sys.path.insert(0, os.path.join(build_dir, d) )

        import importlib
        module = importlib.import_module("_lzo")
        compress = module.compress
        decompress = module.decompress

        sys.path.pop(0)
        break
import sys
import os

p1, p2 = sys.version_info[:2]

curpath = os.path.abspath( sys.argv[0] )
if os.path.islink(curpath):
    curpath = os.readlink(curpath)
currentdir = os.path.dirname( curpath )

build_dir = os.path.abspath( os.path.join(currentdir, "lib-dynload", "lzo", "build") )
if not os.path.isdir(build_dir):
    build_dir = os.path.abspath( os.path.join(currentdir, "..", "lib-dynload", "lzo", "build") )
if not os.path.isdir(build_dir):
    build_dir = os.path.abspath( os.path.join(currentdir, "..", "..", "lib-dynload", "lzo", "build") )

dirs = os.listdir(build_dir)
for d in dirs:
    if d.find("-%s.%s" % (p1, p2)) != -1 and d.find("lib.") != -1:
        sys.path.insert(0, os.path.join(build_dir, d) )

        import importlib
        module = importlib.import_module("_lzo")
        module.set_block_size(16*1024*1024)
        compress = module.compress
        decompress = module.decompress

        sys.path.pop(0)
        break
Adjust maximum block size for lzo
Adjust maximum block size for lzo
Python
mit
sergey-dryabzhinsky/dedupsqlfs,sergey-dryabzhinsky/dedupsqlfs,sergey-dryabzhinsky/dedupsqlfs,sergey-dryabzhinsky/dedupsqlfs
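The directory scan in this record works because distutils names its build output after the platform and the interpreter, e.g. lib.linux-x86_64-3.4, so requiring both the 'lib.' prefix and the '-major.minor' tag selects the directory built for the running Python. The same selection logic on canned names (the directory names below are made up):

    import sys

    p1, p2 = sys.version_info[:2]
    candidates = ['temp.linux-x86_64-3.4',
                  'lib.linux-x86_64-2.7',
                  'lib.linux-x86_64-%s.%s' % (p1, p2)]
    tag = '-%s.%s' % (p1, p2)
    print([d for d in candidates if tag in d and 'lib.' in d])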
6a8f5bcc6dd42e125f7219d7d692c3af610c38c3
masters/master.client.polymer/polymer_repos.py
masters/master.client.polymer/polymer_repos.py
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

REPOS = (
    'polymer',
    'platform',
    'CustomElements',
    'mdv',
    'PointerGestures',
    'ShadowDOM',
    'HTMLImports',
)
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

REPOS = (
    'polymer',
    'platform',
    'CustomElements',
    'mdv',
    'PointerGestures',
    'PointerEvents',
    'ShadowDOM',
    'HTMLImports',
)
Add PointerEvents repo to master.client.polymer.
Add PointerEvents repo to master.client.polymer. R=hinoka@google.com BUG=chromium:237914 Review URL: https://codereview.chromium.org/15783003 git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@201643 0039d316-1c4b-4281-b951-d872f2087c98
Python
bsd-3-clause
eunchong/build,eunchong/build,eunchong/build,eunchong/build
99b0cc6ec783076b7d7db3bb15d6268c2781a234
ircstat/ircstat.py
ircstat/ircstat.py
# Copyright 2013 John Reese
# Licensed under the MIT license

VERSION = '0.1.0'

from collections import Counter, OrderedDict

from .ent import Message
from .log import logger
from .parser import LogParser
from .plugins import load_plugins

log = logger(__name__)


def do_everything(input_paths, output_path, config):
    """One entry point to rule them all."""
    parser = LogParser(config)
    conversations = parser.parse_logs(input_paths)

    log.info('found %d channels', len(conversations))
    for channel in conversations:
        log.info('channel %s has %d conversations',
                 channel, len(conversations[channel]))

    counter = Counter()
    for channel in conversations:
        for date, conversation in conversations[channel].items():
            for message in conversation.messages:
                counter[message.message_type] += 1

    counter = OrderedDict([
        ('message', counter[Message.MESSAGE]),
        ('action', counter[Message.ACTION]),
        ('join', counter[Message.JOIN]),
        ('part', counter[Message.PART]),
        ('quit', counter[Message.QUIT]),
    ])

    log.info(counter)

    plugins = load_plugins(config)
    log.debug(plugins)
# Copyright 2013 John Reese
# Licensed under the MIT license

VERSION = '0.1.0'

from collections import Counter, OrderedDict

from .ent import Message
from .log import logger
from .parser import LogParser
from .plugins import load_plugins

log = logger(__name__)


def do_everything(input_paths, output_path, config):
    """One entry point to rule them all."""
    parser = LogParser(config)
    conversations = parser.parse_logs(input_paths)

    log.info('found %d channels', len(conversations))
    for channel in conversations:
        log.info('channel %s has %d conversations',
                 channel, len(conversations[channel]))

    plugins = load_plugins(config)

    plugin_stats = {}
    for plugin in plugins:
        result = plugin.process(conversations)
        plugin_stats[plugin] = result
        log.debug(sorted(result.users.keys()))
Call into plugins from do_everything()
Call into plugins from do_everything()
Python
mit
jreese/ircstat,jreese/ircstat
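The new loop assumes a small plugin contract: each plugin exposes process(conversations) and returns a stats object with a users mapping. ircstat's real Plugin base class is not part of this record, so the sketch below mocks the contract with illustrative names (Stats, CountingPlugin):

    class Stats:
        def __init__(self, users):
            self.users = users            # nick -> message count in this toy version

    class CountingPlugin:
        def process(self, conversations):
            users = {}
            for messages in conversations.values():
                for nick in messages:
                    users[nick] = users.get(nick, 0) + 1
            return Stats(users)

    plugin_stats = {}
    for plugin in [CountingPlugin()]:
        result = plugin.process({'#chan': ['alice', 'bob', 'alice']})
        plugin_stats[plugin] = result
        print(sorted(result.users.keys()))   # ['alice', 'bob']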
f0a309b01f5c655774bd8da91090a11d7cf83095
handover_api/urls.py
handover_api/urls.py
from django.conf.urls import url, include
from rest_framework import routers
from handover_api import views

router = routers.DefaultRouter()
router.register(r'handovers', views.HandoverViewSet, 'handover')
router.register(r'drafts', views.DraftViewSet, 'draft')
router.register(r'users', views.UserViewSet, 'user')

urlpatterns = [
    url(r'^', include(router.urls)),
]
from django.conf.urls import url, include
from rest_framework import routers
from handover_api import views

router = routers.DefaultRouter()
router.register(r'handovers', views.HandoverViewSet, 'handover')
router.register(r'drafts', views.DraftViewSet, 'draft')
router.register(r'users', views.UserViewSet, 'dukedsuser')

urlpatterns = [
    url(r'^', include(router.urls)),
]
Change users route name to dukedsuser
Change users route name to dukedsuser
Python
mit
Duke-GCB/DukeDSHandoverService,Duke-GCB/DukeDSHandoverService,Duke-GCB/DukeDSHandoverService
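The third argument to router.register is the basename (base_name in DRF releases of that era): it changes the names of the generated routes, not their paths, which is why this commit is purely a rename of what reverse() looks up. A rough sketch, assuming an installed and configured Django/DRF project (the stub viewset is illustrative):

    from rest_framework import viewsets
    from rest_framework.routers import DefaultRouter

    class UserViewSet(viewsets.ViewSet):
        def list(self, request): ...               # stubs so both routes are mapped
        def retrieve(self, request, pk=None): ...

    router = DefaultRouter()
    router.register(r'users', UserViewSet, 'dukedsuser')

    # Paths stay /users/ and /users/<pk>/, but the route names become
    # 'dukedsuser-list' and 'dukedsuser-detail', which is what
    # reverse('dukedsuser-list') and hyperlinked serializers look up.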
a47f8ce5166b6b95b55136c2fd104e5c7b5dbf7a
swaggery/keywords.py
swaggery/keywords.py
'''A utility module to import all boilerplate Swaggery keywords into a module.

Usage:

from swaggery.keywords import *
'''
from .api import Api, Resource, operations
from .utils import Ptypes
from .logger import log
from .flowcontrol import Respond
from .models import (
    Model, Void, Integer, Float, String, Boolean, Date, DateTime, List, Set)
'''A utility module to import all boilerplate Swaggery keywords into a module.

Usage:

from swaggery.keywords import *
'''
from .api import Api, Resource, operations
from .utils import Ptypes
from .logger import log
from .flowcontrol import Respond
from .models import (
    Model, Void, Integer, Float, String, Boolean, Date, DateTime, List, Set
)
Add newline to end of file
Add newline to end of file
Python
agpl-3.0
quasipedia/swaggery,quasipedia/swaggery
a86565e6bf841c376191e8936883455267cf85b9
groundstation/transfer/request_handlers/listallchannels.py
groundstation/transfer/request_handlers/listallchannels.py
import groundstation.proto.channel_list_pb2 from groundstation import logger log = logger.getLogger(__name__) def handle_listallchannels(self): log.info("Handling LISTALLCHANNELS") payload = self.station.channels() log.info("Sending %i object descriptions" % (len(payload))) chunk = groundstation.proto.channel_list_pb2.ChannelList() for channel in payload: log.debug("serializing channel: %s" % (channel)) description = chunk.channels.add() description.channelname = channel grefs = self.station.grefs(channel) for gref in grefs: log.debug("- serializing gref: %s" % (gref)) _gref = description.grefs.add() _gref.identifier = gref.identifier for tip in gref: log.debug("-- serializing tip: %s" % (tip)) _tip = _gref.tips.add() _tip.tip = tip response = self._Response(self.id, "DESCRIBECHANNELS", chunk.SerializeToString()) self.stream.enqueue(response) self.TERMINATE()
import groundstation.proto.channel_list_pb2 from groundstation import logger log = logger.getLogger(__name__) def handle_listallchannels(self): log.info("Handling LISTALLCHANNELS") payload = self.station.channels() log.info("Sending %i channel descriptions" % (len(payload))) chunk = groundstation.proto.channel_list_pb2.ChannelList() for channel in payload: log.debug("serializing channel: %s" % (channel)) description = chunk.channels.add() description.channelname = channel grefs = self.station.grefs(channel) for gref in grefs: log.debug("- serializing gref: %s" % (gref)) _gref = description.grefs.add() _gref.identifier = gref.identifier for tip in gref: log.debug("-- serializing tip: %s" % (tip)) _tip = _gref.tips.add() _tip.tip = tip response = self._Response(self.id, "DESCRIBECHANNELS", chunk.SerializeToString()) self.stream.enqueue(response) self.TERMINATE()
Fix another misleading log message
Fix another misleading log message
Python
mit
richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation
import groundstation.proto.channel_list_pb2 from groundstation import logger log = logger.getLogger(__name__) def handle_listallchannels(self): log.info("Handling LISTALLCHANNELS") payload = self.station.channels() log.info("Sending %i object descriptions" % (len(payload))) chunk = groundstation.proto.channel_list_pb2.ChannelList() for channel in payload: log.debug("serializing channel: %s" % (channel)) description = chunk.channels.add() description.channelname = channel grefs = self.station.grefs(channel) for gref in grefs: log.debug("- serializing gref: %s" % (gref)) _gref = description.grefs.add() _gref.identifier = gref.identifier for tip in gref: log.debug("-- serializing tip: %s" % (tip)) _tip = _gref.tips.add() _tip.tip = tip response = self._Response(self.id, "DESCRIBECHANNELS", chunk.SerializeToString()) self.stream.enqueue(response) self.TERMINATE() Fix another misleading log message
import groundstation.proto.channel_list_pb2 from groundstation import logger log = logger.getLogger(__name__) def handle_listallchannels(self): log.info("Handling LISTALLCHANNELS") payload = self.station.channels() log.info("Sending %i channel descriptions" % (len(payload))) chunk = groundstation.proto.channel_list_pb2.ChannelList() for channel in payload: log.debug("serializing channel: %s" % (channel)) description = chunk.channels.add() description.channelname = channel grefs = self.station.grefs(channel) for gref in grefs: log.debug("- serializing gref: %s" % (gref)) _gref = description.grefs.add() _gref.identifier = gref.identifier for tip in gref: log.debug("-- serializing tip: %s" % (tip)) _tip = _gref.tips.add() _tip.tip = tip response = self._Response(self.id, "DESCRIBECHANNELS", chunk.SerializeToString()) self.stream.enqueue(response) self.TERMINATE()
<commit_before>import groundstation.proto.channel_list_pb2 from groundstation import logger log = logger.getLogger(__name__) def handle_listallchannels(self): log.info("Handling LISTALLCHANNELS") payload = self.station.channels() log.info("Sending %i object descriptions" % (len(payload))) chunk = groundstation.proto.channel_list_pb2.ChannelList() for channel in payload: log.debug("serializing channel: %s" % (channel)) description = chunk.channels.add() description.channelname = channel grefs = self.station.grefs(channel) for gref in grefs: log.debug("- serializing gref: %s" % (gref)) _gref = description.grefs.add() _gref.identifier = gref.identifier for tip in gref: log.debug("-- serializing tip: %s" % (tip)) _tip = _gref.tips.add() _tip.tip = tip response = self._Response(self.id, "DESCRIBECHANNELS", chunk.SerializeToString()) self.stream.enqueue(response) self.TERMINATE() <commit_msg>Fix another misleading log message<commit_after>
import groundstation.proto.channel_list_pb2 from groundstation import logger log = logger.getLogger(__name__) def handle_listallchannels(self): log.info("Handling LISTALLCHANNELS") payload = self.station.channels() log.info("Sending %i channel descriptions" % (len(payload))) chunk = groundstation.proto.channel_list_pb2.ChannelList() for channel in payload: log.debug("serializing channel: %s" % (channel)) description = chunk.channels.add() description.channelname = channel grefs = self.station.grefs(channel) for gref in grefs: log.debug("- serializing gref: %s" % (gref)) _gref = description.grefs.add() _gref.identifier = gref.identifier for tip in gref: log.debug("-- serializing tip: %s" % (tip)) _tip = _gref.tips.add() _tip.tip = tip response = self._Response(self.id, "DESCRIBECHANNELS", chunk.SerializeToString()) self.stream.enqueue(response) self.TERMINATE()
import groundstation.proto.channel_list_pb2 from groundstation import logger log = logger.getLogger(__name__) def handle_listallchannels(self): log.info("Handling LISTALLCHANNELS") payload = self.station.channels() log.info("Sending %i object descriptions" % (len(payload))) chunk = groundstation.proto.channel_list_pb2.ChannelList() for channel in payload: log.debug("serializing channel: %s" % (channel)) description = chunk.channels.add() description.channelname = channel grefs = self.station.grefs(channel) for gref in grefs: log.debug("- serializing gref: %s" % (gref)) _gref = description.grefs.add() _gref.identifier = gref.identifier for tip in gref: log.debug("-- serializing tip: %s" % (tip)) _tip = _gref.tips.add() _tip.tip = tip response = self._Response(self.id, "DESCRIBECHANNELS", chunk.SerializeToString()) self.stream.enqueue(response) self.TERMINATE() Fix another misleading log messageimport groundstation.proto.channel_list_pb2 from groundstation import logger log = logger.getLogger(__name__) def handle_listallchannels(self): log.info("Handling LISTALLCHANNELS") payload = self.station.channels() log.info("Sending %i channel descriptions" % (len(payload))) chunk = groundstation.proto.channel_list_pb2.ChannelList() for channel in payload: log.debug("serializing channel: %s" % (channel)) description = chunk.channels.add() description.channelname = channel grefs = self.station.grefs(channel) for gref in grefs: log.debug("- serializing gref: %s" % (gref)) _gref = description.grefs.add() _gref.identifier = gref.identifier for tip in gref: log.debug("-- serializing tip: %s" % (tip)) _tip = _gref.tips.add() _tip.tip = tip response = self._Response(self.id, "DESCRIBECHANNELS", chunk.SerializeToString()) self.stream.enqueue(response) self.TERMINATE()
<commit_before>import groundstation.proto.channel_list_pb2 from groundstation import logger log = logger.getLogger(__name__) def handle_listallchannels(self): log.info("Handling LISTALLCHANNELS") payload = self.station.channels() log.info("Sending %i object descriptions" % (len(payload))) chunk = groundstation.proto.channel_list_pb2.ChannelList() for channel in payload: log.debug("serializing channel: %s" % (channel)) description = chunk.channels.add() description.channelname = channel grefs = self.station.grefs(channel) for gref in grefs: log.debug("- serializing gref: %s" % (gref)) _gref = description.grefs.add() _gref.identifier = gref.identifier for tip in gref: log.debug("-- serializing tip: %s" % (tip)) _tip = _gref.tips.add() _tip.tip = tip response = self._Response(self.id, "DESCRIBECHANNELS", chunk.SerializeToString()) self.stream.enqueue(response) self.TERMINATE() <commit_msg>Fix another misleading log message<commit_after>import groundstation.proto.channel_list_pb2 from groundstation import logger log = logger.getLogger(__name__) def handle_listallchannels(self): log.info("Handling LISTALLCHANNELS") payload = self.station.channels() log.info("Sending %i channel descriptions" % (len(payload))) chunk = groundstation.proto.channel_list_pb2.ChannelList() for channel in payload: log.debug("serializing channel: %s" % (channel)) description = chunk.channels.add() description.channelname = channel grefs = self.station.grefs(channel) for gref in grefs: log.debug("- serializing gref: %s" % (gref)) _gref = description.grefs.add() _gref.identifier = gref.identifier for tip in gref: log.debug("-- serializing tip: %s" % (tip)) _tip = _gref.tips.add() _tip.tip = tip response = self._Response(self.id, "DESCRIBECHANNELS", chunk.SerializeToString()) self.stream.enqueue(response) self.TERMINATE()
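The handler in the record above builds its log line with eager %-interpolation. The stdlib logging module also accepts the format arguments separately and defers the interpolation until the record is actually emitted, which skips the formatting work when the level is filtered out. A minimal sketch, with a made-up payload:

import logging

logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)
payload = ['chan-a', 'chan-b']  # hypothetical channel list

# Same message as the record's handler, but formatted lazily by the
# logging module only if an INFO record is actually emitted.
log.info("Sending %i channel descriptions", len(payload))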
d74ded300b9f40ecca883b4940eb67ed9cb04b18
Rasp/weight_sensor/weight_sensor.py
Rasp/weight_sensor/weight_sensor.py
import RPi.GPIO as GPIO import time class WeightSensor: """ Class that get the weight from a HX711 this module is based on the HX711 datasheet Not test yet """ def __init__(self, SCK, DT): self.SCK = SCK self.DT = DT GPIO.setmode(GPIO.BCM) GPIO.setup(self.SCK, GPIO.OUT) # SCK command GPIO.setup(self.DT, GPIO.IN) # Device Output def getWeight(self): weight = 0 GPIO.output(self.SCK, False) while GPIO.input(self.DT): for i in range(0, 24): GPIO.output(self.SCK, True) time.sleep(0.01) weight = weight << 1 GPIO.output(self.SCK, False) if GPIO.input(self.DT): weight += 1 if weight > 100000000: return 0 GPIO.output(self.SCK, True) weight ^= 0x800000 GPIO.output(self.SCK, False) return weight if __name__ == "__main__": SCK_PIN = 26 DT_PIN = 20 WS = WeightSensor(SCK_PIN, DT_PIN) try: while True: print (WS.getWeight()) time.sleep(1) except KeyboardInterrupt: GPIO.cleanup() print ("lean GPIO")
import RPi.GPIO as GPIO import time class WeightSensor: """ Class that get the weight from a HX711 this module is based on the HX711 datasheet """ def __init__(self, SCK, DT): self.SCK = SCK self.DT = DT GPIO.setmode(GPIO.BCM) GPIO.setup(self.SCK, GPIO.OUT) # SCK command GPIO.setup(self.DT, GPIO.IN) # Device Output def getWeight(self): while GPIO.input(self.DT) == 1: pass weight = 0 for i in range(0, 24): weight = weight << 1 GPIO.output(self.SCK, True) if GPIO.input(self.DT): weight += 1 GPIO.output(self.SCK, False) GPIO.output(self.SCK, True) GPIO.output(self.SCK, False) return weight if __name__ == "__main__": SCK_PIN = 26 DT_PIN = 20 WS = WeightSensor(SCK_PIN, DT_PIN) try: while True: print (WS.getWeight()) time.sleep(0.5) except KeyboardInterrupt: GPIO.cleanup() print ("lean GPIO") # Clean the GPIO
Change the DT call, now the coding is working
Change the DT call, now the coding is working
Python
mit
CarlosPena00/Mobbi,CarlosPena00/Mobbi
import RPi.GPIO as GPIO import time class WeightSensor: """ Class that get the weight from a HX711 this module is based on the HX711 datasheet Not test yet """ def __init__(self, SCK, DT): self.SCK = SCK self.DT = DT GPIO.setmode(GPIO.BCM) GPIO.setup(self.SCK, GPIO.OUT) # SCK command GPIO.setup(self.DT, GPIO.IN) # Device Output def getWeight(self): weight = 0 GPIO.output(self.SCK, False) while GPIO.input(self.DT): for i in range(0, 24): GPIO.output(self.SCK, True) time.sleep(0.01) weight = weight << 1 GPIO.output(self.SCK, False) if GPIO.input(self.DT): weight += 1 if weight > 100000000: return 0 GPIO.output(self.SCK, True) weight ^= 0x800000 GPIO.output(self.SCK, False) return weight if __name__ == "__main__": SCK_PIN = 26 DT_PIN = 20 WS = WeightSensor(SCK_PIN, DT_PIN) try: while True: print (WS.getWeight()) time.sleep(1) except KeyboardInterrupt: GPIO.cleanup() print ("lean GPIO") Change the DT call, now the coding is working
import RPi.GPIO as GPIO import time class WeightSensor: """ Class that get the weight from a HX711 this module is based on the HX711 datasheet """ def __init__(self, SCK, DT): self.SCK = SCK self.DT = DT GPIO.setmode(GPIO.BCM) GPIO.setup(self.SCK, GPIO.OUT) # SCK command GPIO.setup(self.DT, GPIO.IN) # Device Output def getWeight(self): while GPIO.input(self.DT) == 1: pass weight = 0 for i in range(0, 24): weight = weight << 1 GPIO.output(self.SCK, True) if GPIO.input(self.DT): weight += 1 GPIO.output(self.SCK, False) GPIO.output(self.SCK, True) GPIO.output(self.SCK, False) return weight if __name__ == "__main__": SCK_PIN = 26 DT_PIN = 20 WS = WeightSensor(SCK_PIN, DT_PIN) try: while True: print (WS.getWeight()) time.sleep(0.5) except KeyboardInterrupt: GPIO.cleanup() print ("lean GPIO") # Clean the GPIO
<commit_before>import RPi.GPIO as GPIO import time class WeightSensor: """ Class that get the weight from a HX711 this module is based on the HX711 datasheet Not test yet """ def __init__(self, SCK, DT): self.SCK = SCK self.DT = DT GPIO.setmode(GPIO.BCM) GPIO.setup(self.SCK, GPIO.OUT) # SCK command GPIO.setup(self.DT, GPIO.IN) # Device Output def getWeight(self): weight = 0 GPIO.output(self.SCK, False) while GPIO.input(self.DT): for i in range(0, 24): GPIO.output(self.SCK, True) time.sleep(0.01) weight = weight << 1 GPIO.output(self.SCK, False) if GPIO.input(self.DT): weight += 1 if weight > 100000000: return 0 GPIO.output(self.SCK, True) weight ^= 0x800000 GPIO.output(self.SCK, False) return weight if __name__ == "__main__": SCK_PIN = 26 DT_PIN = 20 WS = WeightSensor(SCK_PIN, DT_PIN) try: while True: print (WS.getWeight()) time.sleep(1) except KeyboardInterrupt: GPIO.cleanup() print ("lean GPIO") <commit_msg>Change the DT call, now the coding is working<commit_after>
import RPi.GPIO as GPIO import time class WeightSensor: """ Class that get the weight from a HX711 this module is based on the HX711 datasheet """ def __init__(self, SCK, DT): self.SCK = SCK self.DT = DT GPIO.setmode(GPIO.BCM) GPIO.setup(self.SCK, GPIO.OUT) # SCK command GPIO.setup(self.DT, GPIO.IN) # Device Output def getWeight(self): while GPIO.input(self.DT) == 1: pass weight = 0 for i in range(0, 24): weight = weight << 1 GPIO.output(self.SCK, True) if GPIO.input(self.DT): weight += 1 GPIO.output(self.SCK, False) GPIO.output(self.SCK, True) GPIO.output(self.SCK, False) return weight if __name__ == "__main__": SCK_PIN = 26 DT_PIN = 20 WS = WeightSensor(SCK_PIN, DT_PIN) try: while True: print (WS.getWeight()) time.sleep(0.5) except KeyboardInterrupt: GPIO.cleanup() print ("lean GPIO") # Clean the GPIO
import RPi.GPIO as GPIO import time class WeightSensor: """ Class that get the weight from a HX711 this module is based on the HX711 datasheet Not test yet """ def __init__(self, SCK, DT): self.SCK = SCK self.DT = DT GPIO.setmode(GPIO.BCM) GPIO.setup(self.SCK, GPIO.OUT) # SCK command GPIO.setup(self.DT, GPIO.IN) # Device Output def getWeight(self): weight = 0 GPIO.output(self.SCK, False) while GPIO.input(self.DT): for i in range(0, 24): GPIO.output(self.SCK, True) time.sleep(0.01) weight = weight << 1 GPIO.output(self.SCK, False) if GPIO.input(self.DT): weight += 1 if weight > 100000000: return 0 GPIO.output(self.SCK, True) weight ^= 0x800000 GPIO.output(self.SCK, False) return weight if __name__ == "__main__": SCK_PIN = 26 DT_PIN = 20 WS = WeightSensor(SCK_PIN, DT_PIN) try: while True: print (WS.getWeight()) time.sleep(1) except KeyboardInterrupt: GPIO.cleanup() print ("lean GPIO") Change the DT call, now the coding is workingimport RPi.GPIO as GPIO import time class WeightSensor: """ Class that get the weight from a HX711 this module is based on the HX711 datasheet """ def __init__(self, SCK, DT): self.SCK = SCK self.DT = DT GPIO.setmode(GPIO.BCM) GPIO.setup(self.SCK, GPIO.OUT) # SCK command GPIO.setup(self.DT, GPIO.IN) # Device Output def getWeight(self): while GPIO.input(self.DT) == 1: pass weight = 0 for i in range(0, 24): weight = weight << 1 GPIO.output(self.SCK, True) if GPIO.input(self.DT): weight += 1 GPIO.output(self.SCK, False) GPIO.output(self.SCK, True) GPIO.output(self.SCK, False) return weight if __name__ == "__main__": SCK_PIN = 26 DT_PIN = 20 WS = WeightSensor(SCK_PIN, DT_PIN) try: while True: print (WS.getWeight()) time.sleep(0.5) except KeyboardInterrupt: GPIO.cleanup() print ("lean GPIO") # Clean the GPIO
<commit_before>import RPi.GPIO as GPIO import time class WeightSensor: """ Class that get the weight from a HX711 this module is based on the HX711 datasheet Not test yet """ def __init__(self, SCK, DT): self.SCK = SCK self.DT = DT GPIO.setmode(GPIO.BCM) GPIO.setup(self.SCK, GPIO.OUT) # SCK command GPIO.setup(self.DT, GPIO.IN) # Device Output def getWeight(self): weight = 0 GPIO.output(self.SCK, False) while GPIO.input(self.DT): for i in range(0, 24): GPIO.output(self.SCK, True) time.sleep(0.01) weight = weight << 1 GPIO.output(self.SCK, False) if GPIO.input(self.DT): weight += 1 if weight > 100000000: return 0 GPIO.output(self.SCK, True) weight ^= 0x800000 GPIO.output(self.SCK, False) return weight if __name__ == "__main__": SCK_PIN = 26 DT_PIN = 20 WS = WeightSensor(SCK_PIN, DT_PIN) try: while True: print (WS.getWeight()) time.sleep(1) except KeyboardInterrupt: GPIO.cleanup() print ("lean GPIO") <commit_msg>Change the DT call, now the coding is working<commit_after>import RPi.GPIO as GPIO import time class WeightSensor: """ Class that get the weight from a HX711 this module is based on the HX711 datasheet """ def __init__(self, SCK, DT): self.SCK = SCK self.DT = DT GPIO.setmode(GPIO.BCM) GPIO.setup(self.SCK, GPIO.OUT) # SCK command GPIO.setup(self.DT, GPIO.IN) # Device Output def getWeight(self): while GPIO.input(self.DT) == 1: pass weight = 0 for i in range(0, 24): weight = weight << 1 GPIO.output(self.SCK, True) if GPIO.input(self.DT): weight += 1 GPIO.output(self.SCK, False) GPIO.output(self.SCK, True) GPIO.output(self.SCK, False) return weight if __name__ == "__main__": SCK_PIN = 26 DT_PIN = 20 WS = WeightSensor(SCK_PIN, DT_PIN) try: while True: print (WS.getWeight()) time.sleep(0.5) except KeyboardInterrupt: GPIO.cleanup() print ("lean GPIO") # Clean the GPIO
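The corrected getWeight in the record above follows the HX711 serial protocol referenced in the commit's docstring: wait for DOUT (the DT pin) to go low, clock out 24 data bits MSB-first on SCK, then issue one extra SCK pulse so the next conversion uses channel A at gain 128. A minimal sketch of just the MSB-first accumulation, with a hypothetical bit sequence standing in for the real GPIO reads:

# Sketch of the MSB-first accumulation performed by the fixed getWeight().
# `bits` is hypothetical sample data standing in for 24 GPIO.input() reads.
bits = [1, 0, 1] + [0] * 21          # 24 raw bits from the HX711, MSB first
weight = 0
for bit in bits:
    weight = (weight << 1) | bit     # shift left, then OR in the newest bit
print(weight)                        # 10485760, i.e. 0b101 followed by 21 zeros

Note that the HX711 reports a 24-bit two's-complement value, so raw readings above 0x7FFFFF represent negative weights; the sketch, like the fixed driver, leaves that sign conversion to the caller.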
327413aa982dec1c56691ea0017298a2ae7af2c1
integration_tests/hello_world/__init__.py
integration_tests/hello_world/__init__.py
integration_test = True name = 'HelloWorldTest' package = 'helloworld' can_crash = True can_shutdown = True
integration_test = True name = 'HelloWorldTest' package = 'helloworld' can_crash = True can_shutdown = True def check_state(state): assert('Hello World!' in state.console) assert('not in console' in state.console)
Add an integration test that deliberately fails
Add an integration test that deliberately fails
Python
bsd-2-clause
unigornel/unigornel,unigornel/unigornel
integration_test = True name = 'HelloWorldTest' package = 'helloworld' can_crash = True can_shutdown = True Add an integration test that deliberately fails
integration_test = True name = 'HelloWorldTest' package = 'helloworld' can_crash = True can_shutdown = True def check_state(state): assert('Hello World!' in state.console) assert('not in console' in state.console)
<commit_before>integration_test = True name = 'HelloWorldTest' package = 'helloworld' can_crash = True can_shutdown = True <commit_msg>Add an integration test that deliberately fails<commit_after>
integration_test = True name = 'HelloWorldTest' package = 'helloworld' can_crash = True can_shutdown = True def check_state(state): assert('Hello World!' in state.console) assert('not in console' in state.console)
integration_test = True name = 'HelloWorldTest' package = 'helloworld' can_crash = True can_shutdown = True Add an integration test that deliberately failsintegration_test = True name = 'HelloWorldTest' package = 'helloworld' can_crash = True can_shutdown = True def check_state(state): assert('Hello World!' in state.console) assert('not in console' in state.console)
<commit_before>integration_test = True name = 'HelloWorldTest' package = 'helloworld' can_crash = True can_shutdown = True <commit_msg>Add an integration test that deliberately fails<commit_after>integration_test = True name = 'HelloWorldTest' package = 'helloworld' can_crash = True can_shutdown = True def check_state(state): assert('Hello World!' in state.console) assert('not in console' in state.console)
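In the record above, check_state receives a harness object whose console attribute holds the captured unikernel output; the second assertion can never hold, which is what makes the test fail on purpose. A self-contained sketch of how that plays out, using a hypothetical stand-in for the harness state:

# `State` is a hypothetical stand-in for the object the test harness
# passes into check_state; only the `console` attribute matters here.
class State:
    console = 'Hello World!\n'

def check_state(state):
    assert 'Hello World!' in state.console    # passes
    assert 'not in console' in state.console  # raises AssertionError

try:
    check_state(State())
except AssertionError:
    print('integration test reported a failure, as intended')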
096f9e86755a6967d732986c51ae00855551cf4d
project_name/urls.py
project_name/urls.py
from django.conf import settings from django.conf.urls import include, url # noqa from django.contrib import admin from django.views.generic import TemplateView import django_js_reverse.views urlpatterns = [ url(r'^admin/', admin.site.urls), url(r'^jsreverse/$', django_js_reverse.views.urls_js, name='js_reverse'), url(r'^$', TemplateView.as_view(template_name='exampleapp/itworks.html'), name='home'), ] if settings.DEBUG: import debug_toolbar urlpatterns = [ url(r'^__debug__/', include(debug_toolbar.urls)), ] + urlpatterns
from django.conf.urls import include, url # noqa from django.contrib import admin from django.views.generic import TemplateView import django_js_reverse.views urlpatterns = [ url(r'^admin/', admin.site.urls), url(r'^jsreverse/$', django_js_reverse.views.urls_js, name='js_reverse'), url(r'^$', TemplateView.as_view(template_name='exampleapp/itworks.html'), name='home'), ]
Remove usage from debug toolbar
Remove usage from debug toolbar
Python
mit
vintasoftware/django-react-boilerplate,vintasoftware/django-react-boilerplate,vintasoftware/django-react-boilerplate,vintasoftware/django-react-boilerplate
from django.conf import settings from django.conf.urls import include, url # noqa from django.contrib import admin from django.views.generic import TemplateView import django_js_reverse.views urlpatterns = [ url(r'^admin/', admin.site.urls), url(r'^jsreverse/$', django_js_reverse.views.urls_js, name='js_reverse'), url(r'^$', TemplateView.as_view(template_name='exampleapp/itworks.html'), name='home'), ] if settings.DEBUG: import debug_toolbar urlpatterns = [ url(r'^__debug__/', include(debug_toolbar.urls)), ] + urlpatterns Remove usage from debug toolbar
from django.conf.urls import include, url # noqa from django.contrib import admin from django.views.generic import TemplateView import django_js_reverse.views urlpatterns = [ url(r'^admin/', admin.site.urls), url(r'^jsreverse/$', django_js_reverse.views.urls_js, name='js_reverse'), url(r'^$', TemplateView.as_view(template_name='exampleapp/itworks.html'), name='home'), ]
<commit_before>from django.conf import settings from django.conf.urls import include, url # noqa from django.contrib import admin from django.views.generic import TemplateView import django_js_reverse.views urlpatterns = [ url(r'^admin/', admin.site.urls), url(r'^jsreverse/$', django_js_reverse.views.urls_js, name='js_reverse'), url(r'^$', TemplateView.as_view(template_name='exampleapp/itworks.html'), name='home'), ] if settings.DEBUG: import debug_toolbar urlpatterns = [ url(r'^__debug__/', include(debug_toolbar.urls)), ] + urlpatterns <commit_msg>Remove usage from debug toolbar<commit_after>
from django.conf.urls import include, url # noqa from django.contrib import admin from django.views.generic import TemplateView import django_js_reverse.views urlpatterns = [ url(r'^admin/', admin.site.urls), url(r'^jsreverse/$', django_js_reverse.views.urls_js, name='js_reverse'), url(r'^$', TemplateView.as_view(template_name='exampleapp/itworks.html'), name='home'), ]
from django.conf import settings from django.conf.urls import include, url # noqa from django.contrib import admin from django.views.generic import TemplateView import django_js_reverse.views urlpatterns = [ url(r'^admin/', admin.site.urls), url(r'^jsreverse/$', django_js_reverse.views.urls_js, name='js_reverse'), url(r'^$', TemplateView.as_view(template_name='exampleapp/itworks.html'), name='home'), ] if settings.DEBUG: import debug_toolbar urlpatterns = [ url(r'^__debug__/', include(debug_toolbar.urls)), ] + urlpatterns Remove usage from debug toolbarfrom django.conf.urls import include, url # noqa from django.contrib import admin from django.views.generic import TemplateView import django_js_reverse.views urlpatterns = [ url(r'^admin/', admin.site.urls), url(r'^jsreverse/$', django_js_reverse.views.urls_js, name='js_reverse'), url(r'^$', TemplateView.as_view(template_name='exampleapp/itworks.html'), name='home'), ]
<commit_before>from django.conf import settings from django.conf.urls import include, url # noqa from django.contrib import admin from django.views.generic import TemplateView import django_js_reverse.views urlpatterns = [ url(r'^admin/', admin.site.urls), url(r'^jsreverse/$', django_js_reverse.views.urls_js, name='js_reverse'), url(r'^$', TemplateView.as_view(template_name='exampleapp/itworks.html'), name='home'), ] if settings.DEBUG: import debug_toolbar urlpatterns = [ url(r'^__debug__/', include(debug_toolbar.urls)), ] + urlpatterns <commit_msg>Remove usage from debug toolbar<commit_after>from django.conf.urls import include, url # noqa from django.contrib import admin from django.views.generic import TemplateView import django_js_reverse.views urlpatterns = [ url(r'^admin/', admin.site.urls), url(r'^jsreverse/$', django_js_reverse.views.urls_js, name='js_reverse'), url(r'^$', TemplateView.as_view(template_name='exampleapp/itworks.html'), name='home'), ]
728e235ace11f5d1dd798e438ea65bde8a41bd6e
yacs/settings/development.py
yacs/settings/development.py
from yacs.settings.base import settings __all__ = ['settings'] with settings as s: s.DEBUG = True s.MIDDLEWARE_CLASSES += ( 'devserver.middleware.DevServerMiddleware', ) @s.lazy_eval def debug_install_apps(s): if s.DEBUG: s.INSTALLED_APPS += ( 'django_jasmine', 'devserver', ) s.DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'mydata', 'USER': 'postgreuser', 'PASSWORD': 'postgre', 'HOST': 'localhost', 'PORT': '', 'OPTIONS': { 'autocommit': True, } } }
from yacs.settings.base import settings __all__ = ['settings'] with settings as s: s.DEBUG = True s.MIDDLEWARE_CLASSES += ( 'devserver.middleware.DevServerMiddleware', ) @s.lazy_eval def debug_install_apps(s): if s.DEBUG: s.INSTALLED_APPS += ( 'django_jasmine', 'devserver', ) s.DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'yacs', 'USER': 'timetable', 'PASSWORD': 'thereisn0sp00n', 'HOST': 'localhost', 'PORT': '', 'OPTIONS': { 'autocommit': True, } } }
Revert "Revert "corrected database settings merge again""
Revert "Revert "corrected database settings merge again"" This reverts commit e7766ce068eabea30c81ba699c77ed2fe488d69d.
Python
mit
jeffh/YACS,jeffh/YACS,JGrippo/YACS,jeffh/YACS,jeffh/YACS,JGrippo/YACS,JGrippo/YACS,JGrippo/YACS
from yacs.settings.base import settings __all__ = ['settings'] with settings as s: s.DEBUG = True s.MIDDLEWARE_CLASSES += ( 'devserver.middleware.DevServerMiddleware', ) @s.lazy_eval def debug_install_apps(s): if s.DEBUG: s.INSTALLED_APPS += ( 'django_jasmine', 'devserver', ) s.DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'mydata', 'USER': 'postgreuser', 'PASSWORD': 'postgre', 'HOST': 'localhost', 'PORT': '', 'OPTIONS': { 'autocommit': True, } } } Revert "Revert "corrected database settings merge again"" This reverts commit e7766ce068eabea30c81ba699c77ed2fe488d69d.
from yacs.settings.base import settings __all__ = ['settings'] with settings as s: s.DEBUG = True s.MIDDLEWARE_CLASSES += ( 'devserver.middleware.DevServerMiddleware', ) @s.lazy_eval def debug_install_apps(s): if s.DEBUG: s.INSTALLED_APPS += ( 'django_jasmine', 'devserver', ) s.DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'yacs', 'USER': 'timetable', 'PASSWORD': 'thereisn0sp00n', 'HOST': 'localhost', 'PORT': '', 'OPTIONS': { 'autocommit': True, } } }
<commit_before>from yacs.settings.base import settings __all__ = ['settings'] with settings as s: s.DEBUG = True s.MIDDLEWARE_CLASSES += ( 'devserver.middleware.DevServerMiddleware', ) @s.lazy_eval def debug_install_apps(s): if s.DEBUG: s.INSTALLED_APPS += ( 'django_jasmine', 'devserver', ) s.DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'mydata', 'USER': 'postgreuser', 'PASSWORD': 'postgre', 'HOST': 'localhost', 'PORT': '', 'OPTIONS': { 'autocommit': True, } } } <commit_msg>Revert "Revert "corrected database settings merge again"" This reverts commit e7766ce068eabea30c81ba699c77ed2fe488d69d.<commit_after>
from yacs.settings.base import settings __all__ = ['settings'] with settings as s: s.DEBUG = True s.MIDDLEWARE_CLASSES += ( 'devserver.middleware.DevServerMiddleware', ) @s.lazy_eval def debug_install_apps(s): if s.DEBUG: s.INSTALLED_APPS += ( 'django_jasmine', 'devserver', ) s.DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'yacs', 'USER': 'timetable', 'PASSWORD': 'thereisn0sp00n', 'HOST': 'localhost', 'PORT': '', 'OPTIONS': { 'autocommit': True, } } }
from yacs.settings.base import settings __all__ = ['settings'] with settings as s: s.DEBUG = True s.MIDDLEWARE_CLASSES += ( 'devserver.middleware.DevServerMiddleware', ) @s.lazy_eval def debug_install_apps(s): if s.DEBUG: s.INSTALLED_APPS += ( 'django_jasmine', 'devserver', ) s.DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'mydata', 'USER': 'postgreuser', 'PASSWORD': 'postgre', 'HOST': 'localhost', 'PORT': '', 'OPTIONS': { 'autocommit': True, } } } Revert "Revert "corrected database settings merge again"" This reverts commit e7766ce068eabea30c81ba699c77ed2fe488d69d.from yacs.settings.base import settings __all__ = ['settings'] with settings as s: s.DEBUG = True s.MIDDLEWARE_CLASSES += ( 'devserver.middleware.DevServerMiddleware', ) @s.lazy_eval def debug_install_apps(s): if s.DEBUG: s.INSTALLED_APPS += ( 'django_jasmine', 'devserver', ) s.DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'yacs', 'USER': 'timetable', 'PASSWORD': 'thereisn0sp00n', 'HOST': 'localhost', 'PORT': '', 'OPTIONS': { 'autocommit': True, } } }
<commit_before>from yacs.settings.base import settings __all__ = ['settings'] with settings as s: s.DEBUG = True s.MIDDLEWARE_CLASSES += ( 'devserver.middleware.DevServerMiddleware', ) @s.lazy_eval def debug_install_apps(s): if s.DEBUG: s.INSTALLED_APPS += ( 'django_jasmine', 'devserver', ) s.DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'mydata', 'USER': 'postgreuser', 'PASSWORD': 'postgre', 'HOST': 'localhost', 'PORT': '', 'OPTIONS': { 'autocommit': True, } } } <commit_msg>Revert "Revert "corrected database settings merge again"" This reverts commit e7766ce068eabea30c81ba699c77ed2fe488d69d.<commit_after>from yacs.settings.base import settings __all__ = ['settings'] with settings as s: s.DEBUG = True s.MIDDLEWARE_CLASSES += ( 'devserver.middleware.DevServerMiddleware', ) @s.lazy_eval def debug_install_apps(s): if s.DEBUG: s.INSTALLED_APPS += ( 'django_jasmine', 'devserver', ) s.DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'yacs', 'USER': 'timetable', 'PASSWORD': 'thereisn0sp00n', 'HOST': 'localhost', 'PORT': '', 'OPTIONS': { 'autocommit': True, } } }
a2e64a15a1f792308764507d8068f4e109837405
feature_extraction/util/cleanup.py
feature_extraction/util/cleanup.py
import numpy as np import scipy as sp def cell_boundary_mask(image): """ Identifies the clipping boundary of a cell image. Returns this as a mask, where True corresponds to "inside the cell". This is done by finding a mask that is True where image != 0 (as the clipped area will be perfectly zero). Imperfections in imaging or low gain may cause internal zeros; these are removed by using scipy.ndimage.binary_fill_holes(). """ cellmask = (image != 0) return sp.ndimage.binary_fill_holes(cellmask)
Add a function to identify the boundary of a preclipped cell
Add a function to identify the boundary of a preclipped cell
Python
apache-2.0
widoptimization-willett/feature-extraction
Add a function to identify the boundary of a preclipped cell
import numpy as np import scipy as sp def cell_boundary_mask(image): """ Identifies the clipping boundary of a cell image. Returns this as a mask, where True corresponds to "inside the cell". This is done by finding a mask that is True where image != 0 (as the clipped area will be perfectly zero). Imperfections in imaging or low gain may cause internal zeros; these are removed by using scipy.ndimage.binary_fill_holes(). """ cellmask = (image != 0) return sp.ndimage.binary_fill_holes(cellmask)
<commit_before><commit_msg>Add a function to identify the boundary of a preclipped cell<commit_after>
import numpy as np import scipy as sp def cell_boundary_mask(image): """ Identifies the clipping boundary of a cell image. Returns this as a mask, where True corresponds to "inside the cell". This is done by finding a mask that is True where image != 0 (as the clipped area will be perfectly zero). Imperfections in imaging or low gain may cause internal zeros; these are removed by using scipy.ndimage.binary_fill_holes(). """ cellmask = (image != 0) return sp.ndimage.binary_fill_holes(cellmask)
Add a function to identify the boundary of a preclipped cellimport numpy as np import scipy as sp def cell_boundary_mask(image): """ Identifies the clipping boundary of a cell image. Returns this as a mask, where True corresponds to "inside the cell". This is done by finding a mask that is True where image != 0 (as the clipped area will be perfectly zero). Imperfections in imaging or low gain may cause internal zeros; these are removed by using scipy.ndimage.binary_fill_holes(). """ cellmask = (image != 0) return sp.ndimage.binary_fill_holes(cellmask)
<commit_before><commit_msg>Add a function to identify the boundary of a preclipped cell<commit_after>import numpy as np import scipy as sp def cell_boundary_mask(image): """ Identifies the clipping boundary of a cell image. Returns this as a mask, where True corresponds to "inside the cell". This is done by finding a mask that is True where image != 0 (as the clipped area will be perfectly zero). Imperfections in imaging or low gain may cause internal zeros; these are removed by using scipy.ndimage.binary_fill_holes(). """ cellmask = (image != 0) return sp.ndimage.binary_fill_holes(cellmask)
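cell_boundary_mask in the record above leans on scipy.ndimage.binary_fill_holes to promote internal zeros back to "inside the cell" while leaving the border-connected clipped background outside. A toy demonstration on made-up data:

import numpy as np
import scipy.ndimage as ndi

# Toy "cell" image: zero background (the clipped area) surrounding a
# cell that contains one internal zero pixel, e.g. from low gain.
image = np.array([
    [0, 0, 0, 0, 0],
    [0, 5, 7, 6, 0],
    [0, 4, 0, 8, 0],   # internal zero inside the cell
    [0, 6, 9, 5, 0],
    [0, 0, 0, 0, 0],
])
mask = ndi.binary_fill_holes(image != 0)
print(mask[2, 2])   # True: the enclosed hole is filled in
print(mask[0, 0])   # False: the border-connected background stays outside

The module itself does `import scipy as sp` and calls `sp.ndimage.binary_fill_holes`; whether importing `scipy` alone exposes the `ndimage` submodule varies by SciPy version, which is why the demo imports the submodule explicitly.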
74d7c55ab6584daef444923c888e6905d8c9ccf1
expense/admin.py
expense/admin.py
from django.contrib import admin from expense.models import ExpenseNote class ExpenseNoteAdmin(admin.ModelAdmin): list_display = ['date', 'save_in_ledger', 'details', 'contact', 'credit_account', 'debit_account', 'amount'] list_filter = ['date', 'save_in_ledger'] readonly_fields = ['amount'] fields = ('date', 'contact', 'number', 'details', 'credit_account', 'debit_account', 'amount', 'save_in_ledger') admin.site.register(ExpenseNote, ExpenseNoteAdmin)
from django.contrib import admin from expense.models import ExpenseNote class ExpenseNoteAdmin(admin.ModelAdmin): list_display = ['date', 'save_in_ledger', 'details', 'contact', 'credit_account', 'debit_account', 'amount'] list_filter = ['date', 'save_in_ledger'] fields = ('date', 'contact', 'number', 'details', 'credit_account', 'debit_account', 'amount', 'save_in_ledger') admin.site.register(ExpenseNote, ExpenseNoteAdmin)
Allow editing amount field in expensenote
Allow editing amount field in expensenote
Python
mpl-2.0
jackbravo/condorest-django,jackbravo/condorest-django,jackbravo/condorest-django
from django.contrib import admin from expense.models import ExpenseNote class ExpenseNoteAdmin(admin.ModelAdmin): list_display = ['date', 'save_in_ledger', 'details', 'contact', 'credit_account', 'debit_account', 'amount'] list_filter = ['date', 'save_in_ledger'] readonly_fields = ['amount'] fields = ('date', 'contact', 'number', 'details', 'credit_account', 'debit_account', 'amount', 'save_in_ledger') admin.site.register(ExpenseNote, ExpenseNoteAdmin) Allow editing amount field in expensenote
from django.contrib import admin from expense.models import ExpenseNote class ExpenseNoteAdmin(admin.ModelAdmin): list_display = ['date', 'save_in_ledger', 'details', 'contact', 'credit_account', 'debit_account', 'amount'] list_filter = ['date', 'save_in_ledger'] fields = ('date', 'contact', 'number', 'details', 'credit_account', 'debit_account', 'amount', 'save_in_ledger') admin.site.register(ExpenseNote, ExpenseNoteAdmin)
<commit_before>from django.contrib import admin from expense.models import ExpenseNote class ExpenseNoteAdmin(admin.ModelAdmin): list_display = ['date', 'save_in_ledger', 'details', 'contact', 'credit_account', 'debit_account', 'amount'] list_filter = ['date', 'save_in_ledger'] readonly_fields = ['amount'] fields = ('date', 'contact', 'number', 'details', 'credit_account', 'debit_account', 'amount', 'save_in_ledger') admin.site.register(ExpenseNote, ExpenseNoteAdmin) <commit_msg>Allow editing amount field in expensenote<commit_after>
from django.contrib import admin from expense.models import ExpenseNote class ExpenseNoteAdmin(admin.ModelAdmin): list_display = ['date', 'save_in_ledger', 'details', 'contact', 'credit_account', 'debit_account', 'amount'] list_filter = ['date', 'save_in_ledger'] fields = ('date', 'contact', 'number', 'details', 'credit_account', 'debit_account', 'amount', 'save_in_ledger') admin.site.register(ExpenseNote, ExpenseNoteAdmin)
from django.contrib import admin from expense.models import ExpenseNote class ExpenseNoteAdmin(admin.ModelAdmin): list_display = ['date', 'save_in_ledger', 'details', 'contact', 'credit_account', 'debit_account', 'amount'] list_filter = ['date', 'save_in_ledger'] readonly_fields = ['amount'] fields = ('date', 'contact', 'number', 'details', 'credit_account', 'debit_account', 'amount', 'save_in_ledger') admin.site.register(ExpenseNote, ExpenseNoteAdmin) Allow editing amount field in expensenotefrom django.contrib import admin from expense.models import ExpenseNote class ExpenseNoteAdmin(admin.ModelAdmin): list_display = ['date', 'save_in_ledger', 'details', 'contact', 'credit_account', 'debit_account', 'amount'] list_filter = ['date', 'save_in_ledger'] fields = ('date', 'contact', 'number', 'details', 'credit_account', 'debit_account', 'amount', 'save_in_ledger') admin.site.register(ExpenseNote, ExpenseNoteAdmin)
<commit_before>from django.contrib import admin from expense.models import ExpenseNote class ExpenseNoteAdmin(admin.ModelAdmin): list_display = ['date', 'save_in_ledger', 'details', 'contact', 'credit_account', 'debit_account', 'amount'] list_filter = ['date', 'save_in_ledger'] readonly_fields = ['amount'] fields = ('date', 'contact', 'number', 'details', 'credit_account', 'debit_account', 'amount', 'save_in_ledger') admin.site.register(ExpenseNote, ExpenseNoteAdmin) <commit_msg>Allow editing amount field in expensenote<commit_after>from django.contrib import admin from expense.models import ExpenseNote class ExpenseNoteAdmin(admin.ModelAdmin): list_display = ['date', 'save_in_ledger', 'details', 'contact', 'credit_account', 'debit_account', 'amount'] list_filter = ['date', 'save_in_ledger'] fields = ('date', 'contact', 'number', 'details', 'credit_account', 'debit_account', 'amount', 'save_in_ledger') admin.site.register(ExpenseNote, ExpenseNoteAdmin)
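With 'amount' removed from readonly_fields in the record above, the field becomes editable on both the add and change forms. If it ever needed to stay locked only for existing notes, Django's get_readonly_fields hook supports per-request decisions; the following variant is purely illustrative of that hook, not what the commit does:

from django.contrib import admin

# Hypothetical variant: keep `amount` read-only only when editing an
# existing ExpenseNote (obj is not None), editable when adding.
class ExpenseNoteAdminVariant(admin.ModelAdmin):
    def get_readonly_fields(self, request, obj=None):
        return ['amount'] if obj is not None else []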
b2cb55a58f744bed6b860568d4af1782846b178f
tests/examples/data_solutions/dp-foundation/test_plan.py
tests/examples/data_solutions/dp-foundation/test_plan.py
# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import pytest FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'fixture') def test_resources(e2e_plan_runner): "Test that plan works and the numbers of resources is as expected." modules, resources = e2e_plan_runner(FIXTURES_DIR) assert len(modules) == 40 assert len(resources) == 281
# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import pytest FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'fixture') def test_resources(e2e_plan_runner): "Test that plan works and the numbers of resources is as expected." modules, resources = e2e_plan_runner(FIXTURES_DIR) assert len(modules) == 40 assert len(resources) == 286
Fix test, for real? :-)
Fix test, for real? :-)
Python
apache-2.0
GoogleCloudPlatform/cloud-foundation-fabric,GoogleCloudPlatform/cloud-foundation-fabric,GoogleCloudPlatform/cloud-foundation-fabric,GoogleCloudPlatform/cloud-foundation-fabric
# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import pytest FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'fixture') def test_resources(e2e_plan_runner): "Test that plan works and the numbers of resources is as expected." modules, resources = e2e_plan_runner(FIXTURES_DIR) assert len(modules) == 40 assert len(resources) == 281 Fix test, for real? :-)
# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import pytest FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'fixture') def test_resources(e2e_plan_runner): "Test that plan works and the numbers of resources is as expected." modules, resources = e2e_plan_runner(FIXTURES_DIR) assert len(modules) == 40 assert len(resources) == 286
<commit_before># Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import pytest FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'fixture') def test_resources(e2e_plan_runner): "Test that plan works and the numbers of resources is as expected." modules, resources = e2e_plan_runner(FIXTURES_DIR) assert len(modules) == 40 assert len(resources) == 281 <commit_msg>Fix test, for real? :-)<commit_after>
# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import pytest FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'fixture') def test_resources(e2e_plan_runner): "Test that plan works and the numbers of resources is as expected." modules, resources = e2e_plan_runner(FIXTURES_DIR) assert len(modules) == 40 assert len(resources) == 286
# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import pytest FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'fixture') def test_resources(e2e_plan_runner): "Test that plan works and the numbers of resources is as expected." modules, resources = e2e_plan_runner(FIXTURES_DIR) assert len(modules) == 40 assert len(resources) == 281 Fix test, for real? :-)# Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import pytest FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'fixture') def test_resources(e2e_plan_runner): "Test that plan works and the numbers of resources is as expected." modules, resources = e2e_plan_runner(FIXTURES_DIR) assert len(modules) == 40 assert len(resources) == 286
<commit_before># Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import pytest FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'fixture') def test_resources(e2e_plan_runner): "Test that plan works and the numbers of resources is as expected." modules, resources = e2e_plan_runner(FIXTURES_DIR) assert len(modules) == 40 assert len(resources) == 281 <commit_msg>Fix test, for real? :-)<commit_after># Copyright 2022 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os import pytest FIXTURES_DIR = os.path.join(os.path.dirname(__file__), 'fixture') def test_resources(e2e_plan_runner): "Test that plan works and the numbers of resources is as expected." modules, resources = e2e_plan_runner(FIXTURES_DIR) assert len(modules) == 40 assert len(resources) == 286
3ef77edcbf4b3268399f439b89f15ef087bd06bb
chamber/utils/logging.py
chamber/utils/logging.py
import json import logging import platform from django.core.serializers.json import DjangoJSONEncoder from django.http import UnreadablePostError def skip_unreadable_post(record): if record.exc_info: exc_type, exc_value = record.exc_info[:2] if isinstance(exc_value, UnreadablePostError): return False return True class AppendExtraJSONHandler(logging.StreamHandler): DEFAULT_STREAM_HANDLER_VARIABLE_KEYS = { 'name', 'msg', 'args', 'levelname', 'levelno', 'pathname', 'filename', 'module', 'exc_info', 'exc_text', 'stack_info', 'lineno', 'funcName', 'created', 'msecs', 'relativeCreated', 'thread', 'threadName', 'processName', 'process', } CUSTOM_STREAM_HANDLER_VARIABLE_KEYS = {'hostname'} def emit(self, record): extra = { k: v for k, v in record.__dict__.items() if k not in self.DEFAULT_STREAM_HANDLER_VARIABLE_KEYS.union(self.CUSTOM_STREAM_HANDLER_VARIABLE_KEYS) } record.msg = '{} --- {}'.format(record.msg, json.dumps(extra, cls=DjangoJSONEncoder)) super().emit(record) class HostnameFilter(logging.Filter): hostname = platform.node() def filter(self, record): record.hostname = self.hostname return True
import json import logging import platform from django.core.serializers.json import DjangoJSONEncoder from django.http import UnreadablePostError def skip_unreadable_post(record): if record.exc_info: exc_type, exc_value = record.exc_info[:2] if isinstance(exc_value, UnreadablePostError): return False return True class AppendExtraJSONHandler(logging.StreamHandler): DEFAULT_STREAM_HANDLER_VARIABLE_KEYS = { 'name', 'msg', 'args', 'levelname', 'levelno', 'pathname', 'filename', 'module', 'exc_info', 'exc_text', 'stack_info', 'lineno', 'funcName', 'created', 'msecs', 'relativeCreated', 'thread', 'threadName', 'processName', 'process', } CUSTOM_STREAM_HANDLER_VARIABLE_KEYS = {'hostname'} def emit(self, record): extra = { k: v for k, v in record.__dict__.items() if k not in self.DEFAULT_STREAM_HANDLER_VARIABLE_KEYS.union(self.CUSTOM_STREAM_HANDLER_VARIABLE_KEYS) } record.msg = '{} --- {}'.format(record.msg, json.dumps(extra, cls=DjangoJSONEncoder, default=lambda x: '<<NON-SERIALIZABLE TYPE: {}>>'.format(type(x).__qualname__))) super().emit(record) class HostnameFilter(logging.Filter): hostname = platform.node() def filter(self, record): record.hostname = self.hostname return True
Set default value for json.dumps
Set default value for json.dumps Use default value when type cannot be serialized.
Python
bsd-3-clause
druids/django-chamber
import json import logging import platform from django.core.serializers.json import DjangoJSONEncoder from django.http import UnreadablePostError def skip_unreadable_post(record): if record.exc_info: exc_type, exc_value = record.exc_info[:2] if isinstance(exc_value, UnreadablePostError): return False return True class AppendExtraJSONHandler(logging.StreamHandler): DEFAULT_STREAM_HANDLER_VARIABLE_KEYS = { 'name', 'msg', 'args', 'levelname', 'levelno', 'pathname', 'filename', 'module', 'exc_info', 'exc_text', 'stack_info', 'lineno', 'funcName', 'created', 'msecs', 'relativeCreated', 'thread', 'threadName', 'processName', 'process', } CUSTOM_STREAM_HANDLER_VARIABLE_KEYS = {'hostname'} def emit(self, record): extra = { k: v for k, v in record.__dict__.items() if k not in self.DEFAULT_STREAM_HANDLER_VARIABLE_KEYS.union(self.CUSTOM_STREAM_HANDLER_VARIABLE_KEYS) } record.msg = '{} --- {}'.format(record.msg, json.dumps(extra, cls=DjangoJSONEncoder)) super().emit(record) class HostnameFilter(logging.Filter): hostname = platform.node() def filter(self, record): record.hostname = self.hostname return True Set default value for json.dumps Use default value when type cannot be serialized.
import json import logging import platform from django.core.serializers.json import DjangoJSONEncoder from django.http import UnreadablePostError def skip_unreadable_post(record): if record.exc_info: exc_type, exc_value = record.exc_info[:2] if isinstance(exc_value, UnreadablePostError): return False return True class AppendExtraJSONHandler(logging.StreamHandler): DEFAULT_STREAM_HANDLER_VARIABLE_KEYS = { 'name', 'msg', 'args', 'levelname', 'levelno', 'pathname', 'filename', 'module', 'exc_info', 'exc_text', 'stack_info', 'lineno', 'funcName', 'created', 'msecs', 'relativeCreated', 'thread', 'threadName', 'processName', 'process', } CUSTOM_STREAM_HANDLER_VARIABLE_KEYS = {'hostname'} def emit(self, record): extra = { k: v for k, v in record.__dict__.items() if k not in self.DEFAULT_STREAM_HANDLER_VARIABLE_KEYS.union(self.CUSTOM_STREAM_HANDLER_VARIABLE_KEYS) } record.msg = '{} --- {}'.format(record.msg, json.dumps(extra, cls=DjangoJSONEncoder, default=lambda x: '<<NON-SERIALIZABLE TYPE: {}>>'.format(type(x).__qualname__))) super().emit(record) class HostnameFilter(logging.Filter): hostname = platform.node() def filter(self, record): record.hostname = self.hostname return True
<commit_before>import json import logging import platform from django.core.serializers.json import DjangoJSONEncoder from django.http import UnreadablePostError def skip_unreadable_post(record): if record.exc_info: exc_type, exc_value = record.exc_info[:2] if isinstance(exc_value, UnreadablePostError): return False return True class AppendExtraJSONHandler(logging.StreamHandler): DEFAULT_STREAM_HANDLER_VARIABLE_KEYS = { 'name', 'msg', 'args', 'levelname', 'levelno', 'pathname', 'filename', 'module', 'exc_info', 'exc_text', 'stack_info', 'lineno', 'funcName', 'created', 'msecs', 'relativeCreated', 'thread', 'threadName', 'processName', 'process', } CUSTOM_STREAM_HANDLER_VARIABLE_KEYS = {'hostname'} def emit(self, record): extra = { k: v for k, v in record.__dict__.items() if k not in self.DEFAULT_STREAM_HANDLER_VARIABLE_KEYS.union(self.CUSTOM_STREAM_HANDLER_VARIABLE_KEYS) } record.msg = '{} --- {}'.format(record.msg, json.dumps(extra, cls=DjangoJSONEncoder)) super().emit(record) class HostnameFilter(logging.Filter): hostname = platform.node() def filter(self, record): record.hostname = self.hostname return True <commit_msg>Set default value for json.dumps Use default value when type cannot be serialized.<commit_after>
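Since the `default=` hook added in the record above is the whole point of the change, a minimal, self-contained sketch of how it behaves may help; `Unserializable` here is a hypothetical stand-in for any object the JSON encoder cannot handle:

import json


class Unserializable:
    """Hypothetical type that the JSON encoder cannot handle natively."""


payload = {'ok': 1, 'bad': Unserializable()}

# Without default=, json.dumps raises TypeError on the 'bad' value.
# The default callable is invoked for each non-serializable object,
# and whatever it returns is encoded in its place.
print(json.dumps(
    payload,
    default=lambda x: '<<NON-SERIALIZABLE TYPE: {}>>'.format(type(x).__qualname__),
))
# {"ok": 1, "bad": "<<NON-SERIALIZABLE TYPE: Unserializable>>"}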
af3f0b520a868832f708e7692736005e6aee9c4b
core/admin.py
core/admin.py
from django.contrib import admin

from .models import FileUpload


class FileUploadAdmin(admin.ModelAdmin):
    readonly_fields = ['owner']

    def get_fields(self, request, obj=None):
        if request.user.is_superuser:
            return ['file', 'url_name', 'owner']
        else:
            return ['file', 'url_name']

    def get_readonly_fields(self, request, obj=None):
        if obj:
            return self.readonly_fields + ['file', 'url_name']
        return self.readonly_fields

    def has_change_permission(self, request, obj=None):
        has_class_permission = super().has_change_permission(request, obj)
        if not has_class_permission:
            return False
        if obj is None:
            return True
        owns_object = request.user.id == obj.owner.id
        if request.user.is_superuser or owns_object:
            return True
        return False

    def has_delete_permission(self, request, obj=None):
        return self.has_change_permission(request, obj)

    def get_queryset(self, request):
        if request.user.is_superuser:
            return FileUpload.objects.all()
        return FileUpload.objects.filter(owner=request.user)

    def save_model(self, request, obj, form, change):
        if not change:
            obj.owner = request.user
        obj.save()


admin.site.register(FileUpload, FileUploadAdmin)
from django.contrib import admin

from .models import FileUpload


class FileUploadAdmin(admin.ModelAdmin):
    def get_changeform_initial_data(self, request):
        return {'owner': request.user}

    def get_fields(self, request, obj=None):
        if request.user.is_superuser:
            return ['file', 'url_name', 'owner']
        else:
            return ['file', 'url_name']

    def get_readonly_fields(self, request, obj=None):
        if obj:
            return self.readonly_fields + ('file', 'url_name')
        return self.readonly_fields

    def has_change_permission(self, request, obj=None):
        has_class_permission = super().has_change_permission(request, obj)
        if not has_class_permission:
            return False
        if obj is None:
            return True
        owns_object = request.user.id == obj.owner.id
        if request.user.is_superuser or owns_object:
            return True
        return False

    def has_delete_permission(self, request, obj=None):
        return self.has_change_permission(request, obj)

    def get_queryset(self, request):
        if request.user.is_superuser:
            return FileUpload.objects.all()
        return FileUpload.objects.filter(owner=request.user)

    def save_model(self, request, obj, form, change):
        if not change:
            obj.owner = request.user
        obj.save()


admin.site.register(FileUpload, FileUploadAdmin)
Make owner visible to superusers
Make owner visible to superusers
Python
mit
swarmer/files,swarmer/files
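One detail of this diff is easy to miss: `get_readonly_fields` switches from list to tuple concatenation because the change also removes the `readonly_fields = ['owner']` class attribute, so `self.readonly_fields` falls back to the `ModelAdmin` default, which is an empty tuple in current Django versions. A minimal sketch of the failure mode, independent of Django:

# BaseModelAdmin.readonly_fields defaults to an empty tuple in Django,
# so once the class attribute is removed, only tuple concatenation works.
readonly_fields = ()

# readonly_fields + ['file', 'url_name']
# -> TypeError: can only concatenate tuple (not "list") to tuple
fields = readonly_fields + ('file', 'url_name')
print(fields)  # ('file', 'url_name')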
fac8f1af6bd3eb46fe2a26689b0d85f358934f7a
network_checker/url_access_checker/cli.py
network_checker/url_access_checker/cli.py
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import sys

from cliff.commandmanager import CommandManager

from fuel_network_checker import base_app


class UrlAccessCheckApp(base_app.BaseApp):
    LOG_FILENAME = '/var/log/url_access_checker.log'

    def __init__(self):
        super(UrlAccessCheckApp, self).__init__(
            description='Url access check application',
            version='0.1',
            command_manager=CommandManager('urlaccesscheck'),
        )


def main(argv=sys.argv[1:]):
    myapp = UrlAccessCheckApp()
    return myapp.run(argv)


if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]))
# Copyright 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import os
import sys

# fixed in cmd2 >=0.6.6
os.environ['EDITOR'] = '/usr/bin/nano'

from cliff.commandmanager import CommandManager

from fuel_network_checker import base_app


class UrlAccessCheckApp(base_app.BaseApp):
    LOG_FILENAME = '/var/log/url_access_checker.log'

    def __init__(self):
        super(UrlAccessCheckApp, self).__init__(
            description='Url access check application',
            version='0.1',
            command_manager=CommandManager('urlaccesscheck'),
        )


def main(argv=sys.argv[1:]):
    myapp = UrlAccessCheckApp()
    return myapp.run(argv)


if __name__ == '__main__':
    sys.exit(main(sys.argv[1:]))
Add EDITOR variable in urlaccesschecker
Add EDITOR variable in urlaccesschecker

This variable is required by the cmd2 library to work. Without it, the tool fails on bootstrap with the traceback:

    File "/usr/lib/python2.6/site-packages/cmd2.py", line 424, in Cmd
        if subprocess.Popen(['which', editor])

Change-Id: I061f88b65d7bc7181752cd076da4067df2f84131
Related-Bug: 1439686
Python
apache-2.0
prmtl/fuel-web,SmartInfrastructures/fuel-web-dev,eayunstack/fuel-web,SmartInfrastructures/fuel-web-dev,nebril/fuel-web,huntxu/fuel-web,prmtl/fuel-web,eayunstack/fuel-web,stackforge/fuel-web,SmartInfrastructures/fuel-web-dev,eayunstack/fuel-web,eayunstack/fuel-web,SmartInfrastructures/fuel-web-dev,nebril/fuel-web,SmartInfrastructures/fuel-web-dev,nebril/fuel-web,huntxu/fuel-web,prmtl/fuel-web,huntxu/fuel-web,huntxu/fuel-web,eayunstack/fuel-web,huntxu/fuel-web,nebril/fuel-web,prmtl/fuel-web,stackforge/fuel-web,stackforge/fuel-web,prmtl/fuel-web,nebril/fuel-web
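The placement of the assignment is the key detail of this patch: it must run before `cliff` (and, through it, `cmd2`) is imported, because the traceback in the commit message shows the affected `cmd2` versions probing the editor via `which` while the module is being imported. A gentler variant, hypothetical and not part of the fuel-web patch, would only fill the variable in when the user has not set one:

import os

# Must happen before cliff/cmd2 are imported; the affected cmd2
# versions run `which $EDITOR` at import time (see traceback above).
# setdefault preserves an EDITOR the user has already chosen.
os.environ.setdefault('EDITOR', '/usr/bin/nano')

from cliff.commandmanager import CommandManager  # noqa: E402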
6a7a553dd51abbd6ade2e448bae0e4e2a8036f23
generate-data.py
generate-data.py
#!/usr/bin/env python

import random

from nott_params import *

num_samples = int(gridDim[0] * gridDim[1] * 10)


def generate_data(numx, numy):
    data = ['0' for i in range(numx * numy)]
    stimulus = (random.randint(0, numx - 1), random.randint(0, numy - 1))
    data[stimulus[1] * numx + stimulus[0]] = '1'
    return data, stimulus


def print_header():
    print("{0} {1} {2}".format(num_samples, num_inputs, num_outputs))


def print_input(left, right):
    data = left + right
    print(' '.join(data))


if __name__ == '__main__':
    random.seed()
    print_header()
    for i in range(num_samples):
        data, stimulus = generate_data(gridDim[0], gridDim[1])
        print_input(data, data)  # duplicate for two eyes
        scaled_x = 2 * float(stimulus[0]) / gridDim[0] - 1
        scaled_y = 2 * float(stimulus[1]) / gridDim[1] - 1
        print("{0} {1} {2} {3}".format(scaled_x, scaled_y, scaled_x, scaled_y))
#!/usr/bin/env python

import random

from nott_params import *

num_samples = int(gridDim[0] * gridDim[1] * 10)


def generate_data(numx, numy):
    ldata = ['0' for i in range(numx * numy)]
    rdata = ['0' for i in range(numx * numy)]
    stimulus = (random.randint(0, numx - 1), random.randint(0, numy - 1))
    ldata[stimulus[1] * numx + stimulus[0]] = '1'
    rdata[stimulus[1] * numx + stimulus[0]] = '1'
    return ldata, rdata, stimulus


def print_header():
    print("{0} {1} {2}".format(num_samples, num_inputs, num_outputs))


def print_input(left, right):
    data = left + right
    print(' '.join(data))


if __name__ == '__main__':
    random.seed()
    print_header()
    for i in range(num_samples):
        ldata, rdata, stimulus = generate_data(gridDim[0], gridDim[1])
        print_input(ldata, rdata)
        scaled_x = 2 * float(stimulus[0]) / gridDim[0] - 1
        scaled_y = 2 * float(stimulus[1]) / gridDim[1] - 1
        print("{0} {1} {2} {3}".format(scaled_x, scaled_y, scaled_x, scaled_y))
Add separate left and right eye data generation
Add separate left and right eye data generation
Python
mit
jeffames-cs/nnot
b8848917db13e374238f052f701b2f11e7ab8d36
tests/config_test.py
tests/config_test.py
### Going to fill this out in a subsequent PR
import os
import tempfile
from unittest import TestCase

from dusty import constants, config


class TestConfig(TestCase):
    def setUp(self):
        self.temp_config_path = tempfile.mkstemp()[1]
        self.old_config_path = constants.CONFIG_PATH
        constants.CONFIG_PATH = self.temp_config_path
        self.test_config = {'bundles': ['bundle-a'],
                            'repo_overrides': {'repo-a': '/var/run/repo-a'},
                            'docker_user': 'root'}

    def tearDown(self):
        constants.CONFIG_PATH = self.old_config_path
        os.remove(self.temp_config_path)

    def test_save_and_get_config(self):
        config.save_config(self.test_config)
        self.assertItemsEqual(self.test_config, config.get_config())

    def test_get_config_value(self):
        config.save_config(self.test_config)
        self.assertItemsEqual(config.get_config_value('bundles'), ['bundle-a'])
        self.assertItemsEqual(config.get_config_value('repo_overrides'),
                              {'repo-a': '/var/run/repo-a'})
        self.assertEqual(config.get_config_value('docker_user'), 'root')

    def test_save_config_value(self):
        config.save_config(self.test_config)
        self.assertItemsEqual(config.get_config_value('bundles'), ['bundle-a'])
        config.save_config_value('bundles', ['bundle-b'])
        self.assertItemsEqual(config.get_config_value('bundles'), ['bundle-b'])
        config.save_config_value('new_key', 'bacon')
        self.assertEqual(config.get_config_value('new_key'), 'bacon')

    def test_assert_config_key(self):
        config.save_config(self.test_config)
        config.assert_config_key('docker_user')
        with self.assertRaises(KeyError):
            config.assert_config_key('nonexistent_key')
Add some tests for the config module
Add some tests for the config module
Python
mit
gamechanger/dusty,gamechanger/dusty,gamechanger/dusty,gamechanger/dusty,gamechanger/dusty
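The setUp/tearDown pair above swaps the module-level `constants.CONFIG_PATH` by hand and restores it afterwards; the tests therefore already rely on `config` reading the constant at call time rather than binding it at import. Under that same assumption, a common alternative is `unittest.mock.patch`, which restores the attribute even if the test body raises; a minimal sketch:

import tempfile
from unittest import TestCase, mock  # on Python 2, `mock` is a separate package

from dusty import constants, config


class TestConfigPatched(TestCase):
    def test_save_and_get_config(self):
        with tempfile.NamedTemporaryFile() as f:
            # patch.object restores constants.CONFIG_PATH on exit even if
            # the assertion fails, replacing the manual setUp/tearDown.
            with mock.patch.object(constants, 'CONFIG_PATH', f.name):
                config.save_config({'docker_user': 'root'})
                self.assertEqual(config.get_config()['docker_user'], 'root')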
7635f6dd4a208982f68617c5aeec36fbc6d69dc9
tests/test_compat.py
tests/test_compat.py
from unittest import TestCase

from ddt import ddt, data

from aioresponses.compat import (
    _vanilla_merge_url_params,
    _yarl_merge_url_params
)


@ddt
class CompatTestCase(TestCase):
    use_default_loop = False

    def setUp(self):
        self.url = 'http://example.com/api?foo=bar#fragment'

    @data(
        _vanilla_merge_url_params,
        _yarl_merge_url_params
    )
    def test_no_params_returns_same_url(self, func):
        self.assertEqual(func(self.url, None), self.url)

    @data(
        _vanilla_merge_url_params,
        _yarl_merge_url_params
    )
    def test_empty_params_returns_same_url(self, func):
        self.assertEqual(func(self.url, {}), self.url)

    @data(
        _vanilla_merge_url_params,
        _yarl_merge_url_params
    )
    def test_params_returns_corrected_url(self, func):
        expected_url = 'http://example.com/api?foo=bar&x=42#fragment'
        self.assertEqual(func(self.url, {'x': 42}), expected_url)
from unittest import TestCase

from ddt import ddt, data

from aioresponses.compat import (
    _vanilla_merge_url_params,
    _yarl_merge_url_params,
    URL
)


@ddt
class CompatTestCase(TestCase):
    use_default_loop = False

    def setUp(self):
        self.url = 'http://example.com/api?foo=bar#fragment'
        self.yarn_available = isinstance(URL, str)

    def _get_merge_functions(self):
        if self.yarn_available:
            return {
                _vanilla_merge_url_params,
                _yarl_merge_url_params
            }
        return {
            _vanilla_merge_url_params,
        }

    @data(
        _vanilla_merge_url_params,
        _yarl_merge_url_params
    )
    def test_no_params_returns_same_url(self, func):
        if func in self._get_merge_functions():
            self.assertEqual(func(self.url, None), self.url)

    @data(
        _vanilla_merge_url_params,
        _yarl_merge_url_params
    )
    def test_empty_params_returns_same_url(self, func):
        if func in self._get_merge_functions():
            self.assertEqual(func(self.url, {}), self.url)

    @data(
        _vanilla_merge_url_params,
        _yarl_merge_url_params
    )
    def test_params_returns_corrected_url(self, func):
        if func in self._get_merge_functions():
            expected_url = 'http://example.com/api?foo=bar&x=42#fragment'
            self.assertEqual(func(self.url, {'x': 42}), expected_url)
Fix tests when yarn is not available
Fix tests when yarn is not available
Python
mit
pnuckowski/aioresponses
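For readers unfamiliar with `ddt`, the decorator pair used throughout this record multiplies one test method into one generated test case per argument; a self-contained sketch:

from unittest import TestCase

from ddt import ddt, data


@ddt
class SquareTestCase(TestCase):
    # ddt rewrites the class at creation time, generating one test
    # per @data argument (e.g. test_square_1_2 and test_square_2_3),
    # each receiving its value as the second parameter.
    @data(2, 3)
    def test_square(self, value):
        self.assertEqual(value ** 2, value * value)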
a9b9fe86806fe573a520c6416146b666cf429742
trac/upgrades/db11.py
trac/upgrades/db11.py
import os.path
import shutil

sql = """
-- Remove empty values from the milestone list
DELETE FROM milestone WHERE COALESCE(name,'')='';

-- Add a description column to the version table, and remove unnamed versions
CREATE TEMP TABLE version_old AS SELECT * FROM version;
DROP TABLE version;
CREATE TABLE version (
    name        text PRIMARY KEY,
    time        integer,
    description text
);
INSERT INTO version(name,time,description)
    SELECT name,time,'' FROM version_old WHERE COALESCE(name,'')<>'';

-- Add a description column to the component table, and remove unnamed components
CREATE TEMP TABLE component_old AS SELECT * FROM component;
DROP TABLE component;
CREATE TABLE component (
    name        text PRIMARY KEY,
    owner       text,
    description text
);
INSERT INTO component(name,owner,description)
    SELECT name,owner,'' FROM component_old WHERE COALESCE(name,'')<>'';
"""


def do_upgrade(env, ver, cursor):
    cursor.execute(sql)

    # Copy the new default wiki macros over to the environment
    from trac.siteconfig import __default_macro_dir__ as macro_dir
    for f in os.listdir(macro_dir):
        if not f.endswith('.py'):
            continue
        src = os.path.join(macro_dir, f)
        dst = os.path.join(env.path, 'wiki-macros', f)
        if not os.path.isfile(dst):
            shutil.copy2(src, dst)
import os.path
import shutil

sql = """
-- Remove empty values from the milestone list
DELETE FROM milestone WHERE COALESCE(name,'')='';

-- Add a description column to the version table, and remove unnamed versions
CREATE TEMP TABLE version_old AS SELECT * FROM version;
DROP TABLE version;
CREATE TABLE version (
    name        text PRIMARY KEY,
    time        integer,
    description text
);
INSERT INTO version(name,time,description)
    SELECT name,time,'' FROM version_old WHERE COALESCE(name,'')<>'';

-- Add a description column to the component table, and remove unnamed components
CREATE TEMP TABLE component_old AS SELECT * FROM component;
DROP TABLE component;
CREATE TABLE component (
    name        text PRIMARY KEY,
    owner       text,
    description text
);
INSERT INTO component(name,owner,description)
    SELECT name,owner,'' FROM component_old WHERE COALESCE(name,'')<>'';
"""


def do_upgrade(env, ver, cursor):
    cursor.execute(sql)

    # Copy the new default wiki macros over to the environment
    from trac.siteconfig import __default_macros_dir__ as macros_dir
    for f in os.listdir(macros_dir):
        if not f.endswith('.py'):
            continue
        src = os.path.join(macros_dir, f)
        dst = os.path.join(env.path, 'wiki-macros', f)
        if not os.path.isfile(dst):
            shutil.copy2(src, dst)
Fix typo in upgrade script
Fix typo in upgrade script git-svn-id: eda3d06fcef731589ace1b284159cead3416df9b@1647 af82e41b-90c4-0310-8c96-b1721e28e2e2
Python
bsd-3-clause
jun66j5/trac-ja,netjunki/trac-Pygit2,jun66j5/trac-ja,walty8/trac,netjunki/trac-Pygit2,jun66j5/trac-ja,walty8/trac,netjunki/trac-Pygit2,jun66j5/trac-ja,walty8/trac,walty8/trac
import os.path import shutil sql = """ -- Remove empty values from the milestone list DELETE FROM milestone WHERE COALESCE(name,'')=''; -- Add a description column to the version table, and remove unnamed versions CREATE TEMP TABLE version_old AS SELECT * FROM version; DROP TABLE version; CREATE TABLE version ( name text PRIMARY KEY, time integer, description text ); INSERT INTO version(name,time,description) SELECT name,time,'' FROM version_old WHERE COALESCE(name,'')<>''; -- Add a description column to the component table, and remove unnamed components CREATE TEMP TABLE component_old AS SELECT * FROM component; DROP TABLE component; CREATE TABLE component ( name text PRIMARY KEY, owner text, description text ); INSERT INTO component(name,owner,description) SELECT name,owner,'' FROM component_old WHERE COALESCE(name,'')<>''; """ def do_upgrade(env, ver, cursor): cursor.execute(sql) # Copy the new default wiki macros over to the environment from trac.siteconfig import __default_macro_dir__ as macro_dir for f in os.listdir(macro_dir): if not f.endswith('.py'): continue src = os.path.join(macro_dir, f) dst = os.path.join(env.path, 'wiki-macros', f) if not os.path.isfile(dst): shutil.copy2(src, dst) Fix typo in upgrade script git-svn-id: eda3d06fcef731589ace1b284159cead3416df9b@1647 af82e41b-90c4-0310-8c96-b1721e28e2e2
import os.path import shutil sql = """ -- Remove empty values from the milestone list DELETE FROM milestone WHERE COALESCE(name,'')=''; -- Add a description column to the version table, and remove unnamed versions CREATE TEMP TABLE version_old AS SELECT * FROM version; DROP TABLE version; CREATE TABLE version ( name text PRIMARY KEY, time integer, description text ); INSERT INTO version(name,time,description) SELECT name,time,'' FROM version_old WHERE COALESCE(name,'')<>''; -- Add a description column to the component table, and remove unnamed components CREATE TEMP TABLE component_old AS SELECT * FROM component; DROP TABLE component; CREATE TABLE component ( name text PRIMARY KEY, owner text, description text ); INSERT INTO component(name,owner,description) SELECT name,owner,'' FROM component_old WHERE COALESCE(name,'')<>''; """ def do_upgrade(env, ver, cursor): cursor.execute(sql) # Copy the new default wiki macros over to the environment from trac.siteconfig import __default_macros_dir__ as macros_dir for f in os.listdir(macros_dir): if not f.endswith('.py'): continue src = os.path.join(macros_dir, f) dst = os.path.join(env.path, 'wiki-macros', f) if not os.path.isfile(dst): shutil.copy2(src, dst)
<commit_before>import os.path import shutil sql = """ -- Remove empty values from the milestone list DELETE FROM milestone WHERE COALESCE(name,'')=''; -- Add a description column to the version table, and remove unnamed versions CREATE TEMP TABLE version_old AS SELECT * FROM version; DROP TABLE version; CREATE TABLE version ( name text PRIMARY KEY, time integer, description text ); INSERT INTO version(name,time,description) SELECT name,time,'' FROM version_old WHERE COALESCE(name,'')<>''; -- Add a description column to the component table, and remove unnamed components CREATE TEMP TABLE component_old AS SELECT * FROM component; DROP TABLE component; CREATE TABLE component ( name text PRIMARY KEY, owner text, description text ); INSERT INTO component(name,owner,description) SELECT name,owner,'' FROM component_old WHERE COALESCE(name,'')<>''; """ def do_upgrade(env, ver, cursor): cursor.execute(sql) # Copy the new default wiki macros over to the environment from trac.siteconfig import __default_macro_dir__ as macro_dir for f in os.listdir(macro_dir): if not f.endswith('.py'): continue src = os.path.join(macro_dir, f) dst = os.path.join(env.path, 'wiki-macros', f) if not os.path.isfile(dst): shutil.copy2(src, dst) <commit_msg>Fix typo in upgrade script git-svn-id: eda3d06fcef731589ace1b284159cead3416df9b@1647 af82e41b-90c4-0310-8c96-b1721e28e2e2<commit_after>
import os.path import shutil sql = """ -- Remove empty values from the milestone list DELETE FROM milestone WHERE COALESCE(name,'')=''; -- Add a description column to the version table, and remove unnamed versions CREATE TEMP TABLE version_old AS SELECT * FROM version; DROP TABLE version; CREATE TABLE version ( name text PRIMARY KEY, time integer, description text ); INSERT INTO version(name,time,description) SELECT name,time,'' FROM version_old WHERE COALESCE(name,'')<>''; -- Add a description column to the component table, and remove unnamed components CREATE TEMP TABLE component_old AS SELECT * FROM component; DROP TABLE component; CREATE TABLE component ( name text PRIMARY KEY, owner text, description text ); INSERT INTO component(name,owner,description) SELECT name,owner,'' FROM component_old WHERE COALESCE(name,'')<>''; """ def do_upgrade(env, ver, cursor): cursor.execute(sql) # Copy the new default wiki macros over to the environment from trac.siteconfig import __default_macros_dir__ as macros_dir for f in os.listdir(macros_dir): if not f.endswith('.py'): continue src = os.path.join(macros_dir, f) dst = os.path.join(env.path, 'wiki-macros', f) if not os.path.isfile(dst): shutil.copy2(src, dst)
import os.path import shutil sql = """ -- Remove empty values from the milestone list DELETE FROM milestone WHERE COALESCE(name,'')=''; -- Add a description column to the version table, and remove unnamed versions CREATE TEMP TABLE version_old AS SELECT * FROM version; DROP TABLE version; CREATE TABLE version ( name text PRIMARY KEY, time integer, description text ); INSERT INTO version(name,time,description) SELECT name,time,'' FROM version_old WHERE COALESCE(name,'')<>''; -- Add a description column to the component table, and remove unnamed components CREATE TEMP TABLE component_old AS SELECT * FROM component; DROP TABLE component; CREATE TABLE component ( name text PRIMARY KEY, owner text, description text ); INSERT INTO component(name,owner,description) SELECT name,owner,'' FROM component_old WHERE COALESCE(name,'')<>''; """ def do_upgrade(env, ver, cursor): cursor.execute(sql) # Copy the new default wiki macros over to the environment from trac.siteconfig import __default_macro_dir__ as macro_dir for f in os.listdir(macro_dir): if not f.endswith('.py'): continue src = os.path.join(macro_dir, f) dst = os.path.join(env.path, 'wiki-macros', f) if not os.path.isfile(dst): shutil.copy2(src, dst) Fix typo in upgrade script git-svn-id: eda3d06fcef731589ace1b284159cead3416df9b@1647 af82e41b-90c4-0310-8c96-b1721e28e2e2import os.path import shutil sql = """ -- Remove empty values from the milestone list DELETE FROM milestone WHERE COALESCE(name,'')=''; -- Add a description column to the version table, and remove unnamed versions CREATE TEMP TABLE version_old AS SELECT * FROM version; DROP TABLE version; CREATE TABLE version ( name text PRIMARY KEY, time integer, description text ); INSERT INTO version(name,time,description) SELECT name,time,'' FROM version_old WHERE COALESCE(name,'')<>''; -- Add a description column to the component table, and remove unnamed components CREATE TEMP TABLE component_old AS SELECT * FROM component; DROP TABLE component; CREATE TABLE component ( name text PRIMARY KEY, owner text, description text ); INSERT INTO component(name,owner,description) SELECT name,owner,'' FROM component_old WHERE COALESCE(name,'')<>''; """ def do_upgrade(env, ver, cursor): cursor.execute(sql) # Copy the new default wiki macros over to the environment from trac.siteconfig import __default_macros_dir__ as macros_dir for f in os.listdir(macros_dir): if not f.endswith('.py'): continue src = os.path.join(macros_dir, f) dst = os.path.join(env.path, 'wiki-macros', f) if not os.path.isfile(dst): shutil.copy2(src, dst)
<commit_before>import os.path import shutil sql = """ -- Remove empty values from the milestone list DELETE FROM milestone WHERE COALESCE(name,'')=''; -- Add a description column to the version table, and remove unnamed versions CREATE TEMP TABLE version_old AS SELECT * FROM version; DROP TABLE version; CREATE TABLE version ( name text PRIMARY KEY, time integer, description text ); INSERT INTO version(name,time,description) SELECT name,time,'' FROM version_old WHERE COALESCE(name,'')<>''; -- Add a description column to the component table, and remove unnamed components CREATE TEMP TABLE component_old AS SELECT * FROM component; DROP TABLE component; CREATE TABLE component ( name text PRIMARY KEY, owner text, description text ); INSERT INTO component(name,owner,description) SELECT name,owner,'' FROM component_old WHERE COALESCE(name,'')<>''; """ def do_upgrade(env, ver, cursor): cursor.execute(sql) # Copy the new default wiki macros over to the environment from trac.siteconfig import __default_macro_dir__ as macro_dir for f in os.listdir(macro_dir): if not f.endswith('.py'): continue src = os.path.join(macro_dir, f) dst = os.path.join(env.path, 'wiki-macros', f) if not os.path.isfile(dst): shutil.copy2(src, dst) <commit_msg>Fix typo in upgrade script git-svn-id: eda3d06fcef731589ace1b284159cead3416df9b@1647 af82e41b-90c4-0310-8c96-b1721e28e2e2<commit_after>import os.path import shutil sql = """ -- Remove empty values from the milestone list DELETE FROM milestone WHERE COALESCE(name,'')=''; -- Add a description column to the version table, and remove unnamed versions CREATE TEMP TABLE version_old AS SELECT * FROM version; DROP TABLE version; CREATE TABLE version ( name text PRIMARY KEY, time integer, description text ); INSERT INTO version(name,time,description) SELECT name,time,'' FROM version_old WHERE COALESCE(name,'')<>''; -- Add a description column to the component table, and remove unnamed components CREATE TEMP TABLE component_old AS SELECT * FROM component; DROP TABLE component; CREATE TABLE component ( name text PRIMARY KEY, owner text, description text ); INSERT INTO component(name,owner,description) SELECT name,owner,'' FROM component_old WHERE COALESCE(name,'')<>''; """ def do_upgrade(env, ver, cursor): cursor.execute(sql) # Copy the new default wiki macros over to the environment from trac.siteconfig import __default_macros_dir__ as macros_dir for f in os.listdir(macros_dir): if not f.endswith('.py'): continue src = os.path.join(macros_dir, f) dst = os.path.join(env.path, 'wiki-macros', f) if not os.path.isfile(dst): shutil.copy2(src, dst)
b65c733949b02531b14e7eb7868d275deff4c192
tests/test_trivia.py
tests/test_trivia.py
import unittest from units.trivia import check_answer class TestCheckAnswer(unittest.TestCase): def test_correct_answer(self): self.assertTrue(check_answer("correct", "correct")) def test_incorrect_answer(self): self.assertFalse(check_answer("correct", "incorrect")) def test_parentheses_with_article_prefix(self): self.assertTrue( check_answer( "the ISS (the International Space Station)", "International Space Station" ) ) self.assertTrue( check_answer("Holland (The Netherlands)", "Netherlands") ) if __name__ == "__main__": unittest.main()
import unittest from units.trivia import check_answer class TestCheckAnswer(unittest.TestCase): def test_correct_answer(self): self.assertTrue(check_answer("correct", "correct")) def test_incorrect_answer(self): self.assertFalse(check_answer("correct", "incorrect")) def test_parentheses_with_article_prefix(self): self.assertTrue( check_answer( "the ISS (the International Space Station)", "International Space Station" ) ) self.assertTrue( check_answer("Holland (The Netherlands)", "Netherlands") ) def test_wrong_encoding(self): self.assertTrue(check_answer("a résumé", "resume")) if __name__ == "__main__": unittest.main()
Test trivia answer with wrong encoding
[Tests] Test trivia answer with wrong encoding
Python
mit
Harmon758/Harmonbot,Harmon758/Harmonbot
import unittest from units.trivia import check_answer class TestCheckAnswer(unittest.TestCase): def test_correct_answer(self): self.assertTrue(check_answer("correct", "correct")) def test_incorrect_answer(self): self.assertFalse(check_answer("correct", "incorrect")) def test_parentheses_with_article_prefix(self): self.assertTrue( check_answer( "the ISS (the International Space Station)", "International Space Station" ) ) self.assertTrue( check_answer("Holland (The Netherlands)", "Netherlands") ) if __name__ == "__main__": unittest.main() [Tests] Test trivia answer with wrong encoding
import unittest from units.trivia import check_answer class TestCheckAnswer(unittest.TestCase): def test_correct_answer(self): self.assertTrue(check_answer("correct", "correct")) def test_incorrect_answer(self): self.assertFalse(check_answer("correct", "incorrect")) def test_parentheses_with_article_prefix(self): self.assertTrue( check_answer( "the ISS (the International Space Station)", "International Space Station" ) ) self.assertTrue( check_answer("Holland (The Netherlands)", "Netherlands") ) def test_wrong_encoding(self): self.assertTrue(check_answer("a résumé", "resume")) if __name__ == "__main__": unittest.main()
<commit_before> import unittest from units.trivia import check_answer class TestCheckAnswer(unittest.TestCase): def test_correct_answer(self): self.assertTrue(check_answer("correct", "correct")) def test_incorrect_answer(self): self.assertFalse(check_answer("correct", "incorrect")) def test_parentheses_with_article_prefix(self): self.assertTrue( check_answer( "the ISS (the International Space Station)", "International Space Station" ) ) self.assertTrue( check_answer("Holland (The Netherlands)", "Netherlands") ) if __name__ == "__main__": unittest.main() <commit_msg>[Tests] Test trivia answer with wrong encoding<commit_after>
import unittest from units.trivia import check_answer class TestCheckAnswer(unittest.TestCase): def test_correct_answer(self): self.assertTrue(check_answer("correct", "correct")) def test_incorrect_answer(self): self.assertFalse(check_answer("correct", "incorrect")) def test_parentheses_with_article_prefix(self): self.assertTrue( check_answer( "the ISS (the International Space Station)", "International Space Station" ) ) self.assertTrue( check_answer("Holland (The Netherlands)", "Netherlands") ) def test_wrong_encoding(self): self.assertTrue(check_answer("a résumé", "resume")) if __name__ == "__main__": unittest.main()
import unittest from units.trivia import check_answer class TestCheckAnswer(unittest.TestCase): def test_correct_answer(self): self.assertTrue(check_answer("correct", "correct")) def test_incorrect_answer(self): self.assertFalse(check_answer("correct", "incorrect")) def test_parentheses_with_article_prefix(self): self.assertTrue( check_answer( "the ISS (the International Space Station)", "International Space Station" ) ) self.assertTrue( check_answer("Holland (The Netherlands)", "Netherlands") ) if __name__ == "__main__": unittest.main() [Tests] Test trivia answer with wrong encoding import unittest from units.trivia import check_answer class TestCheckAnswer(unittest.TestCase): def test_correct_answer(self): self.assertTrue(check_answer("correct", "correct")) def test_incorrect_answer(self): self.assertFalse(check_answer("correct", "incorrect")) def test_parentheses_with_article_prefix(self): self.assertTrue( check_answer( "the ISS (the International Space Station)", "International Space Station" ) ) self.assertTrue( check_answer("Holland (The Netherlands)", "Netherlands") ) def test_wrong_encoding(self): self.assertTrue(check_answer("a résumé", "resume")) if __name__ == "__main__": unittest.main()
<commit_before> import unittest from units.trivia import check_answer class TestCheckAnswer(unittest.TestCase): def test_correct_answer(self): self.assertTrue(check_answer("correct", "correct")) def test_incorrect_answer(self): self.assertFalse(check_answer("correct", "incorrect")) def test_parentheses_with_article_prefix(self): self.assertTrue( check_answer( "the ISS (the International Space Station)", "International Space Station" ) ) self.assertTrue( check_answer("Holland (The Netherlands)", "Netherlands") ) if __name__ == "__main__": unittest.main() <commit_msg>[Tests] Test trivia answer with wrong encoding<commit_after> import unittest from units.trivia import check_answer class TestCheckAnswer(unittest.TestCase): def test_correct_answer(self): self.assertTrue(check_answer("correct", "correct")) def test_incorrect_answer(self): self.assertFalse(check_answer("correct", "incorrect")) def test_parentheses_with_article_prefix(self): self.assertTrue( check_answer( "the ISS (the International Space Station)", "International Space Station" ) ) self.assertTrue( check_answer("Holland (The Netherlands)", "Netherlands") ) def test_wrong_encoding(self): self.assertTrue(check_answer("a résumé", "resume")) if __name__ == "__main__": unittest.main()
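The record above only adds a regression test for accented input; the check_answer implementation in units/trivia.py is not part of the record. A minimal sketch of accent-insensitive matching that would satisfy the tests shown, assuming NFKD decomposition for accent folding (the function names and the loose substring fallback are illustrative assumptions, not Harmonbot's actual code):

import unicodedata

def fold(text):
    # Decompose to NFKD and drop combining marks, then lowercase,
    # so "résumé" compares equal to "resume".
    decomposed = unicodedata.normalize("NFKD", text)
    return "".join(c for c in decomposed if not unicodedata.combining(c)).lower()

def accent_insensitive_match(answer, response):
    # Deliberately permissive sketch: exact folded match, or the folded
    # response contained in the folded answer (covers the article prefixes
    # and parenthesized alternatives in the tests above).
    folded_answer, folded_response = fold(answer), fold(response)
    return folded_response == folded_answer or folded_response in folded_answer

assert accent_insensitive_match("a résumé", "resume")
assert not accent_insensitive_match("correct", "incorrect")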
cd084fced40beb429474fabf33dff675e9ccb522
syncplay/__init__.py
syncplay/__init__.py
version = '1.6.7' revision = '' milestone = 'Yoitsu' release_number = '94' projectURL = 'https://syncplay.pl/'
version = '1.6.8' revision = ' development' milestone = 'Yoitsu' release_number = '95' projectURL = 'https://syncplay.pl/'
Mark as 1.6.8 dev (build 95)
Mark as 1.6.8 dev (build 95)
Python
apache-2.0
alby128/syncplay,alby128/syncplay,Syncplay/syncplay,Syncplay/syncplay
version = '1.6.7' revision = '' milestone = 'Yoitsu' release_number = '94' projectURL = 'https://syncplay.pl/' Mark as 1.6.8 dev (build 95)
version = '1.6.8' revision = ' development' milestone = 'Yoitsu' release_number = '95' projectURL = 'https://syncplay.pl/'
<commit_before>version = '1.6.7' revision = '' milestone = 'Yoitsu' release_number = '94' projectURL = 'https://syncplay.pl/' <commit_msg>Mark as 1.6.8 dev (build 95)<commit_after>
version = '1.6.8' revision = ' development' milestone = 'Yoitsu' release_number = '95' projectURL = 'https://syncplay.pl/'
version = '1.6.7' revision = '' milestone = 'Yoitsu' release_number = '94' projectURL = 'https://syncplay.pl/' Mark as 1.6.8 dev (build 95)version = '1.6.8' revision = ' development' milestone = 'Yoitsu' release_number = '95' projectURL = 'https://syncplay.pl/'
<commit_before>version = '1.6.7' revision = '' milestone = 'Yoitsu' release_number = '94' projectURL = 'https://syncplay.pl/' <commit_msg>Mark as 1.6.8 dev (build 95)<commit_after>version = '1.6.8' revision = ' development' milestone = 'Yoitsu' release_number = '95' projectURL = 'https://syncplay.pl/'
547e2cbddd26f2e158fbbdab8ae22605cbd270c9
joby/items.py
joby/items.py
# -*- coding: utf-8 -*- # Define here the models for your scraped items # # See documentation in: # http://doc.scrapy.org/en/latest/topics/items.html from scrapy import Item, Field class JobItem(Item): website_url = Field() website_language = Field() publication_date = Field() posting_id = Field() url = Field() number_of_views = Field() contact_email = Field() contact_name = Field() employment_type = Field() workload = Field() duration = Field() remote = Field() title = Field(primary_key=True) keywords = Field() abstract = Field() description = Field() salary = Field() level = Field() responsabilities = Field() required_skills = Field() required_languages = Field() company = Field(primary_key=True) city = Field() country = Field() postal_code = Field() company_website = Field() company_category = Field() company_description = Field() start_date = Field() end_date = Field() class DataScienceJobsJobItem(Job): pass
# -*- coding: utf-8 -*- # Define here the models for your scraped items # # See documentation in: # http://doc.scrapy.org/en/latest/topics/items.html from scrapy import Item, Field from scrapy.loader import Identity, ItemLoader from scrapy.loader.processors import TakeFirst class JobItem(Item): website_url = Field() website_language = Field() publication_date = Field() posting_id = Field() url = Field() number_of_views = Field() contact_email = Field() contact_name = Field() employment_type = Field() workload = Field() duration = Field() remote = Field() title = Field(primary_key=True) keywords = Field() abstract = Field() description = Field() salary = Field() level = Field() responsabilities = Field() required_skills = Field() required_languages = Field() company = Field(primary_key=True) city = Field() country = Field() postal_code = Field() company_website = Field() company_category = Field() company_description = Field() start_date = Field() end_date = Field() class DataScienceJobsJobItem(JobItem): pass class JobItemLoader(ItemLoader): default_input_processor = Identity() default_output_processor = TakeFirst() class DataScienceJobsItemLoader(JobItemLoader): pass
Add JobItemLoader and DataScienceJobsItemLoader class.
Add JobItemLoader and DataScienceJobsItemLoader class.
Python
mit
cyberbikepunk/job-spiders
# -*- coding: utf-8 -*- # Define here the models for your scraped items # # See documentation in: # http://doc.scrapy.org/en/latest/topics/items.html from scrapy import Item, Field class JobItem(Item): website_url = Field() website_language = Field() publication_date = Field() posting_id = Field() url = Field() number_of_views = Field() contact_email = Field() contact_name = Field() employment_type = Field() workload = Field() duration = Field() remote = Field() title = Field(primary_key=True) keywords = Field() abstract = Field() description = Field() salary = Field() level = Field() responsabilities = Field() required_skills = Field() required_languages = Field() company = Field(primary_key=True) city = Field() country = Field() postal_code = Field() company_website = Field() company_category = Field() company_description = Field() start_date = Field() end_date = Field() class DataScienceJobsJobItem(Job): pass Add JobItemLoader and DataScienceJobsItemLoader class.
# -*- coding: utf-8 -*- # Define here the models for your scraped items # # See documentation in: # http://doc.scrapy.org/en/latest/topics/items.html from scrapy import Item, Field from scrapy.loader import Identity, ItemLoader from scrapy.loader.processors import TakeFirst class JobItem(Item): website_url = Field() website_language = Field() publication_date = Field() posting_id = Field() url = Field() number_of_views = Field() contact_email = Field() contact_name = Field() employment_type = Field() workload = Field() duration = Field() remote = Field() title = Field(primary_key=True) keywords = Field() abstract = Field() description = Field() salary = Field() level = Field() responsabilities = Field() required_skills = Field() required_languages = Field() company = Field(primary_key=True) city = Field() country = Field() postal_code = Field() company_website = Field() company_category = Field() company_description = Field() start_date = Field() end_date = Field() class DataScienceJobsJobItem(JobItem): pass class JobItemLoader(ItemLoader): default_input_processor = Identity() default_output_processor = TakeFirst() class DataScienceJobsItemLoader(JobItemLoader): pass
<commit_before># -*- coding: utf-8 -*- # Define here the models for your scraped items # # See documentation in: # http://doc.scrapy.org/en/latest/topics/items.html from scrapy import Item, Field class JobItem(Item): website_url = Field() website_language = Field() publication_date = Field() posting_id = Field() url = Field() number_of_views = Field() contact_email = Field() contact_name = Field() employment_type = Field() workload = Field() duration = Field() remote = Field() title = Field(primary_key=True) keywords = Field() abstract = Field() description = Field() salary = Field() level = Field() responsabilities = Field() required_skills = Field() required_languages = Field() company = Field(primary_key=True) city = Field() country = Field() postal_code = Field() company_website = Field() company_category = Field() company_description = Field() start_date = Field() end_date = Field() class DataScienceJobsJobItem(Job): pass <commit_msg>Add JobItemLoader and DataScienceJobsItemLoader class.<commit_after>
# -*- coding: utf-8 -*- # Define here the models for your scraped items # # See documentation in: # http://doc.scrapy.org/en/latest/topics/items.html from scrapy import Item, Field from scrapy.loader import Identity, ItemLoader from scrapy.loader.processors import TakeFirst class JobItem(Item): website_url = Field() website_language = Field() publication_date = Field() posting_id = Field() url = Field() number_of_views = Field() contact_email = Field() contact_name = Field() employment_type = Field() workload = Field() duration = Field() remote = Field() title = Field(primary_key=True) keywords = Field() abstract = Field() description = Field() salary = Field() level = Field() responsabilities = Field() required_skills = Field() required_languages = Field() company = Field(primary_key=True) city = Field() country = Field() postal_code = Field() company_website = Field() company_category = Field() company_description = Field() start_date = Field() end_date = Field() class DataScienceJobsJobItem(JobItem): pass class JobItemLoader(ItemLoader): default_input_processor = Identity() default_output_processor = TakeFirst() class DataScienceJobsItemLoader(JobItemLoader): pass
# -*- coding: utf-8 -*- # Define here the models for your scraped items # # See documentation in: # http://doc.scrapy.org/en/latest/topics/items.html from scrapy import Item, Field class JobItem(Item): website_url = Field() website_language = Field() publication_date = Field() posting_id = Field() url = Field() number_of_views = Field() contact_email = Field() contact_name = Field() employment_type = Field() workload = Field() duration = Field() remote = Field() title = Field(primary_key=True) keywords = Field() abstract = Field() description = Field() salary = Field() level = Field() responsabilities = Field() required_skills = Field() required_languages = Field() company = Field(primary_key=True) city = Field() country = Field() postal_code = Field() company_website = Field() company_category = Field() company_description = Field() start_date = Field() end_date = Field() class DataScienceJobsJobItem(Job): pass Add JobItemLoader and DataScienceJobsItemLoader class.# -*- coding: utf-8 -*- # Define here the models for your scraped items # # See documentation in: # http://doc.scrapy.org/en/latest/topics/items.html from scrapy import Item, Field from scrapy.loader import Identity, ItemLoader from scrapy.loader.processors import TakeFirst class JobItem(Item): website_url = Field() website_language = Field() publication_date = Field() posting_id = Field() url = Field() number_of_views = Field() contact_email = Field() contact_name = Field() employment_type = Field() workload = Field() duration = Field() remote = Field() title = Field(primary_key=True) keywords = Field() abstract = Field() description = Field() salary = Field() level = Field() responsabilities = Field() required_skills = Field() required_languages = Field() company = Field(primary_key=True) city = Field() country = Field() postal_code = Field() company_website = Field() company_category = Field() company_description = Field() start_date = Field() end_date = Field() class DataScienceJobsJobItem(JobItem): pass class JobItemLoader(ItemLoader): default_input_processor = Identity() default_output_processor = TakeFirst() class DataScienceJobsItemLoader(JobItemLoader): pass
<commit_before># -*- coding: utf-8 -*- # Define here the models for your scraped items # # See documentation in: # http://doc.scrapy.org/en/latest/topics/items.html from scrapy import Item, Field class JobItem(Item): website_url = Field() website_language = Field() publication_date = Field() posting_id = Field() url = Field() number_of_views = Field() contact_email = Field() contact_name = Field() employment_type = Field() workload = Field() duration = Field() remote = Field() title = Field(primary_key=True) keywords = Field() abstract = Field() description = Field() salary = Field() level = Field() responsabilities = Field() required_skills = Field() required_languages = Field() company = Field(primary_key=True) city = Field() country = Field() postal_code = Field() company_website = Field() company_category = Field() company_description = Field() start_date = Field() end_date = Field() class DataScienceJobsJobItem(Job): pass <commit_msg>Add JobItemLoader and DataScienceJobsItemLoader class.<commit_after># -*- coding: utf-8 -*- # Define here the models for your scraped items # # See documentation in: # http://doc.scrapy.org/en/latest/topics/items.html from scrapy import Item, Field from scrapy.loader import Identity, ItemLoader from scrapy.loader.processors import TakeFirst class JobItem(Item): website_url = Field() website_language = Field() publication_date = Field() posting_id = Field() url = Field() number_of_views = Field() contact_email = Field() contact_name = Field() employment_type = Field() workload = Field() duration = Field() remote = Field() title = Field(primary_key=True) keywords = Field() abstract = Field() description = Field() salary = Field() level = Field() responsabilities = Field() required_skills = Field() required_languages = Field() company = Field(primary_key=True) city = Field() country = Field() postal_code = Field() company_website = Field() company_category = Field() company_description = Field() start_date = Field() end_date = Field() class DataScienceJobsJobItem(JobItem): pass class JobItemLoader(ItemLoader): default_input_processor = Identity() default_output_processor = TakeFirst() class DataScienceJobsItemLoader(JobItemLoader): pass
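A brief usage sketch for the loader classes this commit introduces, assuming a Scrapy release where the commit's imports resolve as written (in many Scrapy versions Identity is exported from scrapy.loader.processors rather than scrapy.loader, so the import line may need adjusting there); the CSS selector below is hypothetical:

from joby.items import DataScienceJobsItemLoader, DataScienceJobsJobItem

def parse_job(response):
    # Identity keeps extracted values untouched on input; TakeFirst
    # collapses each field's list of values to its first element on output.
    loader = DataScienceJobsItemLoader(item=DataScienceJobsJobItem(), response=response)
    loader.add_css("title", "h1.job-title::text")  # hypothetical selector
    loader.add_value("url", response.url)
    return loader.load_item()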
49891ea015c082443b2c709650d125a649e36187
poolwatcher/poolWatcher.py
poolwatcher/poolWatcher.py
# # Description: # This is the main of the poolWatcher # # Arguments: (eventually) # $1 = poll period (in seconds) # $2 = advertize rate (every $2 loops) # $3 = glidein submit_dir # # Author: # Igor Sfiligoi (Feb 13th 2007) # import os import os.path import sys import traceback import time sys.path.append("../lib") import condorMonitor while 1: status=condorMonitor.CondorStatus() status.load() gsite=condorMonitor.Group(status,lambda el:el['GLIDEIN_Site'],lambda el:el) gsite.load() sites=gsite.fetchStored() print time.ctime() for s in sites.keys(): print "Site: '%s' VMs: %i"%(s,len(sites[s])) time.sleep(30)
# # Description: # This is the main of the poolWatcher # # Arguments: (eventually) # $1 = poll period (in seconds) # $2 = advertize rate (every $2 loops) # $3 = glidein submit_dir # # Author: # Igor Sfiligoi (Feb 13th 2007) # import os import os.path import sys import traceback import time sys.path.append("../lib") import condorMonitor while 1: status=condorMonitor.CondorStatus() status.load() ssite=condorMonitor.Summarize(status,lambda el:[el['GLIDEIN_Site'],el['State']]) sites=ssite.countStored() print time.ctime() for s in sites.keys(): states=sites[s] if states.has_key('Claimed'): claimed=states['Claimed'] else: claimed=0 if states.has_key('Unclaimed'): unclaimed=states['Unclaimed'] else: unclaimed=0 print "Site: '%s' Claimed: %i Unclaimed: %i"%(s,claimed,unclaimed) time.sleep(30)
Add distinction between claimed and unclaimed
Add distinction between claimed and unclaimed
Python
bsd-3-clause
holzman/glideinwms-old,bbockelm/glideinWMS,bbockelm/glideinWMS,bbockelm/glideinWMS,holzman/glideinwms-old,bbockelm/glideinWMS,holzman/glideinwms-old
# # Description: # This is the main of the poolWatcher # # Arguments: (eventually) # $1 = poll period (in seconds) # $2 = advertize rate (every $2 loops) # $3 = glidein submit_dir # # Author: # Igor Sfiligoi (Feb 13th 2007) # import os import os.path import sys import traceback import time sys.path.append("../lib") import condorMonitor while 1: status=condorMonitor.CondorStatus() status.load() gsite=condorMonitor.Group(status,lambda el:el['GLIDEIN_Site'],lambda el:el) gsite.load() sites=gsite.fetchStored() print time.ctime() for s in sites.keys(): print "Site: '%s' VMs: %i"%(s,len(sites[s])) time.sleep(30) Add distinction between claimed and unclaimed
# # Description: # This is the main of the poolWatcher # # Arguments: (eventually) # $1 = poll period (in seconds) # $2 = advertize rate (every $2 loops) # $3 = glidein submit_dir # # Author: # Igor Sfiligoi (Feb 13th 2007) # import os import os.path import sys import traceback import time sys.path.append("../lib") import condorMonitor while 1: status=condorMonitor.CondorStatus() status.load() ssite=condorMonitor.Summarize(status,lambda el:[el['GLIDEIN_Site'],el['State']]) sites=ssite.countStored() print time.ctime() for s in sites.keys(): states=sites[s] if states.has_key('Claimed'): claimed=states['Claimed'] else: claimed=0 if states.has_key('Unclaimed'): unclaimed=states['Unclaimed'] else: unclaimed=0 print "Site: '%s' Claimed: %i Unclaimed: %i"%(s,claimed,unclaimed) time.sleep(30)
<commit_before># # Description: # This is the main of the poolWatcher # # Arguments: (eventually) # $1 = poll period (in seconds) # $2 = advertize rate (every $2 loops) # $3 = glidein submit_dir # # Author: # Igor Sfiligoi (Feb 13th 2007) # import os import os.path import sys import traceback import time sys.path.append("../lib") import condorMonitor while 1: status=condorMonitor.CondorStatus() status.load() gsite=condorMonitor.Group(status,lambda el:el['GLIDEIN_Site'],lambda el:el) gsite.load() sites=gsite.fetchStored() print time.ctime() for s in sites.keys(): print "Site: '%s' VMs: %i"%(s,len(sites[s])) time.sleep(30) <commit_msg>Add distinction between claimed and unclaimed<commit_after>
# # Description: # This is the main of the poolWatcher # # Arguments: (eventually) # $1 = poll period (in seconds) # $2 = advertize rate (every $2 loops) # $3 = glidein submit_dir # # Author: # Igor Sfiligoi (Feb 13th 2007) # import os import os.path import sys import traceback import time sys.path.append("../lib") import condorMonitor while 1: status=condorMonitor.CondorStatus() status.load() ssite=condorMonitor.Summarize(status,lambda el:[el['GLIDEIN_Site'],el['State']]) sites=ssite.countStored() print time.ctime() for s in sites.keys(): states=sites[s] if states.has_key('Claimed'): claimed=states['Claimed'] else: claimed=0 if states.has_key('Unclaimed'): unclaimed=states['Unclaimed'] else: unclaimed=0 print "Site: '%s' Claimed: %i Unclaimed: %i"%(s,claimed,unclaimed) time.sleep(30)
# # Description: # This is the main of the poolWatcher # # Arguments: (eventually) # $1 = poll period (in seconds) # $2 = advertize rate (every $2 loops) # $3 = glidein submit_dir # # Author: # Igor Sfiligoi (Feb 13th 2007) # import os import os.path import sys import traceback import time sys.path.append("../lib") import condorMonitor while 1: status=condorMonitor.CondorStatus() status.load() gsite=condorMonitor.Group(status,lambda el:el['GLIDEIN_Site'],lambda el:el) gsite.load() sites=gsite.fetchStored() print time.ctime() for s in sites.keys(): print "Site: '%s' VMs: %i"%(s,len(sites[s])) time.sleep(30) Add distinction between claimed and unclaimed# # Description: # This is the main of the poolWatcher # # Arguments: (eventually) # $1 = poll period (in seconds) # $2 = advertize rate (every $2 loops) # $3 = glidein submit_dir # # Author: # Igor Sfiligoi (Feb 13th 2007) # import os import os.path import sys import traceback import time sys.path.append("../lib") import condorMonitor while 1: status=condorMonitor.CondorStatus() status.load() ssite=condorMonitor.Summarize(status,lambda el:[el['GLIDEIN_Site'],el['State']]) sites=ssite.countStored() print time.ctime() for s in sites.keys(): states=sites[s] if states.has_key('Claimed'): claimed=states['Claimed'] else: claimed=0 if states.has_key('Unclaimed'): unclaimed=states['Unclaimed'] else: unclaimed=0 print "Site: '%s' Claimed: %i Unclaimed: %i"%(s,claimed,unclaimed) time.sleep(30)
<commit_before># # Description: # This is the main of the poolWatcher # # Arguments: (eventually) # $1 = poll period (in seconds) # $2 = advertize rate (every $2 loops) # $3 = glidein submit_dir # # Author: # Igor Sfiligoi (Feb 13th 2007) # import os import os.path import sys import traceback import time sys.path.append("../lib") import condorMonitor while 1: status=condorMonitor.CondorStatus() status.load() gsite=condorMonitor.Group(status,lambda el:el['GLIDEIN_Site'],lambda el:el) gsite.load() sites=gsite.fetchStored() print time.ctime() for s in sites.keys(): print "Site: '%s' VMs: %i"%(s,len(sites[s])) time.sleep(30) <commit_msg>Add distinction between claimed and unclaimed<commit_after># # Description: # This is the main of the poolWatcher # # Arguments: (eventually) # $1 = poll period (in seconds) # $2 = advertize rate (every $2 loops) # $3 = glidein submit_dir # # Author: # Igor Sfiligoi (Feb 13th 2007) # import os import os.path import sys import traceback import time sys.path.append("../lib") import condorMonitor while 1: status=condorMonitor.CondorStatus() status.load() ssite=condorMonitor.Summarize(status,lambda el:[el['GLIDEIN_Site'],el['State']]) sites=ssite.countStored() print time.ctime() for s in sites.keys(): states=sites[s] if states.has_key('Claimed'): claimed=states['Claimed'] else: claimed=0 if states.has_key('Unclaimed'): unclaimed=states['Unclaimed'] else: unclaimed=0 print "Site: '%s' Claimed: %i Unclaimed: %i"%(s,claimed,unclaimed) time.sleep(30)
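The claimed/unclaimed bookkeeping in the new loop can also be expressed as a single pass with collections.Counter; a sketch assuming each machine ad behaves like a mapping with 'GLIDEIN_Site' and 'State' keys (the shape of condorMonitor's stored results is an assumption here):

from collections import Counter

def tally_states(machine_ads):
    # Count (site, state) pairs once, then read them back per site;
    # Counter returns 0 for missing keys, replacing the has_key checks.
    counts = Counter((ad["GLIDEIN_Site"], ad["State"]) for ad in machine_ads)
    for site in sorted({site for site, _ in counts}):
        print("Site: '%s' Claimed: %i Unclaimed: %i" % (
            site, counts[(site, "Claimed")], counts[(site, "Unclaimed")]))

tally_states([
    {"GLIDEIN_Site": "CERN", "State": "Claimed"},
    {"GLIDEIN_Site": "CERN", "State": "Unclaimed"},
    {"GLIDEIN_Site": "FNAL", "State": "Claimed"},
])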
d18b8806bd0ec6b677268b91c135eee84e1d48a4
dthm4kaiako/config/__init__.py
dthm4kaiako/config/__init__.py
"""Configuration for Django system.""" __version__ = "0.17.3" __version_info__ = tuple( [ int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split(".") ] )
"""Configuration for Django system.""" __version__ = "0.17.4" __version_info__ = tuple( [ int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split(".") ] )
Increment version number to 0.17.4
Increment version number to 0.17.4
Python
mit
uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers
"""Configuration for Django system.""" __version__ = "0.17.3" __version_info__ = tuple( [ int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split(".") ] ) Increment version number to 0.17.4
"""Configuration for Django system.""" __version__ = "0.17.4" __version_info__ = tuple( [ int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split(".") ] )
<commit_before>"""Configuration for Django system.""" __version__ = "0.17.3" __version_info__ = tuple( [ int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split(".") ] ) <commit_msg>Increment version number to 0.17.4<commit_after>
"""Configuration for Django system.""" __version__ = "0.17.4" __version_info__ = tuple( [ int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split(".") ] )
"""Configuration for Django system.""" __version__ = "0.17.3" __version_info__ = tuple( [ int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split(".") ] ) Increment version number to 0.17.4"""Configuration for Django system.""" __version__ = "0.17.4" __version_info__ = tuple( [ int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split(".") ] )
<commit_before>"""Configuration for Django system.""" __version__ = "0.17.3" __version_info__ = tuple( [ int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split(".") ] ) <commit_msg>Increment version number to 0.17.4<commit_after>"""Configuration for Django system.""" __version__ = "0.17.4" __version_info__ = tuple( [ int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split(".") ] )
7c4f239928dce777b40574febdd9aabe3f4ada02
mopidy_spotify/playlists.py
mopidy_spotify/playlists.py
from __future__ import unicode_literals import logging from mopidy import backend, models import spotify from mopidy_spotify import translator logger = logging.getLogger(__name__) class SpotifyPlaylistsProvider(backend.PlaylistsProvider): def __init__(self, backend): self._backend = backend def create(self, name): pass # TODO def delete(self, uri): pass # TODO def lookup(self, uri): pass # TODO @property def playlists(self): # XXX We should just return light-weight Ref objects here, but Mopidy's # core and backend APIs must be changed first. if self._backend._session.playlist_container is None: return [] result = [] folder = [] for sp_playlist in self._backend._session.playlist_container: if isinstance(sp_playlist, spotify.PlaylistFolder): if sp_playlist.type is spotify.PlaylistType.START_FOLDER: folder.append(sp_playlist.name) elif sp_playlist.type is spotify.PlaylistType.END_FOLDER: folder.pop() continue if not sp_playlist.is_loaded: continue name = '/'.join(folder + [sp_playlist.name]) # TODO Add "by <playlist owner>" to name tracks = [ translator.to_track(sp_track) for sp_track in sp_playlist.tracks ] tracks = filter(None, tracks) playlist = models.Playlist( uri=sp_playlist.link.uri, name=name, tracks=tracks) result.append(playlist) # TODO Add starred playlist return result def refresh(self): pass # TODO def save(self, playlist): pass # TODO
from __future__ import unicode_literals import logging from mopidy import backend import spotify from mopidy_spotify import translator logger = logging.getLogger(__name__) class SpotifyPlaylistsProvider(backend.PlaylistsProvider): def __init__(self, backend): self._backend = backend def create(self, name): pass # TODO def delete(self, uri): pass # TODO def lookup(self, uri): pass # TODO @property def playlists(self): # XXX We should just return light-weight Ref objects here, but Mopidy's # core and backend APIs must be changed first. if self._backend._session.playlist_container is None: return [] result = [] folders = [] for sp_playlist in self._backend._session.playlist_container: if isinstance(sp_playlist, spotify.PlaylistFolder): if sp_playlist.type is spotify.PlaylistType.START_FOLDER: folders.append(sp_playlist.name) elif sp_playlist.type is spotify.PlaylistType.END_FOLDER: folders.pop() continue playlist = translator.to_playlist(sp_playlist, folders=folders) if playlist is not None: result.append(playlist) # TODO Add starred playlist return result def refresh(self): pass # TODO def save(self, playlist): pass # TODO
Use to_playlist in the playlist provider
Use to_playlist in the playlist provider
Python
apache-2.0
jodal/mopidy-spotify,kingosticks/mopidy-spotify,mopidy/mopidy-spotify
from __future__ import unicode_literals import logging from mopidy import backend, models import spotify from mopidy_spotify import translator logger = logging.getLogger(__name__) class SpotifyPlaylistsProvider(backend.PlaylistsProvider): def __init__(self, backend): self._backend = backend def create(self, name): pass # TODO def delete(self, uri): pass # TODO def lookup(self, uri): pass # TODO @property def playlists(self): # XXX We should just return light-weight Ref objects here, but Mopidy's # core and backend APIs must be changed first. if self._backend._session.playlist_container is None: return [] result = [] folder = [] for sp_playlist in self._backend._session.playlist_container: if isinstance(sp_playlist, spotify.PlaylistFolder): if sp_playlist.type is spotify.PlaylistType.START_FOLDER: folder.append(sp_playlist.name) elif sp_playlist.type is spotify.PlaylistType.END_FOLDER: folder.pop() continue if not sp_playlist.is_loaded: continue name = '/'.join(folder + [sp_playlist.name]) # TODO Add "by <playlist owner>" to name tracks = [ translator.to_track(sp_track) for sp_track in sp_playlist.tracks ] tracks = filter(None, tracks) playlist = models.Playlist( uri=sp_playlist.link.uri, name=name, tracks=tracks) result.append(playlist) # TODO Add starred playlist return result def refresh(self): pass # TODO def save(self, playlist): pass # TODO Use to_playlist in the playlist provider
from __future__ import unicode_literals import logging from mopidy import backend import spotify from mopidy_spotify import translator logger = logging.getLogger(__name__) class SpotifyPlaylistsProvider(backend.PlaylistsProvider): def __init__(self, backend): self._backend = backend def create(self, name): pass # TODO def delete(self, uri): pass # TODO def lookup(self, uri): pass # TODO @property def playlists(self): # XXX We should just return light-weight Ref objects here, but Mopidy's # core and backend APIs must be changed first. if self._backend._session.playlist_container is None: return [] result = [] folders = [] for sp_playlist in self._backend._session.playlist_container: if isinstance(sp_playlist, spotify.PlaylistFolder): if sp_playlist.type is spotify.PlaylistType.START_FOLDER: folders.append(sp_playlist.name) elif sp_playlist.type is spotify.PlaylistType.END_FOLDER: folders.pop() continue playlist = translator.to_playlist(sp_playlist, folders=folders) if playlist is not None: result.append(playlist) # TODO Add starred playlist return result def refresh(self): pass # TODO def save(self, playlist): pass # TODO
<commit_before>from __future__ import unicode_literals import logging from mopidy import backend, models import spotify from mopidy_spotify import translator logger = logging.getLogger(__name__) class SpotifyPlaylistsProvider(backend.PlaylistsProvider): def __init__(self, backend): self._backend = backend def create(self, name): pass # TODO def delete(self, uri): pass # TODO def lookup(self, uri): pass # TODO @property def playlists(self): # XXX We should just return light-weight Ref objects here, but Mopidy's # core and backend APIs must be changed first. if self._backend._session.playlist_container is None: return [] result = [] folder = [] for sp_playlist in self._backend._session.playlist_container: if isinstance(sp_playlist, spotify.PlaylistFolder): if sp_playlist.type is spotify.PlaylistType.START_FOLDER: folder.append(sp_playlist.name) elif sp_playlist.type is spotify.PlaylistType.END_FOLDER: folder.pop() continue if not sp_playlist.is_loaded: continue name = '/'.join(folder + [sp_playlist.name]) # TODO Add "by <playlist owner>" to name tracks = [ translator.to_track(sp_track) for sp_track in sp_playlist.tracks ] tracks = filter(None, tracks) playlist = models.Playlist( uri=sp_playlist.link.uri, name=name, tracks=tracks) result.append(playlist) # TODO Add starred playlist return result def refresh(self): pass # TODO def save(self, playlist): pass # TODO <commit_msg>Use to_playlist in the playlist provider<commit_after>
from __future__ import unicode_literals import logging from mopidy import backend import spotify from mopidy_spotify import translator logger = logging.getLogger(__name__) class SpotifyPlaylistsProvider(backend.PlaylistsProvider): def __init__(self, backend): self._backend = backend def create(self, name): pass # TODO def delete(self, uri): pass # TODO def lookup(self, uri): pass # TODO @property def playlists(self): # XXX We should just return light-weight Ref objects here, but Mopidy's # core and backend APIs must be changed first. if self._backend._session.playlist_container is None: return [] result = [] folders = [] for sp_playlist in self._backend._session.playlist_container: if isinstance(sp_playlist, spotify.PlaylistFolder): if sp_playlist.type is spotify.PlaylistType.START_FOLDER: folders.append(sp_playlist.name) elif sp_playlist.type is spotify.PlaylistType.END_FOLDER: folders.pop() continue playlist = translator.to_playlist(sp_playlist, folders=folders) if playlist is not None: result.append(playlist) # TODO Add starred playlist return result def refresh(self): pass # TODO def save(self, playlist): pass # TODO
from __future__ import unicode_literals import logging from mopidy import backend, models import spotify from mopidy_spotify import translator logger = logging.getLogger(__name__) class SpotifyPlaylistsProvider(backend.PlaylistsProvider): def __init__(self, backend): self._backend = backend def create(self, name): pass # TODO def delete(self, uri): pass # TODO def lookup(self, uri): pass # TODO @property def playlists(self): # XXX We should just return light-weight Ref objects here, but Mopidy's # core and backend APIs must be changed first. if self._backend._session.playlist_container is None: return [] result = [] folder = [] for sp_playlist in self._backend._session.playlist_container: if isinstance(sp_playlist, spotify.PlaylistFolder): if sp_playlist.type is spotify.PlaylistType.START_FOLDER: folder.append(sp_playlist.name) elif sp_playlist.type is spotify.PlaylistType.END_FOLDER: folder.pop() continue if not sp_playlist.is_loaded: continue name = '/'.join(folder + [sp_playlist.name]) # TODO Add "by <playlist owner>" to name tracks = [ translator.to_track(sp_track) for sp_track in sp_playlist.tracks ] tracks = filter(None, tracks) playlist = models.Playlist( uri=sp_playlist.link.uri, name=name, tracks=tracks) result.append(playlist) # TODO Add starred playlist return result def refresh(self): pass # TODO def save(self, playlist): pass # TODO Use to_playlist in the playlist providerfrom __future__ import unicode_literals import logging from mopidy import backend import spotify from mopidy_spotify import translator logger = logging.getLogger(__name__) class SpotifyPlaylistsProvider(backend.PlaylistsProvider): def __init__(self, backend): self._backend = backend def create(self, name): pass # TODO def delete(self, uri): pass # TODO def lookup(self, uri): pass # TODO @property def playlists(self): # XXX We should just return light-weight Ref objects here, but Mopidy's # core and backend APIs must be changed first. if self._backend._session.playlist_container is None: return [] result = [] folders = [] for sp_playlist in self._backend._session.playlist_container: if isinstance(sp_playlist, spotify.PlaylistFolder): if sp_playlist.type is spotify.PlaylistType.START_FOLDER: folders.append(sp_playlist.name) elif sp_playlist.type is spotify.PlaylistType.END_FOLDER: folders.pop() continue playlist = translator.to_playlist(sp_playlist, folders=folders) if playlist is not None: result.append(playlist) # TODO Add starred playlist return result def refresh(self): pass # TODO def save(self, playlist): pass # TODO
<commit_before>from __future__ import unicode_literals import logging from mopidy import backend, models import spotify from mopidy_spotify import translator logger = logging.getLogger(__name__) class SpotifyPlaylistsProvider(backend.PlaylistsProvider): def __init__(self, backend): self._backend = backend def create(self, name): pass # TODO def delete(self, uri): pass # TODO def lookup(self, uri): pass # TODO @property def playlists(self): # XXX We should just return light-weight Ref objects here, but Mopidy's # core and backend APIs must be changed first. if self._backend._session.playlist_container is None: return [] result = [] folder = [] for sp_playlist in self._backend._session.playlist_container: if isinstance(sp_playlist, spotify.PlaylistFolder): if sp_playlist.type is spotify.PlaylistType.START_FOLDER: folder.append(sp_playlist.name) elif sp_playlist.type is spotify.PlaylistType.END_FOLDER: folder.pop() continue if not sp_playlist.is_loaded: continue name = '/'.join(folder + [sp_playlist.name]) # TODO Add "by <playlist owner>" to name tracks = [ translator.to_track(sp_track) for sp_track in sp_playlist.tracks ] tracks = filter(None, tracks) playlist = models.Playlist( uri=sp_playlist.link.uri, name=name, tracks=tracks) result.append(playlist) # TODO Add starred playlist return result def refresh(self): pass # TODO def save(self, playlist): pass # TODO <commit_msg>Use to_playlist in the playlist provider<commit_after>from __future__ import unicode_literals import logging from mopidy import backend import spotify from mopidy_spotify import translator logger = logging.getLogger(__name__) class SpotifyPlaylistsProvider(backend.PlaylistsProvider): def __init__(self, backend): self._backend = backend def create(self, name): pass # TODO def delete(self, uri): pass # TODO def lookup(self, uri): pass # TODO @property def playlists(self): # XXX We should just return light-weight Ref objects here, but Mopidy's # core and backend APIs must be changed first. if self._backend._session.playlist_container is None: return [] result = [] folders = [] for sp_playlist in self._backend._session.playlist_container: if isinstance(sp_playlist, spotify.PlaylistFolder): if sp_playlist.type is spotify.PlaylistType.START_FOLDER: folders.append(sp_playlist.name) elif sp_playlist.type is spotify.PlaylistType.END_FOLDER: folders.pop() continue playlist = translator.to_playlist(sp_playlist, folders=folders) if playlist is not None: result.append(playlist) # TODO Add starred playlist return result def refresh(self): pass # TODO def save(self, playlist): pass # TODO
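The commit replaces the inline name-building and track-filtering with a translator.to_playlist call; a sketch of what that helper plausibly contains, reconstructed from the code the commit deletes (the real helper in mopidy_spotify.translator is not shown in this record, so treat this as an approximation):

from mopidy import models
from mopidy_spotify.translator import to_track  # helper the old inline code already used

def to_playlist_sketch(sp_playlist, folders=()):
    # Mirror the deleted provider logic: skip unloaded playlists, prefix
    # the name with the enclosing folder path, and drop untranslatable tracks.
    if not sp_playlist.is_loaded:
        return None
    name = "/".join(list(folders) + [sp_playlist.name])
    tracks = [t for t in (to_track(sp_track) for sp_track in sp_playlist.tracks)
              if t is not None]
    return models.Playlist(uri=sp_playlist.link.uri, name=name, tracks=tracks)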
c8e1c9720a29d0efa719467774e12ccf06a7989b
tests/test_frog.py
tests/test_frog.py
import csv from io import StringIO import logging from nose.tools import assert_equal from unittest import SkipTest from nlpipe.modules.frog import FrogLemmatizer from tests.tools import check_status def test_process(): """ Test Frog lemmatizing Make sure a frog server is listening at port 9000, e.g.: sudo docker run -dp 9887:9887 proycon/lamachine frog -S 9887 --skip=pm """ c = FrogLemmatizer() check_status(c) result = c.process("Nederlandse woordjes") print(result) r = list(csv.DictReader(StringIO(result))) assert_equal(len(r), 2) assert_equal(r[0]["lemma"], "nederlands") assert_equal(r[0]["ner"], "B-LOC")
import csv from io import StringIO import logging from nose.tools import assert_equal from unittest import SkipTest from nlpipe.modules.frog import FrogLemmatizer from tests.tools import check_status def test_process(): """ Test Frog lemmatizing Make sure a frog server is listening at port 9000, e.g.: sudo docker run -dp 9887:9887 proycon/lamachine frog -S 9887 --skip=pm """ c = FrogLemmatizer() check_status(c) result = c.process("Nederlandse woordjes") print(result) r = list(csv.DictReader(StringIO(result))) assert_equal(len(r), 2) assert_equal(r[0]["lemma"], "nederlands") assert_equal(r[0]["ner"], "B-LOC") def test_csv(): """ Test whether csv format correctly adds id and simplified POS tag """ c = FrogLemmatizer() result = ('sentence,offset,word,lemma,morphofeat,ner,chunk\n' '1,0,dit,dit,"VNW(aanw,pron,stan,vol,3o,ev)",O,B-NP\n' '1,3,is,zijn,"WW(pv,tgw,ev)",O,B-VP\n') result = c.convert(123, result, "csv") r = list(csv.DictReader(StringIO(result))) assert_equal(len(r), 2) assert_equal(set(r[0].keys()), {"id", "sentence", "offset", "word", "lemma", "morphofeat", "ner", "chunk", "pos"}) assert_equal(r[0]["id"], "123") assert_equal(r[0]["pos"], "O") assert_equal(r[1]["pos"], "V")
Add test for frog csv format
Add test for frog csv format
Python
mit
vanatteveldt/nlpipe,vanatteveldt/nlpipe,vanatteveldt/nlpipe
import csv from io import StringIO import logging from nose.tools import assert_equal from unittest import SkipTest from nlpipe.modules.frog import FrogLemmatizer from tests.tools import check_status def test_process(): """ Test Frog lemmatizing Make sure a frog server is listening at port 9000, e.g.: sudo docker run -dp 9887:9887 proycon/lamachine frog -S 9887 --skip=pm """ c = FrogLemmatizer() check_status(c) result = c.process("Nederlandse woordjes") print(result) r = list(csv.DictReader(StringIO(result))) assert_equal(len(r), 2) assert_equal(r[0]["lemma"], "nederlands") assert_equal(r[0]["ner"], "B-LOC") Add test for frog csv format
import csv from io import StringIO import logging from nose.tools import assert_equal from unittest import SkipTest from nlpipe.modules.frog import FrogLemmatizer from tests.tools import check_status def test_process(): """ Test Frog lemmatizing Make sure a frog server is listening at port 9000, e.g.: sudo docker run -dp 9887:9887 proycon/lamachine frog -S 9887 --skip=pm """ c = FrogLemmatizer() check_status(c) result = c.process("Nederlandse woordjes") print(result) r = list(csv.DictReader(StringIO(result))) assert_equal(len(r), 2) assert_equal(r[0]["lemma"], "nederlands") assert_equal(r[0]["ner"], "B-LOC") def test_csv(): """ Test whether csv format correctly adds id and simplified POS tag """ c = FrogLemmatizer() result = ('sentence,offset,word,lemma,morphofeat,ner,chunk\n' '1,0,dit,dit,"VNW(aanw,pron,stan,vol,3o,ev)",O,B-NP\n' '1,3,is,zijn,"WW(pv,tgw,ev)",O,B-VP\n') result = c.convert(123, result, "csv") r = list(csv.DictReader(StringIO(result))) assert_equal(len(r), 2) assert_equal(set(r[0].keys()), {"id", "sentence", "offset", "word", "lemma", "morphofeat", "ner", "chunk", "pos"}) assert_equal(r[0]["id"], "123") assert_equal(r[0]["pos"], "O") assert_equal(r[1]["pos"], "V")
<commit_before>import csv from io import StringIO import logging from nose.tools import assert_equal from unittest import SkipTest from nlpipe.modules.frog import FrogLemmatizer from tests.tools import check_status def test_process(): """ Test Frog lemmatizing Make sure a frog server is listening at port 9000, e.g.: sudo docker run -dp 9887:9887 proycon/lamachine frog -S 9887 --skip=pm """ c = FrogLemmatizer() check_status(c) result = c.process("Nederlandse woordjes") print(result) r = list(csv.DictReader(StringIO(result))) assert_equal(len(r), 2) assert_equal(r[0]["lemma"], "nederlands") assert_equal(r[0]["ner"], "B-LOC") <commit_msg>Add test for frog csv format<commit_after>
import csv from io import StringIO import logging from nose.tools import assert_equal from unittest import SkipTest from nlpipe.modules.frog import FrogLemmatizer from tests.tools import check_status def test_process(): """ Test Frog lemmatizing Make sure a frog server is listening at port 9000, e.g.: sudo docker run -dp 9887:9887 proycon/lamachine frog -S 9887 --skip=pm """ c = FrogLemmatizer() check_status(c) result = c.process("Nederlandse woordjes") print(result) r = list(csv.DictReader(StringIO(result))) assert_equal(len(r), 2) assert_equal(r[0]["lemma"], "nederlands") assert_equal(r[0]["ner"], "B-LOC") def test_csv(): """ Test whether csv format correctly adds id and simplified POS tag """ c = FrogLemmatizer() result = ('sentence,offset,word,lemma,morphofeat,ner,chunk\n' '1,0,dit,dit,"VNW(aanw,pron,stan,vol,3o,ev)",O,B-NP\n' '1,3,is,zijn,"WW(pv,tgw,ev)",O,B-VP\n') result = c.convert(123, result, "csv") r = list(csv.DictReader(StringIO(result))) assert_equal(len(r), 2) assert_equal(set(r[0].keys()), {"id", "sentence", "offset", "word", "lemma", "morphofeat", "ner", "chunk", "pos"}) assert_equal(r[0]["id"], "123") assert_equal(r[0]["pos"], "O") assert_equal(r[1]["pos"], "V")
import csv from io import StringIO import logging from nose.tools import assert_equal from unittest import SkipTest from nlpipe.modules.frog import FrogLemmatizer from tests.tools import check_status def test_process(): """ Test Frog lemmatizing Make sure a frog server is listening at port 9000, e.g.: sudo docker run -dp 9887:9887 proycon/lamachine frog -S 9887 --skip=pm """ c = FrogLemmatizer() check_status(c) result = c.process("Nederlandse woordjes") print(result) r = list(csv.DictReader(StringIO(result))) assert_equal(len(r), 2) assert_equal(r[0]["lemma"], "nederlands") assert_equal(r[0]["ner"], "B-LOC") Add test for frog csv formatimport csv from io import StringIO import logging from nose.tools import assert_equal from unittest import SkipTest from nlpipe.modules.frog import FrogLemmatizer from tests.tools import check_status def test_process(): """ Test Frog lemmatizing Make sure a frog server is listening at port 9000, e.g.: sudo docker run -dp 9887:9887 proycon/lamachine frog -S 9887 --skip=pm """ c = FrogLemmatizer() check_status(c) result = c.process("Nederlandse woordjes") print(result) r = list(csv.DictReader(StringIO(result))) assert_equal(len(r), 2) assert_equal(r[0]["lemma"], "nederlands") assert_equal(r[0]["ner"], "B-LOC") def test_csv(): """ Test whether csv format correctly adds id and simplified POS tag """ c = FrogLemmatizer() result = ('sentence,offset,word,lemma,morphofeat,ner,chunk\n' '1,0,dit,dit,"VNW(aanw,pron,stan,vol,3o,ev)",O,B-NP\n' '1,3,is,zijn,"WW(pv,tgw,ev)",O,B-VP\n') result = c.convert(123, result, "csv") r = list(csv.DictReader(StringIO(result))) assert_equal(len(r), 2) assert_equal(set(r[0].keys()), {"id", "sentence", "offset", "word", "lemma", "morphofeat", "ner", "chunk", "pos"}) assert_equal(r[0]["id"], "123") assert_equal(r[0]["pos"], "O") assert_equal(r[1]["pos"], "V")
<commit_before>import csv from io import StringIO import logging from nose.tools import assert_equal from unittest import SkipTest from nlpipe.modules.frog import FrogLemmatizer from tests.tools import check_status def test_process(): """ Test Frog lemmatizing Make sure a frog server is listening at port 9000, e.g.: sudo docker run -dp 9887:9887 proycon/lamachine frog -S 9887 --skip=pm """ c = FrogLemmatizer() check_status(c) result = c.process("Nederlandse woordjes") print(result) r = list(csv.DictReader(StringIO(result))) assert_equal(len(r), 2) assert_equal(r[0]["lemma"], "nederlands") assert_equal(r[0]["ner"], "B-LOC") <commit_msg>Add test for frog csv format<commit_after>import csv from io import StringIO import logging from nose.tools import assert_equal from unittest import SkipTest from nlpipe.modules.frog import FrogLemmatizer from tests.tools import check_status def test_process(): """ Test Frog lemmatizing Make sure a frog server is listening at port 9000, e.g.: sudo docker run -dp 9887:9887 proycon/lamachine frog -S 9887 --skip=pm """ c = FrogLemmatizer() check_status(c) result = c.process("Nederlandse woordjes") print(result) r = list(csv.DictReader(StringIO(result))) assert_equal(len(r), 2) assert_equal(r[0]["lemma"], "nederlands") assert_equal(r[0]["ner"], "B-LOC") def test_csv(): """ Test whether csv format correctly adds id and simplified POS tag """ c = FrogLemmatizer() result = ('sentence,offset,word,lemma,morphofeat,ner,chunk\n' '1,0,dit,dit,"VNW(aanw,pron,stan,vol,3o,ev)",O,B-NP\n' '1,3,is,zijn,"WW(pv,tgw,ev)",O,B-VP\n') result = c.convert(123, result, "csv") r = list(csv.DictReader(StringIO(result))) assert_equal(len(r), 2) assert_equal(set(r[0].keys()), {"id", "sentence", "offset", "word", "lemma", "morphofeat", "ner", "chunk", "pos"}) assert_equal(r[0]["id"], "123") assert_equal(r[0]["pos"], "O") assert_equal(r[1]["pos"], "V")
a736a7573745af7d72e4297dbfe8799ed472217a
bookmarks/models.py
bookmarks/models.py
from sqlalchemy import Column, Integer, String, Text, ForeignKey from sqlalchemy.dialects.mysql import BIGINT from sqlalchemy.orm import relationship from bookmarks.database import Base class User(Base): __tablename__ = 'user' id = Column(Integer, primary_key=True) username = Column(String(50), unique=True, nullable=False) name = Column(String(120)) email = Column(String(256), unique=True, nullable=False) pw_hash = Column(String(60), nullable=False) bookmarks = relationship("Bookmark", back_populates="user") def __init__(self, username, name, email, pw_hash): self.username = username self.name = name self.email = email self.pw_hash = pw_hash def __repr__(self): return '<User %r>' % (self.username) class Bookmark(Base): __tablename__ = 'bookmark' id = Column(String(6), primary_key=True, unique=True, nullable=False) link = Column(Text, nullable=False) hits = Column(BIGINT(unsigned=True)) user_id = Column(Integer, ForeignKey('user.id')) user = relationship("User", back_populates="bookmarks") def __init__(self, id, link, user_id): self.id = id self.link = link self.hits = 0 self.user_id = user_id def __repr__(self): return '<Bookmark %r>' % (self.id)
from sqlalchemy import Column, Integer, BigInteger, String, Text, ForeignKey from sqlalchemy.orm import relationship from bookmarks.database import Base class User(Base): __tablename__ = 'user' id = Column(Integer, primary_key=True) username = Column(String(50), unique=True, nullable=False) name = Column(String(120)) email = Column(String(256), unique=True, nullable=False) pw_hash = Column(String(60), nullable=False) bookmarks = relationship("Bookmark", back_populates="user") def __init__(self, username, name, email, pw_hash): self.username = username self.name = name self.email = email self.pw_hash = pw_hash def __repr__(self): return '<User %r>' % (self.username) class Bookmark(Base): __tablename__ = 'bookmark' id = Column(String(6), primary_key=True, unique=True, nullable=False) link = Column(Text, nullable=False) hits = Column(BigInteger) user_id = Column(Integer, ForeignKey('user.id')) user = relationship("User", back_populates="bookmarks") def __init__(self, id, link, user_id): self.id = id self.link = link self.hits = 0 self.user_id = user_id def __repr__(self): return '<Bookmark %r>' % (self.id)
Use sqlalchemy BigInteger instead of mysql BIGINT
Use sqlalchemy BigInteger instead of mysql BIGINT
Python
apache-2.0
byanofsky/bookmarks,byanofsky/bookmarks,byanofsky/bookmarks
from sqlalchemy import Column, Integer, String, Text, ForeignKey from sqlalchemy.dialects.mysql import BIGINT from sqlalchemy.orm import relationship from bookmarks.database import Base class User(Base): __tablename__ = 'user' id = Column(Integer, primary_key=True) username = Column(String(50), unique=True, nullable=False) name = Column(String(120)) email = Column(String(256), unique=True, nullable=False) pw_hash = Column(String(60), nullable=False) bookmarks = relationship("Bookmark", back_populates="user") def __init__(self, username, name, email, pw_hash): self.username = username self.name = name self.email = email self.pw_hash = pw_hash def __repr__(self): return '<User %r>' % (self.username) class Bookmark(Base): __tablename__ = 'bookmark' id = Column(String(6), primary_key=True, unique=True, nullable=False) link = Column(Text, nullable=False) hits = Column(BIGINT(unsigned=True)) user_id = Column(Integer, ForeignKey('user.id')) user = relationship("User", back_populates="bookmarks") def __init__(self, id, link, user_id): self.id = id self.link = link self.hits = 0 self.user_id = user_id def __repr__(self): return '<Bookmark %r>' % (self.id) Use sqlalchemy BigInteger instead of mysql BIGINT
from sqlalchemy import Column, Integer, BigInteger, String, Text, ForeignKey from sqlalchemy.orm import relationship from bookmarks.database import Base class User(Base): __tablename__ = 'user' id = Column(Integer, primary_key=True) username = Column(String(50), unique=True, nullable=False) name = Column(String(120)) email = Column(String(256), unique=True, nullable=False) pw_hash = Column(String(60), nullable=False) bookmarks = relationship("Bookmark", back_populates="user") def __init__(self, username, name, email, pw_hash): self.username = username self.name = name self.email = email self.pw_hash = pw_hash def __repr__(self): return '<User %r>' % (self.username) class Bookmark(Base): __tablename__ = 'bookmark' id = Column(String(6), primary_key=True, unique=True, nullable=False) link = Column(Text, nullable=False) hits = Column(BigInteger) user_id = Column(Integer, ForeignKey('user.id')) user = relationship("User", back_populates="bookmarks") def __init__(self, id, link, user_id): self.id = id self.link = link self.hits = 0 self.user_id = user_id def __repr__(self): return '<Bookmark %r>' % (self.id)
<commit_before>from sqlalchemy import Column, Integer, String, Text, ForeignKey from sqlalchemy.dialects.mysql import BIGINT from sqlalchemy.orm import relationship from bookmarks.database import Base class User(Base): __tablename__ = 'user' id = Column(Integer, primary_key=True) username = Column(String(50), unique=True, nullable=False) name = Column(String(120)) email = Column(String(256), unique=True, nullable=False) pw_hash = Column(String(60), nullable=False) bookmarks = relationship("Bookmark", back_populates="user") def __init__(self, username, name, email, pw_hash): self.username = username self.name = name self.email = email self.pw_hash = pw_hash def __repr__(self): return '<User %r>' % (self.username) class Bookmark(Base): __tablename__ = 'bookmark' id = Column(String(6), primary_key=True, unique=True, nullable=False) link = Column(Text, nullable=False) hits = Column(BIGINT(unsigned=True)) user_id = Column(Integer, ForeignKey('user.id')) user = relationship("User", back_populates="bookmarks") def __init__(self, id, link, user_id): self.id = id self.link = link self.hits = 0 self.user_id = user_id def __repr__(self): return '<Bookmark %r>' % (self.id) <commit_msg>Use sqlalchemy BigInteger instead of mysql BIGINT<commit_after>
from sqlalchemy import Column, Integer, BigInteger, String, Text, ForeignKey from sqlalchemy.orm import relationship from bookmarks.database import Base class User(Base): __tablename__ = 'user' id = Column(Integer, primary_key=True) username = Column(String(50), unique=True, nullable=False) name = Column(String(120)) email = Column(String(256), unique=True, nullable=False) pw_hash = Column(String(60), nullable=False) bookmarks = relationship("Bookmark", back_populates="user") def __init__(self, username, name, email, pw_hash): self.username = username self.name = name self.email = email self.pw_hash = pw_hash def __repr__(self): return '<User %r>' % (self.username) class Bookmark(Base): __tablename__ = 'bookmark' id = Column(String(6), primary_key=True, unique=True, nullable=False) link = Column(Text, nullable=False) hits = Column(BigInteger) user_id = Column(Integer, ForeignKey('user.id')) user = relationship("User", back_populates="bookmarks") def __init__(self, id, link, user_id): self.id = id self.link = link self.hits = 0 self.user_id = user_id def __repr__(self): return '<Bookmark %r>' % (self.id)
from sqlalchemy import Column, Integer, String, Text, ForeignKey from sqlalchemy.dialects.mysql import BIGINT from sqlalchemy.orm import relationship from bookmarks.database import Base class User(Base): __tablename__ = 'user' id = Column(Integer, primary_key=True) username = Column(String(50), unique=True, nullable=False) name = Column(String(120)) email = Column(String(256), unique=True, nullable=False) pw_hash = Column(String(60), nullable=False) bookmarks = relationship("Bookmark", back_populates="user") def __init__(self, username, name, email, pw_hash): self.username = username self.name = name self.email = email self.pw_hash = pw_hash def __repr__(self): return '<User %r>' % (self.username) class Bookmark(Base): __tablename__ = 'bookmark' id = Column(String(6), primary_key=True, unique=True, nullable=False) link = Column(Text, nullable=False) hits = Column(BIGINT(unsigned=True)) user_id = Column(Integer, ForeignKey('user.id')) user = relationship("User", back_populates="bookmarks") def __init__(self, id, link, user_id): self.id = id self.link = link self.hits = 0 self.user_id = user_id def __repr__(self): return '<Bookmark %r>' % (self.id) Use sqlalchemy BigInteger instead of mysql BIGINTfrom sqlalchemy import Column, Integer, BigInteger, String, Text, ForeignKey from sqlalchemy.orm import relationship from bookmarks.database import Base class User(Base): __tablename__ = 'user' id = Column(Integer, primary_key=True) username = Column(String(50), unique=True, nullable=False) name = Column(String(120)) email = Column(String(256), unique=True, nullable=False) pw_hash = Column(String(60), nullable=False) bookmarks = relationship("Bookmark", back_populates="user") def __init__(self, username, name, email, pw_hash): self.username = username self.name = name self.email = email self.pw_hash = pw_hash def __repr__(self): return '<User %r>' % (self.username) class Bookmark(Base): __tablename__ = 'bookmark' id = Column(String(6), primary_key=True, unique=True, nullable=False) link = Column(Text, nullable=False) hits = Column(BigInteger) user_id = Column(Integer, ForeignKey('user.id')) user = relationship("User", back_populates="bookmarks") def __init__(self, id, link, user_id): self.id = id self.link = link self.hits = 0 self.user_id = user_id def __repr__(self): return '<Bookmark %r>' % (self.id)
<commit_before>from sqlalchemy import Column, Integer, String, Text, ForeignKey from sqlalchemy.dialects.mysql import BIGINT from sqlalchemy.orm import relationship from bookmarks.database import Base class User(Base): __tablename__ = 'user' id = Column(Integer, primary_key=True) username = Column(String(50), unique=True, nullable=False) name = Column(String(120)) email = Column(String(256), unique=True, nullable=False) pw_hash = Column(String(60), nullable=False) bookmarks = relationship("Bookmark", back_populates="user") def __init__(self, username, name, email, pw_hash): self.username = username self.name = name self.email = email self.pw_hash = pw_hash def __repr__(self): return '<User %r>' % (self.username) class Bookmark(Base): __tablename__ = 'bookmark' id = Column(String(6), primary_key=True, unique=True, nullable=False) link = Column(Text, nullable=False) hits = Column(BIGINT(unsigned=True)) user_id = Column(Integer, ForeignKey('user.id')) user = relationship("User", back_populates="bookmarks") def __init__(self, id, link, user_id): self.id = id self.link = link self.hits = 0 self.user_id = user_id def __repr__(self): return '<Bookmark %r>' % (self.id) <commit_msg>Use sqlalchemy BigInteger instead of mysql BIGINT<commit_after>from sqlalchemy import Column, Integer, BigInteger, String, Text, ForeignKey from sqlalchemy.orm import relationship from bookmarks.database import Base class User(Base): __tablename__ = 'user' id = Column(Integer, primary_key=True) username = Column(String(50), unique=True, nullable=False) name = Column(String(120)) email = Column(String(256), unique=True, nullable=False) pw_hash = Column(String(60), nullable=False) bookmarks = relationship("Bookmark", back_populates="user") def __init__(self, username, name, email, pw_hash): self.username = username self.name = name self.email = email self.pw_hash = pw_hash def __repr__(self): return '<User %r>' % (self.username) class Bookmark(Base): __tablename__ = 'bookmark' id = Column(String(6), primary_key=True, unique=True, nullable=False) link = Column(Text, nullable=False) hits = Column(BigInteger) user_id = Column(Integer, ForeignKey('user.id')) user = relationship("User", back_populates="bookmarks") def __init__(self, id, link, user_id): self.id = id self.link = link self.hits = 0 self.user_id = user_id def __repr__(self): return '<Bookmark %r>' % (self.id)
b25deb2029a103548e56d7dc1fda61124b6e47d6
meteorflux/config.py
meteorflux/config.py
"""Configuration constants.""" import os PACKAGEDIR = os.path.dirname(os.path.abspath(__file__)) HOSTNAME = os.uname()[1] if HOSTNAME == 'ec.geert.io' or HOSTNAME == 'imo.geert.io' or HOSTNAME == 'meteorflux.io': DEBUG = True DBINFO = 'host=/var/run/postgresql dbname=flux_kca user=postgres' TMPDIR = '/var/www/tmp' TMPDIR_WWW = '/tmp' else: DEBUG = True DBINFO = 'host=/var/run/postgresql dbname=fluxdb user=postgres' TMPDIR = '/tmp' TMPDIR_WWW = '/tmp' # Database to use for unit tests DBINFO_TESTING = 'host=/var/run/postgresql dbname=testdb user=postgres' DPI = 80 # Default DPI of graphs MARKERS = ['s', '^', 'o', 's', '^', 'o', 's', '^', 'o', 's', '^', 'o']
"""Configuration constants.""" import os PACKAGEDIR = os.path.dirname(os.path.abspath(__file__)) HOSTNAME = os.uname()[1] if HOSTNAME == 'ec.geert.io' or HOSTNAME == 'imo.geert.io' or HOSTNAME == 'meteorflux.io': DEBUG = True DBINFO = 'host=/var/run/postgresql dbname=fluxdb user=postgres' TMPDIR = '/var/www/tmp' TMPDIR_WWW = '/tmp' else: DEBUG = True DBINFO = 'host=/var/run/postgresql dbname=fluxdb user=postgres' TMPDIR = '/tmp' TMPDIR_WWW = '/tmp' # Database to use for unit tests DBINFO_TESTING = 'host=/var/run/postgresql dbname=testdb user=postgres' DPI = 80 # Default DPI of graphs MARKERS = ['s', '^', 'o', 's', '^', 'o', 's', '^', 'o', 's', '^', 'o']
Set database name to fluxdb
Set database name to fluxdb
Python
mit
barentsen/meteor-flux,barentsen/meteor-flux,barentsen/meteor-flux,barentsen/meteor-flux
"""Configuration constants.""" import os PACKAGEDIR = os.path.dirname(os.path.abspath(__file__)) HOSTNAME = os.uname()[1] if HOSTNAME == 'ec.geert.io' or HOSTNAME == 'imo.geert.io' or HOSTNAME == 'meteorflux.io': DEBUG = True DBINFO = 'host=/var/run/postgresql dbname=flux_kca user=postgres' TMPDIR = '/var/www/tmp' TMPDIR_WWW = '/tmp' else: DEBUG = True DBINFO = 'host=/var/run/postgresql dbname=fluxdb user=postgres' TMPDIR = '/tmp' TMPDIR_WWW = '/tmp' # Database to use for unit tests DBINFO_TESTING = 'host=/var/run/postgresql dbname=testdb user=postgres' DPI = 80 # Default DPI of graphs MARKERS = ['s', '^', 'o', 's', '^', 'o', 's', '^', 'o', 's', '^', 'o'] Set database name to fluxdb
"""Configuration constants.""" import os PACKAGEDIR = os.path.dirname(os.path.abspath(__file__)) HOSTNAME = os.uname()[1] if HOSTNAME == 'ec.geert.io' or HOSTNAME == 'imo.geert.io' or HOSTNAME == 'meteorflux.io': DEBUG = True DBINFO = 'host=/var/run/postgresql dbname=fluxdb user=postgres' TMPDIR = '/var/www/tmp' TMPDIR_WWW = '/tmp' else: DEBUG = True DBINFO = 'host=/var/run/postgresql dbname=fluxdb user=postgres' TMPDIR = '/tmp' TMPDIR_WWW = '/tmp' # Database to use for unit tests DBINFO_TESTING = 'host=/var/run/postgresql dbname=testdb user=postgres' DPI = 80 # Default DPI of graphs MARKERS = ['s', '^', 'o', 's', '^', 'o', 's', '^', 'o', 's', '^', 'o']
<commit_before>"""Configuration constants.""" import os PACKAGEDIR = os.path.dirname(os.path.abspath(__file__)) HOSTNAME = os.uname()[1] if HOSTNAME == 'ec.geert.io' or HOSTNAME == 'imo.geert.io' or HOSTNAME == 'meteorflux.io': DEBUG = True DBINFO = 'host=/var/run/postgresql dbname=flux_kca user=postgres' TMPDIR = '/var/www/tmp' TMPDIR_WWW = '/tmp' else: DEBUG = True DBINFO = 'host=/var/run/postgresql dbname=fluxdb user=postgres' TMPDIR = '/tmp' TMPDIR_WWW = '/tmp' # Database to use for unit tests DBINFO_TESTING = 'host=/var/run/postgresql dbname=testdb user=postgres' DPI = 80 # Default DPI of graphs MARKERS = ['s', '^', 'o', 's', '^', 'o', 's', '^', 'o', 's', '^', 'o'] <commit_msg>Set database name to fluxdb<commit_after>
"""Configuration constants.""" import os PACKAGEDIR = os.path.dirname(os.path.abspath(__file__)) HOSTNAME = os.uname()[1] if HOSTNAME == 'ec.geert.io' or HOSTNAME == 'imo.geert.io' or HOSTNAME == 'meteorflux.io': DEBUG = True DBINFO = 'host=/var/run/postgresql dbname=fluxdb user=postgres' TMPDIR = '/var/www/tmp' TMPDIR_WWW = '/tmp' else: DEBUG = True DBINFO = 'host=/var/run/postgresql dbname=fluxdb user=postgres' TMPDIR = '/tmp' TMPDIR_WWW = '/tmp' # Database to use for unit tests DBINFO_TESTING = 'host=/var/run/postgresql dbname=testdb user=postgres' DPI = 80 # Default DPI of graphs MARKERS = ['s', '^', 'o', 's', '^', 'o', 's', '^', 'o', 's', '^', 'o']
"""Configuration constants.""" import os PACKAGEDIR = os.path.dirname(os.path.abspath(__file__)) HOSTNAME = os.uname()[1] if HOSTNAME == 'ec.geert.io' or HOSTNAME == 'imo.geert.io' or HOSTNAME == 'meteorflux.io': DEBUG = True DBINFO = 'host=/var/run/postgresql dbname=flux_kca user=postgres' TMPDIR = '/var/www/tmp' TMPDIR_WWW = '/tmp' else: DEBUG = True DBINFO = 'host=/var/run/postgresql dbname=fluxdb user=postgres' TMPDIR = '/tmp' TMPDIR_WWW = '/tmp' # Database to use for unit tests DBINFO_TESTING = 'host=/var/run/postgresql dbname=testdb user=postgres' DPI = 80 # Default DPI of graphs MARKERS = ['s', '^', 'o', 's', '^', 'o', 's', '^', 'o', 's', '^', 'o'] Set database name to fluxdb"""Configuration constants.""" import os PACKAGEDIR = os.path.dirname(os.path.abspath(__file__)) HOSTNAME = os.uname()[1] if HOSTNAME == 'ec.geert.io' or HOSTNAME == 'imo.geert.io' or HOSTNAME == 'meteorflux.io': DEBUG = True DBINFO = 'host=/var/run/postgresql dbname=fluxdb user=postgres' TMPDIR = '/var/www/tmp' TMPDIR_WWW = '/tmp' else: DEBUG = True DBINFO = 'host=/var/run/postgresql dbname=fluxdb user=postgres' TMPDIR = '/tmp' TMPDIR_WWW = '/tmp' # Database to use for unit tests DBINFO_TESTING = 'host=/var/run/postgresql dbname=testdb user=postgres' DPI = 80 # Default DPI of graphs MARKERS = ['s', '^', 'o', 's', '^', 'o', 's', '^', 'o', 's', '^', 'o']
<commit_before>"""Configuration constants.""" import os PACKAGEDIR = os.path.dirname(os.path.abspath(__file__)) HOSTNAME = os.uname()[1] if HOSTNAME == 'ec.geert.io' or HOSTNAME == 'imo.geert.io' or HOSTNAME == 'meteorflux.io': DEBUG = True DBINFO = 'host=/var/run/postgresql dbname=flux_kca user=postgres' TMPDIR = '/var/www/tmp' TMPDIR_WWW = '/tmp' else: DEBUG = True DBINFO = 'host=/var/run/postgresql dbname=fluxdb user=postgres' TMPDIR = '/tmp' TMPDIR_WWW = '/tmp' # Database to use for unit tests DBINFO_TESTING = 'host=/var/run/postgresql dbname=testdb user=postgres' DPI = 80 # Default DPI of graphs MARKERS = ['s', '^', 'o', 's', '^', 'o', 's', '^', 'o', 's', '^', 'o'] <commit_msg>Set database name to fluxdb<commit_after>"""Configuration constants.""" import os PACKAGEDIR = os.path.dirname(os.path.abspath(__file__)) HOSTNAME = os.uname()[1] if HOSTNAME == 'ec.geert.io' or HOSTNAME == 'imo.geert.io' or HOSTNAME == 'meteorflux.io': DEBUG = True DBINFO = 'host=/var/run/postgresql dbname=fluxdb user=postgres' TMPDIR = '/var/www/tmp' TMPDIR_WWW = '/tmp' else: DEBUG = True DBINFO = 'host=/var/run/postgresql dbname=fluxdb user=postgres' TMPDIR = '/tmp' TMPDIR_WWW = '/tmp' # Database to use for unit tests DBINFO_TESTING = 'host=/var/run/postgresql dbname=testdb user=postgres' DPI = 80 # Default DPI of graphs MARKERS = ['s', '^', 'o', 's', '^', 'o', 's', '^', 'o', 's', '^', 'o']
e97fabb025e66671edbe4446efa966d853f1d6df
tools/utils.py
tools/utils.py
#!/usr/bin/env python ''' This script provides utils for python scripts in cameo. ''' import os import sys import subprocess def TryAddDepotToolsToPythonPath(): depot_tools = FindDepotToolsInPath() if depot_tools: sys.path.append(depot_tools) def FindDepotToolsInPath(): paths = os.getenv('PATH').split(os.path.pathsep) for path in paths: if os.path.basename(path) == '': # path is end with os.path.pathsep path = os.path.dirname(path) if os.path.basename(path) == 'depot_tools': return path return None def IsWindows(): return sys.platform == 'cygwin' or sys.platform.startswith('win') def IsLinux(): return sys.platform.startswith('linux') def IsMac(): return sys.platform.startswith('darwin') def GitExe(): if IsWindows(): return 'git.bat' else: return 'git' def GetCommandOutput(command, cwd=None): proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, bufsize=1, cwd=cwd) output = proc.communicate()[0] result = proc.returncode if result: raise Exception('%s: %s' % (subprocess.list2cmdline(command), output)) return output
#!/usr/bin/env python ''' This script provides utils for python scripts in cameo. ''' import os import sys import subprocess def TryAddDepotToolsToPythonPath(): depot_tools = FindDepotToolsInPath() if depot_tools: sys.path.append(depot_tools) python_path = os.environ.get('PYTHONPATH') if python_path: os.environ['PYTHONPATH'] = os.path.pathsep.join( python_path.split(os.path.pathsep)+[depot_tools]) else: os.environ['PYTHONPATH'] = depot_tools def FindDepotToolsInPath(): paths = os.getenv('PATH').split(os.path.pathsep) for path in paths: if os.path.basename(path) == '': # path is end with os.path.pathsep path = os.path.dirname(path) if os.path.basename(path) == 'depot_tools': return path return None def IsWindows(): return sys.platform == 'cygwin' or sys.platform.startswith('win') def IsLinux(): return sys.platform.startswith('linux') def IsMac(): return sys.platform.startswith('darwin') def GitExe(): if IsWindows(): return 'git.bat' else: return 'git' def GetCommandOutput(command, cwd=None): proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, bufsize=1, cwd=cwd) output = proc.communicate()[0] result = proc.returncode if result: raise Exception('%s: %s' % (subprocess.list2cmdline(command), output)) return output
Add depot_tools to PYTHONPATH for pylint
Add depot_tools to PYTHONPATH for pylint Otherwise, pylint will fail on trybot.
Python
bsd-3-clause
weiyirong/crosswalk-1,qjia7/crosswalk,baleboy/crosswalk,pozdnyakov/crosswalk,rakuco/crosswalk,jpike88/crosswalk,baleboy/crosswalk,baleboy/crosswalk,huningxin/crosswalk,jpike88/crosswalk,myroot/crosswalk,Pluto-tv/crosswalk,Pluto-tv/crosswalk,jondong/crosswalk,DonnaWuDongxia/crosswalk,ZhengXinCN/crosswalk,tomatell/crosswalk,seanlong/crosswalk,ZhengXinCN/crosswalk,Bysmyyr/crosswalk,rakuco/crosswalk,lincsoon/crosswalk,minggangw/crosswalk,XiaosongWei/crosswalk,hgl888/crosswalk,fujunwei/crosswalk,marcuspridham/crosswalk,fujunwei/crosswalk,heke123/crosswalk,kurli/crosswalk,crosswalk-project/crosswalk-efl,RafuCater/crosswalk,kurli/crosswalk,amaniak/crosswalk,leonhsl/crosswalk,lincsoon/crosswalk,rakuco/crosswalk,xzhan96/crosswalk,crosswalk-project/crosswalk-efl,bestwpw/crosswalk,tomatell/crosswalk,huningxin/crosswalk,axinging/crosswalk,jondwillis/crosswalk,DonnaWuDongxia/crosswalk,alex-zhang/crosswalk,lincsoon/crosswalk,seanlong/crosswalk,hgl888/crosswalk-efl,XiaosongWei/crosswalk,jondong/crosswalk,heke123/crosswalk,pozdnyakov/crosswalk,chuan9/crosswalk,qjia7/crosswalk,crosswalk-project/crosswalk-efl,RafuCater/crosswalk,qjia7/crosswalk,tomatell/crosswalk,Shouqun/crosswalk,hgl888/crosswalk,stonegithubs/crosswalk,axinging/crosswalk,tomatell/crosswalk,RafuCater/crosswalk,siovene/crosswalk,chuan9/crosswalk,hgl888/crosswalk-efl,crosswalk-project/crosswalk,pk-sam/crosswalk,zliang7/crosswalk,qjia7/crosswalk,siovene/crosswalk,leonhsl/crosswalk,xzhan96/crosswalk,tedshroyer/crosswalk,alex-zhang/crosswalk,siovene/crosswalk,leonhsl/crosswalk,xzhan96/crosswalk,tomatell/crosswalk,chinakids/crosswalk,myroot/crosswalk,qjia7/crosswalk,minggangw/crosswalk,huningxin/crosswalk,seanlong/crosswalk,ZhengXinCN/crosswalk,tedshroyer/crosswalk,Shouqun/crosswalk,tedshroyer/crosswalk,xzhan96/crosswalk,crosswalk-project/crosswalk-efl,baleboy/crosswalk,Bysmyyr/crosswalk,tomatell/crosswalk,chinakids/crosswalk,RafuCater/crosswalk,huningxin/crosswalk,jondong/crosswalk,leonhsl/crosswalk,darktears/crosswalk,hgl888/crosswalk,amaniak/crosswalk,darktears/crosswalk,zliang7/crosswalk,mrunalk/crosswalk,dreamsxin/crosswalk,Pluto-tv/crosswalk,chuan9/crosswalk,hgl888/crosswalk-efl,dreamsxin/crosswalk,Bysmyyr/crosswalk,jpike88/crosswalk,mrunalk/crosswalk,Bysmyyr/crosswalk,ZhengXinCN/crosswalk,bestwpw/crosswalk,zliang7/crosswalk,amaniak/crosswalk,DonnaWuDongxia/crosswalk,Pluto-tv/crosswalk,huningxin/crosswalk,jondwillis/crosswalk,PeterWangIntel/crosswalk,darktears/crosswalk,leonhsl/crosswalk,chuan9/crosswalk,lincsoon/crosswalk,leonhsl/crosswalk,Pluto-tv/crosswalk,Shouqun/crosswalk,fujunwei/crosswalk,dreamsxin/crosswalk,lincsoon/crosswalk,RafuCater/crosswalk,Shouqun/crosswalk,PeterWangIntel/crosswalk,lincsoon/crosswalk,fujunwei/crosswalk,minggangw/crosswalk,fujunwei/crosswalk,zeropool/crosswalk,darktears/crosswalk,stonegithubs/crosswalk,weiyirong/crosswalk-1,amaniak/crosswalk,TheDirtyCalvinist/spacewalk,crosswalk-project/crosswalk,heke123/crosswalk,jondong/crosswalk,xzhan96/crosswalk,myroot/crosswalk,lincsoon/crosswalk,marcuspridham/crosswalk,pozdnyakov/crosswalk,marcuspridham/crosswalk,jpike88/crosswalk,TheDirtyCalvinist/spacewalk,mrunalk/crosswalk,minggangw/crosswalk,Shouqun/crosswalk,bestwpw/crosswalk,hgl888/crosswalk,tedshroyer/crosswalk,DonnaWuDongxia/crosswalk,Shouqun/crosswalk,heke123/crosswalk,heke123/crosswalk,amaniak/crosswalk,weiyirong/crosswalk-1,shaochangbin/crosswalk,crosswalk-project/crosswalk,stonegithubs/crosswalk,siovene/crosswalk,chinakids/crosswalk,pk-sam/crosswalk,jpike88/crosswalk,rakuco/crosswalk,kurli/crosswalk,wuhengzhi/crosswalk,zeropool/crosswalk,Pluto-tv/crosswalk,Bysmyyr/crosswalk,TheDirtyCalvinist/spacewalk,rakuco/crosswalk,kurli/crosswalk,amaniak/crosswalk,crosswalk-project/crosswalk-efl,zeropool/crosswalk,myroot/crosswalk,kurli/crosswalk,zliang7/crosswalk,chinakids/crosswalk,jondwillis/crosswalk,hgl888/crosswalk-efl,leonhsl/crosswalk,darktears/crosswalk,jondong/crosswalk,jondwillis/crosswalk,alex-zhang/crosswalk,ZhengXinCN/crosswalk,minggangw/crosswalk,xzhan96/crosswalk,Bysmyyr/crosswalk,chinakids/crosswalk,bestwpw/crosswalk,hgl888/crosswalk,baleboy/crosswalk,shaochangbin/crosswalk,minggangw/crosswalk,chinakids/crosswalk,zeropool/crosswalk,mrunalk/crosswalk,leonhsl/crosswalk,shaochangbin/crosswalk,heke123/crosswalk,pk-sam/crosswalk,stonegithubs/crosswalk,hgl888/crosswalk,hgl888/crosswalk-efl,jondong/crosswalk,marcuspridham/crosswalk,wuhengzhi/crosswalk,zliang7/crosswalk,qjia7/crosswalk,pozdnyakov/crosswalk,fujunwei/crosswalk,wuhengzhi/crosswalk,zeropool/crosswalk,pozdnyakov/crosswalk,crosswalk-project/crosswalk,hgl888/crosswalk-efl,XiaosongWei/crosswalk,seanlong/crosswalk,rakuco/crosswalk,marcuspridham/crosswalk,heke123/crosswalk,PeterWangIntel/crosswalk,xzhan96/crosswalk,tedshroyer/crosswalk,seanlong/crosswalk,tedshroyer/crosswalk,chuan9/crosswalk,crosswalk-project/crosswalk-efl,PeterWangIntel/crosswalk,PeterWangIntel/crosswalk,siovene/crosswalk,stonegithubs/crosswalk,jondwillis/crosswalk,jondwillis/crosswalk,baleboy/crosswalk,wuhengzhi/crosswalk,Bysmyyr/crosswalk,chuan9/crosswalk,weiyirong/crosswalk-1,ZhengXinCN/crosswalk,amaniak/crosswalk,axinging/crosswalk,siovene/crosswalk,stonegithubs/crosswalk,xzhan96/crosswalk,marcuspridham/crosswalk,mrunalk/crosswalk,stonegithubs/crosswalk,pk-sam/crosswalk,dreamsxin/crosswalk,XiaosongWei/crosswalk,axinging/crosswalk,xzhan96/crosswalk,jpike88/crosswalk,axinging/crosswalk,DonnaWuDongxia/crosswalk,crosswalk-project/crosswalk,bestwpw/crosswalk,darktears/crosswalk,chuan9/crosswalk,alex-zhang/crosswalk,Bysmyyr/crosswalk,marcuspridham/crosswalk,alex-zhang/crosswalk,DonnaWuDongxia/crosswalk,crosswalk-project/crosswalk,zeropool/crosswalk,huningxin/crosswalk,pk-sam/crosswalk,crosswalk-project/crosswalk,zliang7/crosswalk,marcuspridham/crosswalk,heke123/crosswalk,XiaosongWei/crosswalk,PeterWangIntel/crosswalk,mrunalk/crosswalk,axinging/crosswalk,jondong/crosswalk,lincsoon/crosswalk,weiyirong/crosswalk-1,RafuCater/crosswalk,jondong/crosswalk,shaochangbin/crosswalk,crosswalk-project/crosswalk-efl,pozdnyakov/crosswalk,fujunwei/crosswalk,jondwillis/crosswalk,seanlong/crosswalk,ZhengXinCN/crosswalk,rakuco/crosswalk,baleboy/crosswalk,darktears/crosswalk,weiyirong/crosswalk-1,TheDirtyCalvinist/spacewalk,minggangw/crosswalk,dreamsxin/crosswalk,dreamsxin/crosswalk,siovene/crosswalk,DonnaWuDongxia/crosswalk,TheDirtyCalvinist/spacewalk,RafuCater/crosswalk,PeterWangIntel/crosswalk
#!/usr/bin/env python ''' This script provides utils for python scripts in cameo. ''' import os import sys import subprocess def TryAddDepotToolsToPythonPath(): depot_tools = FindDepotToolsInPath() if depot_tools: sys.path.append(depot_tools) def FindDepotToolsInPath(): paths = os.getenv('PATH').split(os.path.pathsep) for path in paths: if os.path.basename(path) == '': # path is end with os.path.pathsep path = os.path.dirname(path) if os.path.basename(path) == 'depot_tools': return path return None def IsWindows(): return sys.platform == 'cygwin' or sys.platform.startswith('win') def IsLinux(): return sys.platform.startswith('linux') def IsMac(): return sys.platform.startswith('darwin') def GitExe(): if IsWindows(): return 'git.bat' else: return 'git' def GetCommandOutput(command, cwd=None): proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, bufsize=1, cwd=cwd) output = proc.communicate()[0] result = proc.returncode if result: raise Exception('%s: %s' % (subprocess.list2cmdline(command), output)) return output Add depot_tools to PYTHONPATH for pylint Otherwise, pylint will fail on trybot.
#!/usr/bin/env python ''' This script provides utils for python scripts in cameo. ''' import os import sys import subprocess def TryAddDepotToolsToPythonPath(): depot_tools = FindDepotToolsInPath() if depot_tools: sys.path.append(depot_tools) python_path = os.environ.get('PYTHONPATH') if python_path: os.environ['PYTHONPATH'] = os.path.pathsep.join( python_path.split(os.path.pathsep)+[depot_tools]) else: os.environ['PYTHONPATH'] = depot_tools def FindDepotToolsInPath(): paths = os.getenv('PATH').split(os.path.pathsep) for path in paths: if os.path.basename(path) == '': # path is end with os.path.pathsep path = os.path.dirname(path) if os.path.basename(path) == 'depot_tools': return path return None def IsWindows(): return sys.platform == 'cygwin' or sys.platform.startswith('win') def IsLinux(): return sys.platform.startswith('linux') def IsMac(): return sys.platform.startswith('darwin') def GitExe(): if IsWindows(): return 'git.bat' else: return 'git' def GetCommandOutput(command, cwd=None): proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, bufsize=1, cwd=cwd) output = proc.communicate()[0] result = proc.returncode if result: raise Exception('%s: %s' % (subprocess.list2cmdline(command), output)) return output
<commit_before>#!/usr/bin/env python ''' This script provides utils for python scripts in cameo. ''' import os import sys import subprocess def TryAddDepotToolsToPythonPath(): depot_tools = FindDepotToolsInPath() if depot_tools: sys.path.append(depot_tools) def FindDepotToolsInPath(): paths = os.getenv('PATH').split(os.path.pathsep) for path in paths: if os.path.basename(path) == '': # path is end with os.path.pathsep path = os.path.dirname(path) if os.path.basename(path) == 'depot_tools': return path return None def IsWindows(): return sys.platform == 'cygwin' or sys.platform.startswith('win') def IsLinux(): return sys.platform.startswith('linux') def IsMac(): return sys.platform.startswith('darwin') def GitExe(): if IsWindows(): return 'git.bat' else: return 'git' def GetCommandOutput(command, cwd=None): proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, bufsize=1, cwd=cwd) output = proc.communicate()[0] result = proc.returncode if result: raise Exception('%s: %s' % (subprocess.list2cmdline(command), output)) return output <commit_msg>Add depot_tools to PYTHONPATH for pylint Otherwise, pylint will fail on trybot.<commit_after>
#!/usr/bin/env python ''' This script provides utils for python scripts in cameo. ''' import os import sys import subprocess def TryAddDepotToolsToPythonPath(): depot_tools = FindDepotToolsInPath() if depot_tools: sys.path.append(depot_tools) python_path = os.environ.get('PYTHONPATH') if python_path: os.environ['PYTHONPATH'] = os.path.pathsep.join( python_path.split(os.path.pathsep)+[depot_tools]) else: os.environ['PYTHONPATH'] = depot_tools def FindDepotToolsInPath(): paths = os.getenv('PATH').split(os.path.pathsep) for path in paths: if os.path.basename(path) == '': # path is end with os.path.pathsep path = os.path.dirname(path) if os.path.basename(path) == 'depot_tools': return path return None def IsWindows(): return sys.platform == 'cygwin' or sys.platform.startswith('win') def IsLinux(): return sys.platform.startswith('linux') def IsMac(): return sys.platform.startswith('darwin') def GitExe(): if IsWindows(): return 'git.bat' else: return 'git' def GetCommandOutput(command, cwd=None): proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, bufsize=1, cwd=cwd) output = proc.communicate()[0] result = proc.returncode if result: raise Exception('%s: %s' % (subprocess.list2cmdline(command), output)) return output
#!/usr/bin/env python ''' This script provides utils for python scripts in cameo. ''' import os import sys import subprocess def TryAddDepotToolsToPythonPath(): depot_tools = FindDepotToolsInPath() if depot_tools: sys.path.append(depot_tools) def FindDepotToolsInPath(): paths = os.getenv('PATH').split(os.path.pathsep) for path in paths: if os.path.basename(path) == '': # path is end with os.path.pathsep path = os.path.dirname(path) if os.path.basename(path) == 'depot_tools': return path return None def IsWindows(): return sys.platform == 'cygwin' or sys.platform.startswith('win') def IsLinux(): return sys.platform.startswith('linux') def IsMac(): return sys.platform.startswith('darwin') def GitExe(): if IsWindows(): return 'git.bat' else: return 'git' def GetCommandOutput(command, cwd=None): proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, bufsize=1, cwd=cwd) output = proc.communicate()[0] result = proc.returncode if result: raise Exception('%s: %s' % (subprocess.list2cmdline(command), output)) return output Add depot_tools to PYTHONPATH for pylint Otherwise, pylint will fail on trybot.#!/usr/bin/env python ''' This script provides utils for python scripts in cameo. ''' import os import sys import subprocess def TryAddDepotToolsToPythonPath(): depot_tools = FindDepotToolsInPath() if depot_tools: sys.path.append(depot_tools) python_path = os.environ.get('PYTHONPATH') if python_path: os.environ['PYTHONPATH'] = os.path.pathsep.join( python_path.split(os.path.pathsep)+[depot_tools]) else: os.environ['PYTHONPATH'] = depot_tools def FindDepotToolsInPath(): paths = os.getenv('PATH').split(os.path.pathsep) for path in paths: if os.path.basename(path) == '': # path is end with os.path.pathsep path = os.path.dirname(path) if os.path.basename(path) == 'depot_tools': return path return None def IsWindows(): return sys.platform == 'cygwin' or sys.platform.startswith('win') def IsLinux(): return sys.platform.startswith('linux') def IsMac(): return sys.platform.startswith('darwin') def GitExe(): if IsWindows(): return 'git.bat' else: return 'git' def GetCommandOutput(command, cwd=None): proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, bufsize=1, cwd=cwd) output = proc.communicate()[0] result = proc.returncode if result: raise Exception('%s: %s' % (subprocess.list2cmdline(command), output)) return output
<commit_before>#!/usr/bin/env python ''' This script provides utils for python scripts in cameo. ''' import os import sys import subprocess def TryAddDepotToolsToPythonPath(): depot_tools = FindDepotToolsInPath() if depot_tools: sys.path.append(depot_tools) def FindDepotToolsInPath(): paths = os.getenv('PATH').split(os.path.pathsep) for path in paths: if os.path.basename(path) == '': # path is end with os.path.pathsep path = os.path.dirname(path) if os.path.basename(path) == 'depot_tools': return path return None def IsWindows(): return sys.platform == 'cygwin' or sys.platform.startswith('win') def IsLinux(): return sys.platform.startswith('linux') def IsMac(): return sys.platform.startswith('darwin') def GitExe(): if IsWindows(): return 'git.bat' else: return 'git' def GetCommandOutput(command, cwd=None): proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, bufsize=1, cwd=cwd) output = proc.communicate()[0] result = proc.returncode if result: raise Exception('%s: %s' % (subprocess.list2cmdline(command), output)) return output <commit_msg>Add depot_tools to PYTHONPATH for pylint Otherwise, pylint will fail on trybot.<commit_after>#!/usr/bin/env python ''' This script provides utils for python scripts in cameo. ''' import os import sys import subprocess def TryAddDepotToolsToPythonPath(): depot_tools = FindDepotToolsInPath() if depot_tools: sys.path.append(depot_tools) python_path = os.environ.get('PYTHONPATH') if python_path: os.environ['PYTHONPATH'] = os.path.pathsep.join( python_path.split(os.path.pathsep)+[depot_tools]) else: os.environ['PYTHONPATH'] = depot_tools def FindDepotToolsInPath(): paths = os.getenv('PATH').split(os.path.pathsep) for path in paths: if os.path.basename(path) == '': # path is end with os.path.pathsep path = os.path.dirname(path) if os.path.basename(path) == 'depot_tools': return path return None def IsWindows(): return sys.platform == 'cygwin' or sys.platform.startswith('win') def IsLinux(): return sys.platform.startswith('linux') def IsMac(): return sys.platform.startswith('darwin') def GitExe(): if IsWindows(): return 'git.bat' else: return 'git' def GetCommandOutput(command, cwd=None): proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, bufsize=1, cwd=cwd) output = proc.communicate()[0] result = proc.returncode if result: raise Exception('%s: %s' % (subprocess.list2cmdline(command), output)) return output
ec784672ec59274e4bb4c227935439d6c0b31155
numatuned/virsh.py
numatuned/virsh.py
import glob import subprocess from .read import read class Virsh: """Class can be used to execute virsh commands for a given domain""" domain = "" def __init__(self, domain): self.domain = domain @staticmethod def get_domain_list(): pid_files = glob.glob('/var/run/libvirt/qemu/*.pid') domain_list = {} for pid_file in pid_files: # read the pid domain_list[pid_file] = read(pid_file) return domain_list def execute(self, arguments): output = subprocess.check_output(["virsh"] + arguments, stderr=subprocess.STDOUT) return output.decode('utf-8') def migrate_to(self, zone): subprocess.call(["migratepages", self.get_pid, "all", str(zone.number)]) def is_running(self): output = self.execute(["domstate", self.domain]) domstate = output.strip('\n').strip(' ').strip('\n') return domstate == 'running' def get_pid(self): pid = read(self.get_pid_file()) return pid def get_pid_file(self): return '/var/run/libvirt/qemu/' + self.domain + '.pid'
import glob import subprocess from .read import read class Virsh: """Class can be used to execute virsh commands for a given domain""" domain = "" def __init__(self, domain): self.domain = domain @staticmethod def get_domain_list(): pid_files = glob.glob('/var/run/libvirt/qemu/*.pid') domain_list = {} for pid_file in pid_files: # read the pid domain_list[pid_file] = read(pid_file) return domain_list def execute(self, arguments): output = subprocess.check_output(["virsh"] + arguments, stderr=subprocess.STDOUT) return output.decode('utf-8') def migrate_to(self, zone): self.execute(["numatune", self.domain, "--nodeset", str(zone.number), "--mode", "preferred"]) def is_running(self): output = self.execute(["domstate", self.domain]) domstate = output.strip('\n').strip(' ').strip('\n') return domstate == 'running' def get_pid(self): pid = read(self.get_pid_file()) return pid def get_pid_file(self): return '/var/run/libvirt/qemu/' + self.domain + '.pid'
Use numatune but use mode preferred
Use numatune but use mode preferred
Python
mit
dionbosschieter/numatuned,dionbosschieter/numatuned
import glob import subprocess from .read import read class Virsh: """Class can be used to execute virsh commands for a given domain""" domain = "" def __init__(self, domain): self.domain = domain @staticmethod def get_domain_list(): pid_files = glob.glob('/var/run/libvirt/qemu/*.pid') domain_list = {} for pid_file in pid_files: # read the pid domain_list[pid_file] = read(pid_file) return domain_list def execute(self, arguments): output = subprocess.check_output(["virsh"] + arguments, stderr=subprocess.STDOUT) return output.decode('utf-8') def migrate_to(self, zone): subprocess.call(["migratepages", self.get_pid, "all", str(zone.number)]) def is_running(self): output = self.execute(["domstate", self.domain]) domstate = output.strip('\n').strip(' ').strip('\n') return domstate == 'running' def get_pid(self): pid = read(self.get_pid_file()) return pid def get_pid_file(self): return '/var/run/libvirt/qemu/' + self.domain + '.pid' Use numatune but use mode preferred
import glob import subprocess from .read import read class Virsh: """Class can be used to execute virsh commands for a given domain""" domain = "" def __init__(self, domain): self.domain = domain @staticmethod def get_domain_list(): pid_files = glob.glob('/var/run/libvirt/qemu/*.pid') domain_list = {} for pid_file in pid_files: # read the pid domain_list[pid_file] = read(pid_file) return domain_list def execute(self, arguments): output = subprocess.check_output(["virsh"] + arguments, stderr=subprocess.STDOUT) return output.decode('utf-8') def migrate_to(self, zone): self.execute(["numatune", self.domain, "--nodeset", str(zone.number), "--mode", "preferred"]) def is_running(self): output = self.execute(["domstate", self.domain]) domstate = output.strip('\n').strip(' ').strip('\n') return domstate == 'running' def get_pid(self): pid = read(self.get_pid_file()) return pid def get_pid_file(self): return '/var/run/libvirt/qemu/' + self.domain + '.pid'
<commit_before>import glob import subprocess from .read import read class Virsh: """Class can be used to execute virsh commands for a given domain""" domain = "" def __init__(self, domain): self.domain = domain @staticmethod def get_domain_list(): pid_files = glob.glob('/var/run/libvirt/qemu/*.pid') domain_list = {} for pid_file in pid_files: # read the pid domain_list[pid_file] = read(pid_file) return domain_list def execute(self, arguments): output = subprocess.check_output(["virsh"] + arguments, stderr=subprocess.STDOUT) return output.decode('utf-8') def migrate_to(self, zone): subprocess.call(["migratepages", self.get_pid, "all", str(zone.number)]) def is_running(self): output = self.execute(["domstate", self.domain]) domstate = output.strip('\n').strip(' ').strip('\n') return domstate == 'running' def get_pid(self): pid = read(self.get_pid_file()) return pid def get_pid_file(self): return '/var/run/libvirt/qemu/' + self.domain + '.pid' <commit_msg>Use numatune but use mode preferred<commit_after>
import glob import subprocess from .read import read class Virsh: """Class can be used to execute virsh commands for a given domain""" domain = "" def __init__(self, domain): self.domain = domain @staticmethod def get_domain_list(): pid_files = glob.glob('/var/run/libvirt/qemu/*.pid') domain_list = {} for pid_file in pid_files: # read the pid domain_list[pid_file] = read(pid_file) return domain_list def execute(self, arguments): output = subprocess.check_output(["virsh"] + arguments, stderr=subprocess.STDOUT) return output.decode('utf-8') def migrate_to(self, zone): self.execute(["numatune", self.domain, "--nodeset", str(zone.number), "--mode", "preferred"]) def is_running(self): output = self.execute(["domstate", self.domain]) domstate = output.strip('\n').strip(' ').strip('\n') return domstate == 'running' def get_pid(self): pid = read(self.get_pid_file()) return pid def get_pid_file(self): return '/var/run/libvirt/qemu/' + self.domain + '.pid'
import glob import subprocess from .read import read class Virsh: """Class can be used to execute virsh commands for a given domain""" domain = "" def __init__(self, domain): self.domain = domain @staticmethod def get_domain_list(): pid_files = glob.glob('/var/run/libvirt/qemu/*.pid') domain_list = {} for pid_file in pid_files: # read the pid domain_list[pid_file] = read(pid_file) return domain_list def execute(self, arguments): output = subprocess.check_output(["virsh"] + arguments, stderr=subprocess.STDOUT) return output.decode('utf-8') def migrate_to(self, zone): subprocess.call(["migratepages", self.get_pid, "all", str(zone.number)]) def is_running(self): output = self.execute(["domstate", self.domain]) domstate = output.strip('\n').strip(' ').strip('\n') return domstate == 'running' def get_pid(self): pid = read(self.get_pid_file()) return pid def get_pid_file(self): return '/var/run/libvirt/qemu/' + self.domain + '.pid' Use numatune but use mode preferredimport glob import subprocess from .read import read class Virsh: """Class can be used to execute virsh commands for a given domain""" domain = "" def __init__(self, domain): self.domain = domain @staticmethod def get_domain_list(): pid_files = glob.glob('/var/run/libvirt/qemu/*.pid') domain_list = {} for pid_file in pid_files: # read the pid domain_list[pid_file] = read(pid_file) return domain_list def execute(self, arguments): output = subprocess.check_output(["virsh"] + arguments, stderr=subprocess.STDOUT) return output.decode('utf-8') def migrate_to(self, zone): self.execute(["numatune", self.domain, "--nodeset", str(zone.number), "--mode", "preferred"]) def is_running(self): output = self.execute(["domstate", self.domain]) domstate = output.strip('\n').strip(' ').strip('\n') return domstate == 'running' def get_pid(self): pid = read(self.get_pid_file()) return pid def get_pid_file(self): return '/var/run/libvirt/qemu/' + self.domain + '.pid'
<commit_before>import glob import subprocess from .read import read class Virsh: """Class can be used to execute virsh commands for a given domain""" domain = "" def __init__(self, domain): self.domain = domain @staticmethod def get_domain_list(): pid_files = glob.glob('/var/run/libvirt/qemu/*.pid') domain_list = {} for pid_file in pid_files: # read the pid domain_list[pid_file] = read(pid_file) return domain_list def execute(self, arguments): output = subprocess.check_output(["virsh"] + arguments, stderr=subprocess.STDOUT) return output.decode('utf-8') def migrate_to(self, zone): subprocess.call(["migratepages", self.get_pid, "all", str(zone.number)]) def is_running(self): output = self.execute(["domstate", self.domain]) domstate = output.strip('\n').strip(' ').strip('\n') return domstate == 'running' def get_pid(self): pid = read(self.get_pid_file()) return pid def get_pid_file(self): return '/var/run/libvirt/qemu/' + self.domain + '.pid' <commit_msg>Use numatune but use mode preferred<commit_after>import glob import subprocess from .read import read class Virsh: """Class can be used to execute virsh commands for a given domain""" domain = "" def __init__(self, domain): self.domain = domain @staticmethod def get_domain_list(): pid_files = glob.glob('/var/run/libvirt/qemu/*.pid') domain_list = {} for pid_file in pid_files: # read the pid domain_list[pid_file] = read(pid_file) return domain_list def execute(self, arguments): output = subprocess.check_output(["virsh"] + arguments, stderr=subprocess.STDOUT) return output.decode('utf-8') def migrate_to(self, zone): self.execute(["numatune", self.domain, "--nodeset", str(zone.number), "--mode", "preferred"]) def is_running(self): output = self.execute(["domstate", self.domain]) domstate = output.strip('\n').strip(' ').strip('\n') return domstate == 'running' def get_pid(self): pid = read(self.get_pid_file()) return pid def get_pid_file(self): return '/var/run/libvirt/qemu/' + self.domain + '.pid'
c06d4ddc54bbe4b10dd0722f5a76d9cb7550da53
tests/test_config.py
tests/test_config.py
from pytest import fixture from oshino.config import Config, RiemannConfig @fixture def base_config(): return Config({"riemann": {"host": "localhost", "port": 5555 }, "interval": 5 }) def test_base_config_get_riemann(base_config): assert isinstance(base_config.riemann, RiemannConfig) def test_base_config_interval(base_config): assert base_config.interval == 5
from pytest import fixture from oshino.config import Config, RiemannConfig @fixture def base_config(): return Config({"riemann": {"host": "localhost", "port": 5555 }, "interval": 5 }) @fixture def incomplete_config(): return Config({}) def test_base_config_get_riemann(base_config): assert isinstance(base_config.riemann, RiemannConfig) def test_base_config_interval(base_config): assert base_config.interval == 5 def test_incomplete_config_get_riemann(incomplete_config): assert isinstance(incomplete_config.riemann, RiemannConfig)
Test case when there's no riemann config
Test case when there's no riemann config
Python
mit
CodersOfTheNight/oshino
28bb18bae725af7c7af434c809dc2e32f8a3fcd6
ticketus/ui/views.py
ticketus/ui/views.py
from django.shortcuts import get_object_or_404, render, redirect
from django.views.decorators.http import require_POST

from ticketus.core.models import *
from ticketus.core.forms import CommentForm

def ticket_list(request, template='ui/ticket_list.html'):
    tickets = Ticket.objects.all()
    context = {'tickets': tickets}
    return render(request, template, context)

def ticket_page(request, ticket_id, template='ui/ticket_page.html'):
    ticket = get_object_or_404(Ticket, id=ticket_id)
    context = {'ticket': ticket}
    return render(request, template, context)

@require_POST
def post_new_comment(request, ticket_id):
    ticket = get_object_or_404(Ticket, id=ticket_id)
    form = CommentForm(request.POST)
    if form.is_valid():
        c = Comment(raw_text=form.cleaned_data['raw_text'],
                    commenter=ticket.requester)
        ticket.comment_set.add(c)
    return redirect(ticket)
from django.shortcuts import get_object_or_404, render, redirect
from django.views.decorators.http import require_POST

from ticketus.core.models import *
from ticketus.core.forms import CommentForm

def ticket_list(request, template='ui/ticket_list.html'):
    tickets = Ticket.objects.all()
    context = {'tickets': tickets}
    return render(request, template, context)

def ticket_page(request, ticket_id, template='ui/ticket_page.html'):
    ticket = get_object_or_404(Ticket, id=ticket_id)
    context = {'ticket': ticket}
    return render(request, template, context)

@require_POST
def post_new_comment(request, ticket_id):
    ticket = get_object_or_404(Ticket, id=ticket_id)
    form = CommentForm(request.POST)
    if form.is_valid():
        c = Comment(raw_text=form.cleaned_data['raw_text'],
                    commenter=request.user)
        ticket.comment_set.add(c)
    return redirect(ticket)
Set commenter to logged in user, not ticket requestor
Set commenter to logged in user, not ticket requestor
Python
bsd-2-clause
sjkingo/ticketus,sjkingo/ticketus,sjkingo/ticketus,sjkingo/ticketus
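
The new version assumes request.user is an authenticated user object; if anonymous visitors can reach this view, commenter would be an AnonymousUser. A guard such as Django's login_required decorator (a sketch, not part of the commit) makes that assumption explicit:

from django.contrib.auth.decorators import login_required
from django.views.decorators.http import require_POST

@login_required   # ensures request.user is a real User before saving it as commenter
@require_POST
def post_new_comment(request, ticket_id):
    ...
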
33ce8c19469b703b73727bd759b9655124919ae5
script/coroutines.py
script/coroutines.py
# -*- coding: ascii -*-

# A generator-based coroutine framework.

import select

def run(routines):
    while routines:
        for r in routines:
            try:
                result = r.next()
            except StopIteration:
                routines.remove(r)
# -*- coding: ascii -*-

# A generator-based coroutine framework.

import select

class Executor:
    def __init__(self):
        self.routines = {}

    def add(self, routine):
        self.routines[routine] = True

    def _remove(self, routine):
        self.routines.pop(routine, None)

    def __call__(self):
        while self.routines:
            for r in tuple(self.routines):
                try:
                    r.next()
                except StopIteration:
                    self._remove(r)

def run(routines):
    ex = Executor()
    for r in routines:
        ex.add(r)
    ex()
Convert coroutine executor to OOP
[Scripts] Convert coroutine executor to OOP
Python
mit
CylonicRaider/Instant,CylonicRaider/Instant,CylonicRaider/Instant,CylonicRaider/Instant,CylonicRaider/Instant
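
A rough usage sketch for the executor above (generator names invented; Python 2 syntax, to match the r.next() calls): registered generators are stepped in turn until each raises StopIteration.

def ticker(name, count):
    # Toy coroutine: does one step of work, then yields control back.
    for i in range(count):
        print '%s step %d' % (name, i)
        yield

ex = Executor()
ex.add(ticker('a', 3))
ex.add(ticker('b', 2))
ex()  # drives both generators until they are exhausted
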
29bf7ef7de9e0a5c66876b126f4df7ef279e30b6
mediacloud/mediawords/db/exceptions/handler.py
mediacloud/mediawords/db/exceptions/handler.py
class McDatabaseHandlerException(Exception):
    """Database handler exception."""
    pass


class McConnectException(McDatabaseHandlerException):
    """__connect() exception."""
    pass


class McSchemaIsUpToDateException(McDatabaseHandlerException):
    """schema_is_up_to_date() exception."""
    pass


class McQueryException(McDatabaseHandlerException):
    """query() exception."""
    pass


class McPrimaryKeyColumnException(McDatabaseHandlerException):
    """primary_key_column() exception."""
    pass


class McFindByIDException(McDatabaseHandlerException):
    """find_by_id() exception."""
    pass


class McRequireByIDException(McDatabaseHandlerException):
    """require_by_id() exception."""
    pass


class McUpdateByIDException(McDatabaseHandlerException):
    """update_by_id() exception."""
    pass


class McDeleteByIDException(McDatabaseHandlerException):
    """delete_by_id() exception."""
    pass


class McCreateException(McDatabaseHandlerException):
    """create() exception."""
    pass


class McSelectException(McDatabaseHandlerException):
    """select() exception."""
    pass


class McFindOrCreateException(McDatabaseHandlerException):
    """find_or_create() exception."""
    pass


class McQuoteException(McDatabaseHandlerException):
    """quote() exception."""
    pass


class McPrepareException(McDatabaseHandlerException):
    """prepare() exception."""
    pass


class McQueryPagedHashesException(McDatabaseHandlerException):
    """query_paged_hashes() exception."""
    pass


class McTransactionException(McDatabaseHandlerException):
    """Exception thrown on transaction problems."""
    pass


class McBeginException(McTransactionException):
    """begin() exception."""
    pass
class McDatabaseHandlerException(Exception):
    """Database handler exception."""
    pass


class McConnectException(McDatabaseHandlerException):
    """__connect() exception."""
    pass


class McSchemaIsUpToDateException(McDatabaseHandlerException):
    """schema_is_up_to_date() exception."""
    pass


class McQueryException(McDatabaseHandlerException):
    """query() exception."""
    pass


class McPrimaryKeyColumnException(McDatabaseHandlerException):
    """primary_key_column() exception."""
    pass


class McFindByIDException(McDatabaseHandlerException):
    """find_by_id() exception."""
    pass


class McRequireByIDException(McDatabaseHandlerException):
    """require_by_id() exception."""
    pass


class McUpdateByIDException(McDatabaseHandlerException):
    """update_by_id() exception."""
    pass


class McDeleteByIDException(McDatabaseHandlerException):
    """delete_by_id() exception."""
    pass


class McCreateException(McDatabaseHandlerException):
    """create() exception."""
    pass


class McFindOrCreateException(McDatabaseHandlerException):
    """find_or_create() exception."""
    pass


class McQuoteException(McDatabaseHandlerException):
    """quote() exception."""
    pass


class McPrepareException(McDatabaseHandlerException):
    """prepare() exception."""
    pass


class McQueryPagedHashesException(McDatabaseHandlerException):
    """query_paged_hashes() exception."""
    pass


class McTransactionException(McDatabaseHandlerException):
    """Exception thrown on transaction problems."""
    pass


class McBeginException(McTransactionException):
    """begin() exception."""
    pass
Revert "Add exception to be thrown by select()"
Revert "Add exception to be thrown by select()" This reverts commit 1009bd3b5e5941aff2f7b3852494ee19f085dcce.
Python
agpl-3.0
berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud
874d6f568a1367cbaad077648202f3328cd2eb8f
modules/Metadata/entropy.py
modules/Metadata/entropy.py
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import division, absolute_import, with_statement, print_function, unicode_literals

from collections import Counter
import math

__author__ = "Austin West"
__license__ = "MPL 2.0"

TYPE = "Metadata"
NAME = "entropy"


def check():
    return True


def scan(filelist):
    '''Calculate entropy of a string'''
    results = []

    for fname in filelist:
        with open(fname, 'rb') as f:
            text = f.read()
        chars, lns = Counter(text), float(len(text))
        result = -sum(count/lns * math.log(count/lns, 2) for count in chars.values())
        results.append((fname, result))

    metadata = {}
    metadata["Name"] = NAME
    metadata["Type"] = TYPE
    metadata["Include"] = False
    return (results, metadata)
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import division, absolute_import, with_statement, print_function, unicode_literals

from collections import Counter
import math

__author__ = "Austin West"
__license__ = "MPL 2.0"

TYPE = "Metadata"
NAME = "entropy"


def check(conf=DEFAULTCONF):
    return True


def scan(filelist):
    '''Calculate entropy of a string'''
    results = []

    for fname in filelist:
        with open(fname, 'rb') as f:
            text = f.read()
        chars, lns = Counter(text), float(len(text))
        result = -sum(count/lns * math.log(count/lns, 2) for count in chars.values())
        results.append((fname, result))

    metadata = {}
    metadata["Name"] = NAME
    metadata["Type"] = TYPE
    metadata["Include"] = False
    return (results, metadata)
Add conf arg to check function
Add conf arg to check function
Python
mpl-2.0
jmlong1027/multiscanner,awest1339/multiscanner,awest1339/multiscanner,awest1339/multiscanner,MITRECND/multiscanner,MITRECND/multiscanner,jmlong1027/multiscanner,awest1339/multiscanner,jmlong1027/multiscanner,jmlong1027/multiscanner,mitre/multiscanner,mitre/multiscanner,mitre/multiscanner
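
The value scan() computes above is the Shannon entropy -sum(p * log2(p)) over the file's byte frequencies. A standalone check on made-up inputs:

from collections import Counter
import math

def shannon_entropy(data):
    # -sum(p * log2(p)) over the byte frequencies in `data`
    counts, total = Counter(data), float(len(data))
    return -sum(n / total * math.log(n / total, 2) for n in counts.values())

print(shannon_entropy(b'aabb'))  # 1.0 -- two equally likely bytes, one bit each
print(shannon_entropy(b'abcd'))  # 2.0 -- four equally likely bytes, two bits each
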
ae31e3dcb5e4e2f7a7d6d12b92621c1750eb6a0e
dask_distance/_utils.py
dask_distance/_utils.py
import dask
import dask.array

import dask_distance._compat


def _bool_cmp_mtx_cnt(u, v):
    u = dask_distance._compat._asarray(u)
    v = dask_distance._compat._asarray(v)

    u_1 = u.astype(bool)
    v_1 = v.astype(bool)

    u_0 = ~u_1
    v_0 = ~v_1

    uv_11 = u_1 & v_1
    uv_10 = u_1 & v_0
    uv_01 = u_0 & v_1
    uv_00 = u_0 & v_0

    uv_11_sum = uv_11.sum(axis=0, dtype=float)
    uv_10_sum = uv_10.sum(axis=0, dtype=float)
    uv_01_sum = uv_01.sum(axis=0, dtype=float)
    uv_00_sum = uv_00.sum(axis=0, dtype=float)

    uv_cmp_mtx_cnts = dask.array.stack([
        dask.array.stack([uv_00_sum, uv_01_sum]),
        dask.array.stack([uv_10_sum, uv_11_sum]),
    ])

    return uv_cmp_mtx_cnts
Create a comparison matrix for 2 1-D bool arrays
Create a comparison matrix for 2 1-D bool arrays Simply count the number of combinations of true and false between the two 1-D arrays and sum them together. The result is then stacked into a 2-D array where the indices match the particular combination of true and false seen in each 1-D bool array in order.
Python
bsd-3-clause
jakirkham/dask-distance
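
The 2x2 layout described in the commit message is easy to sanity-check with plain NumPy (input values invented):

import numpy as np

u = np.array([1, 0, 1, 1, 0], dtype=bool)
v = np.array([1, 1, 0, 1, 0], dtype=bool)

# Indexed as [[#(u=0,v=0), #(u=0,v=1)], [#(u=1,v=0), #(u=1,v=1)]],
# matching the stacking order in _bool_cmp_mtx_cnt.
table = np.array([[np.sum(~u & ~v), np.sum(~u & v)],
                  [np.sum(u & ~v), np.sum(u & v)]])
print(table)  # [[1 1]
              #  [1 2]]
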
f0257d91a27f7ccf04d72d1196e78e401a2c6f4e
trac/web/__init__.py
trac/web/__init__.py
# With mod_python we'll have to delay importing trac.web.api until
# modpython_frontend.handler() has been called since the
# PYTHON_EGG_CACHE variable is set from there
#
# TODO: Remove this once the Genshi zip_safe issue has been resolved.
import os
from pkg_resources import get_distribution
if not os.path.isdir(get_distribution('genshi').location):
    try:
        import mod_python.apache
        import sys
        if 'trac.web.modpython_frontend' in sys.modules:
            from trac.web.api import *
    except ImportError:
        from trac.web.api import *
else:
    from trac.web.api import *
# Workaround for http://bugs.python.org/issue6763 and
# http://bugs.python.org/issue5853 thread issues
import mimetypes
mimetypes.init()

# With mod_python we'll have to delay importing trac.web.api until
# modpython_frontend.handler() has been called since the
# PYTHON_EGG_CACHE variable is set from there
#
# TODO: Remove this once the Genshi zip_safe issue has been resolved.
import os
from pkg_resources import get_distribution
if not os.path.isdir(get_distribution('genshi').location):
    try:
        import mod_python.apache
        import sys
        if 'trac.web.modpython_frontend' in sys.modules:
            from trac.web.api import *
    except ImportError:
        from trac.web.api import *
else:
    from trac.web.api import *
Fix race condition during `mimetypes` initialization.
Fix race condition during `mimetypes` initialization. Initial patch from Steven R. Loomis. Closes #8629. git-svn-id: f68c6b3b1dcd5d00a2560c384475aaef3bc99487@9740 af82e41b-90c4-0310-8c96-b1721e28e2e2
Python
bsd-3-clause
moreati/trac-gitsvn,moreati/trac-gitsvn,exocad/exotrac,exocad/exotrac,moreati/trac-gitsvn,dafrito/trac-mirror,exocad/exotrac,exocad/exotrac,dafrito/trac-mirror,moreati/trac-gitsvn,dafrito/trac-mirror,dafrito/trac-mirror
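
The workaround forces mimetypes' lazy initialization to run once in the importing thread, before any request threads exist to race it. A simplified illustration of the now-safe pattern (not taken from the commit):

import mimetypes
import threading

mimetypes.init()  # eager init, as the commit does at import time

def worker():
    # Safe: the shared type tables were fully built before threads started.
    mimetypes.guess_type('report.pdf')

threads = [threading.Thread(target=worker) for _ in range(8)]
for t in threads:
    t.start()
for t in threads:
    t.join()
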
067a02fc5d4a432f4dc7f10cdc098b9ebdad9ccd
extract_options.py
extract_options.py
from pymongo import MongoClient


def main():
    client = MongoClient()
    db = client.cityhotspots
    db.drop_collection('dineroptions')
    diners_collection = db.diners
    doc = {}
    diner_options_collection = db.dineroptions
    doc['categories'] = diners_collection.distinct('category')
    doc['cuisines'] = diners_collection.distinct('cuisine')
    doc['districts'] = diners_collection.distinct('address.district')
    doc['price_max'] = list(diners_collection.aggregate([{
        "$group": {
            "_id": None,
            "value": {"$max": "$price_max"}
        }
    }]))[0]['value']
    doc['price_min'] = list(diners_collection.aggregate([{
        "$group": {
            "_id": None,
            "value": {"$min": "$price_min"}
        }
    }]))[0]['value']
    diner_options_collection.insert(doc)


if __name__ == '__main__':
    main()
from pymongo import MongoClient


def main():
    client = MongoClient()
    db = client.cityhotspots
    db.drop_collection('dineroptions')
    diners_collection = db.diners
    doc = {}
    diner_options_collection = db.dineroptions
    doc['categories'] = diners_collection.distinct('category')
    doc['categories'].insert(0, 'Tất cả')
    doc['cuisines'] = diners_collection.distinct('cuisine')
    doc['cuisines'].insert(0, 'Tất cả')
    doc['districts'] = diners_collection.distinct('address.district')
    doc['districts'].insert(0, 'Tất cả')
    doc['price_max'] = list(diners_collection.aggregate([{
        "$group": {
            "_id": None,
            "value": {"$max": "$price_max"}
        }
    }]))[0]['value']
    doc['price_min'] = list(diners_collection.aggregate([{
        "$group": {
            "_id": None,
            "value": {"$min": "$price_min"}
        }
    }]))[0]['value']
    diner_options_collection.insert(doc)


if __name__ == '__main__':
    main()
Add 'All' option to lists
Add 'All' option to lists
Python
mit
earlwlkr/POICrawler
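
(The prepended 'Tất cả' is Vietnamese for 'All'.) Each $group stage above reduces the whole collection to one document holding the min or max; a quick check against throwaway data (database and collection names invented):

from pymongo import MongoClient

coll = MongoClient().demo.diners
coll.delete_many({})
coll.insert_many([{'price_min': 10, 'price_max': 50},
                  {'price_min': 5, 'price_max': 80}])

top = list(coll.aggregate([{'$group': {'_id': None,
                                       'value': {'$max': '$price_max'}}}]))
print(top[0]['value'])  # 80
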
370da8d3e7d879a9bdebcf594df6604905422d67
application/cases/service.py
application/cases/service.py
import requests import logging from flask import current_app from requests.auth import HTTPBasicAuth logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) logger.addHandler(logging.StreamHandler()) def get_cases(queue): url = current_app.config['CASES_URL'] + "/cases/queue/" + queue logging.info("GET_CASES for url: %s" % url) response = requests.get(url) logging.info("JSON: %s" % response.json()) return response.json() def complete_case(case_id): url = current_app.config['CASES_URL'] + "/cases/complete/"+ case_id logging.info("POST %s" % url) return requests.put(url)
import requests import logging from flask import current_app from requests.auth import HTTPBasicAuth logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) logger.addHandler(logging.StreamHandler()) def get_cases(queue): url = current_app.config['CASES_URL'] + "/cases/queued/" + queue logging.info("GET_CASES for url: %s" % url) response = requests.get(url) logging.info("JSON: %s" % response.json()) return response.json() def complete_case(case_id): url = current_app.config['CASES_URL'] + "/cases/complete/"+ case_id logging.info("POST %s" % url) return requests.put(url)
Change endpoint to show cases
Change endpoint to show cases
Python
mit
LandRegistry/casework-frontend-alpha,LandRegistry/casework-frontend-alpha,LandRegistry/casework-frontend-alpha,LandRegistry/casework-frontend-alpha
import requests import logging from flask import current_app from requests.auth import HTTPBasicAuth logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) logger.addHandler(logging.StreamHandler()) def get_cases(queue): url = current_app.config['CASES_URL'] + "/cases/queue/" + queue logging.info("GET_CASES for url: %s" % url) response = requests.get(url) logging.info("JSON: %s" % response.json()) return response.json() def complete_case(case_id): url = current_app.config['CASES_URL'] + "/cases/complete/"+ case_id logging.info("POST %s" % url) return requests.put(url)Change endpoint to show cases
import requests import logging from flask import current_app from requests.auth import HTTPBasicAuth logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) logger.addHandler(logging.StreamHandler()) def get_cases(queue): url = current_app.config['CASES_URL'] + "/cases/queued/" + queue logging.info("GET_CASES for url: %s" % url) response = requests.get(url) logging.info("JSON: %s" % response.json()) return response.json() def complete_case(case_id): url = current_app.config['CASES_URL'] + "/cases/complete/"+ case_id logging.info("POST %s" % url) return requests.put(url)
<commit_before>import requests import logging from flask import current_app from requests.auth import HTTPBasicAuth logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) logger.addHandler(logging.StreamHandler()) def get_cases(queue): url = current_app.config['CASES_URL'] + "/cases/queue/" + queue logging.info("GET_CASES for url: %s" % url) response = requests.get(url) logging.info("JSON: %s" % response.json()) return response.json() def complete_case(case_id): url = current_app.config['CASES_URL'] + "/cases/complete/"+ case_id logging.info("POST %s" % url) return requests.put(url)<commit_msg>Change endpoint to show cases<commit_after>
import requests import logging from flask import current_app from requests.auth import HTTPBasicAuth logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) logger.addHandler(logging.StreamHandler()) def get_cases(queue): url = current_app.config['CASES_URL'] + "/cases/queued/" + queue logging.info("GET_CASES for url: %s" % url) response = requests.get(url) logging.info("JSON: %s" % response.json()) return response.json() def complete_case(case_id): url = current_app.config['CASES_URL'] + "/cases/complete/"+ case_id logging.info("POST %s" % url) return requests.put(url)
import requests import logging from flask import current_app from requests.auth import HTTPBasicAuth logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) logger.addHandler(logging.StreamHandler()) def get_cases(queue): url = current_app.config['CASES_URL'] + "/cases/queue/" + queue logging.info("GET_CASES for url: %s" % url) response = requests.get(url) logging.info("JSON: %s" % response.json()) return response.json() def complete_case(case_id): url = current_app.config['CASES_URL'] + "/cases/complete/"+ case_id logging.info("POST %s" % url) return requests.put(url)Change endpoint to show casesimport requests import logging from flask import current_app from requests.auth import HTTPBasicAuth logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) logger.addHandler(logging.StreamHandler()) def get_cases(queue): url = current_app.config['CASES_URL'] + "/cases/queued/" + queue logging.info("GET_CASES for url: %s" % url) response = requests.get(url) logging.info("JSON: %s" % response.json()) return response.json() def complete_case(case_id): url = current_app.config['CASES_URL'] + "/cases/complete/"+ case_id logging.info("POST %s" % url) return requests.put(url)
<commit_before>import requests import logging from flask import current_app from requests.auth import HTTPBasicAuth logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) logger.addHandler(logging.StreamHandler()) def get_cases(queue): url = current_app.config['CASES_URL'] + "/cases/queue/" + queue logging.info("GET_CASES for url: %s" % url) response = requests.get(url) logging.info("JSON: %s" % response.json()) return response.json() def complete_case(case_id): url = current_app.config['CASES_URL'] + "/cases/complete/"+ case_id logging.info("POST %s" % url) return requests.put(url)<commit_msg>Change endpoint to show cases<commit_after>import requests import logging from flask import current_app from requests.auth import HTTPBasicAuth logging.basicConfig(level=logging.INFO) logger = logging.getLogger(__name__) logger.addHandler(logging.StreamHandler()) def get_cases(queue): url = current_app.config['CASES_URL'] + "/cases/queued/" + queue logging.info("GET_CASES for url: %s" % url) response = requests.get(url) logging.info("JSON: %s" % response.json()) return response.json() def complete_case(case_id): url = current_app.config['CASES_URL'] + "/cases/complete/"+ case_id logging.info("POST %s" % url) return requests.put(url)
8f5f342679b7f70562423346b310437ce75ad18a
doc/ext/nova_autodoc.py
doc/ext/nova_autodoc.py
import os from nova import utils def setup(app): rootdir = os.path.abspath(app.srcdir + '/..') print "**Autodocumenting from %s" % rootdir rv = utils.execute('cd %s && ./generate_autodoc_index.sh' % rootdir) print rv[0]
import gettext import os gettext.install('nova') from nova import utils def setup(app): rootdir = os.path.abspath(app.srcdir + '/..') print "**Autodocumenting from %s" % rootdir rv = utils.execute('cd %s && ./generate_autodoc_index.sh' % rootdir) print rv[0]
Fix doc building endpoint for gettext.
Fix doc building endpoint for gettext.
Python
apache-2.0
n0ano/gantt,n0ano/gantt
import os from nova import utils def setup(app): rootdir = os.path.abspath(app.srcdir + '/..') print "**Autodocumenting from %s" % rootdir rv = utils.execute('cd %s && ./generate_autodoc_index.sh' % rootdir) print rv[0] Fix doc building endpoint for gettext.
import gettext import os gettext.install('nova') from nova import utils def setup(app): rootdir = os.path.abspath(app.srcdir + '/..') print "**Autodocumenting from %s" % rootdir rv = utils.execute('cd %s && ./generate_autodoc_index.sh' % rootdir) print rv[0]
<commit_before>import os from nova import utils def setup(app): rootdir = os.path.abspath(app.srcdir + '/..') print "**Autodocumenting from %s" % rootdir rv = utils.execute('cd %s && ./generate_autodoc_index.sh' % rootdir) print rv[0] <commit_msg>Fix doc building endpoint for gettext.<commit_after>
import gettext import os gettext.install('nova') from nova import utils def setup(app): rootdir = os.path.abspath(app.srcdir + '/..') print "**Autodocumenting from %s" % rootdir rv = utils.execute('cd %s && ./generate_autodoc_index.sh' % rootdir) print rv[0]
import os from nova import utils def setup(app): rootdir = os.path.abspath(app.srcdir + '/..') print "**Autodocumenting from %s" % rootdir rv = utils.execute('cd %s && ./generate_autodoc_index.sh' % rootdir) print rv[0] Fix doc building endpoint for gettext.import gettext import os gettext.install('nova') from nova import utils def setup(app): rootdir = os.path.abspath(app.srcdir + '/..') print "**Autodocumenting from %s" % rootdir rv = utils.execute('cd %s && ./generate_autodoc_index.sh' % rootdir) print rv[0]
<commit_before>import os from nova import utils def setup(app): rootdir = os.path.abspath(app.srcdir + '/..') print "**Autodocumenting from %s" % rootdir rv = utils.execute('cd %s && ./generate_autodoc_index.sh' % rootdir) print rv[0] <commit_msg>Fix doc building endpoint for gettext.<commit_after>import gettext import os gettext.install('nova') from nova import utils def setup(app): rootdir = os.path.abspath(app.srcdir + '/..') print "**Autodocumenting from %s" % rootdir rv = utils.execute('cd %s && ./generate_autodoc_index.sh' % rootdir) print rv[0]
e1c8b1c5174df9b55c0d77dad7289dac7ea1b9e2
builder/__init__.py
builder/__init__.py
# The default stack user name and password... # # Someday make this better? DEF_USER, DEF_PW = ('stack', 'stack')
from builder.roles import Roles # The default stack user name and password... # # Someday make this better? DEF_USER, DEF_PW = ('stack', 'stack') DEF_SETTINGS = { # We can't seem to alter this one more than once, # so just leave it as is... todo fix this and make it so that # we reset it... 'DATABASE_USER': DEF_USER, # Devstack will also change the root database password to this, # unsure why it desires to do that... # # This may require work... 'DATABASE_PASSWORD': DEF_PW, # This appears to be the default, leave it be... 'RABBIT_USER': 'stackrabbit', } DEF_FLAVORS = { Roles.CAP: 'm1.medium', Roles.DB: 'm1.medium', Roles.MAP: 'm1.large', Roles.RB: 'm1.medium', Roles.HV: 'm1.large', } DEF_TOPO = { 'templates': { Roles.CAP: 'cap-%(rand)s', Roles.MAP: 'map-%(rand)s', Roles.DB: 'db-%(rand)s', Roles.RB: 'rb-%(rand)s', Roles.HV: 'hv-%(rand)s', }, 'control': {}, 'compute': [], } STACK_SH = '/home/%s/devstack/stack.sh' % DEF_USER STACK_SOURCE = 'git://git.openstack.org/openstack-dev/devstack'
Move various useful constants to this file
Move various useful constants to this file
Python
apache-2.0
harlowja/multi-devstack,harlowja/multi-devstack
# The default stack user name and password... # # Someday make this better? DEF_USER, DEF_PW = ('stack', 'stack') Move various useful constants to this file
from builder.roles import Roles # The default stack user name and password... # # Someday make this better? DEF_USER, DEF_PW = ('stack', 'stack') DEF_SETTINGS = { # We can't seem to alter this one more than once, # so just leave it as is... todo fix this and make it so that # we reset it... 'DATABASE_USER': DEF_USER, # Devstack will also change the root database password to this, # unsure why it desires to do that... # # This may require work... 'DATABASE_PASSWORD': DEF_PW, # This appears to be the default, leave it be... 'RABBIT_USER': 'stackrabbit', } DEF_FLAVORS = { Roles.CAP: 'm1.medium', Roles.DB: 'm1.medium', Roles.MAP: 'm1.large', Roles.RB: 'm1.medium', Roles.HV: 'm1.large', } DEF_TOPO = { 'templates': { Roles.CAP: 'cap-%(rand)s', Roles.MAP: 'map-%(rand)s', Roles.DB: 'db-%(rand)s', Roles.RB: 'rb-%(rand)s', Roles.HV: 'hv-%(rand)s', }, 'control': {}, 'compute': [], } STACK_SH = '/home/%s/devstack/stack.sh' % DEF_USER STACK_SOURCE = 'git://git.openstack.org/openstack-dev/devstack'
<commit_before># The default stack user name and password... # # Someday make this better? DEF_USER, DEF_PW = ('stack', 'stack') <commit_msg>Move various useful constants to this file<commit_after>
from builder.roles import Roles # The default stack user name and password... # # Someday make this better? DEF_USER, DEF_PW = ('stack', 'stack') DEF_SETTINGS = { # We can't seem to alter this one more than once, # so just leave it as is... todo fix this and make it so that # we reset it... 'DATABASE_USER': DEF_USER, # Devstack will also change the root database password to this, # unsure why it desires to do that... # # This may require work... 'DATABASE_PASSWORD': DEF_PW, # This appears to be the default, leave it be... 'RABBIT_USER': 'stackrabbit', } DEF_FLAVORS = { Roles.CAP: 'm1.medium', Roles.DB: 'm1.medium', Roles.MAP: 'm1.large', Roles.RB: 'm1.medium', Roles.HV: 'm1.large', } DEF_TOPO = { 'templates': { Roles.CAP: 'cap-%(rand)s', Roles.MAP: 'map-%(rand)s', Roles.DB: 'db-%(rand)s', Roles.RB: 'rb-%(rand)s', Roles.HV: 'hv-%(rand)s', }, 'control': {}, 'compute': [], } STACK_SH = '/home/%s/devstack/stack.sh' % DEF_USER STACK_SOURCE = 'git://git.openstack.org/openstack-dev/devstack'
# The default stack user name and password... # # Someday make this better? DEF_USER, DEF_PW = ('stack', 'stack') Move various useful constants to this filefrom builder.roles import Roles # The default stack user name and password... # # Someday make this better? DEF_USER, DEF_PW = ('stack', 'stack') DEF_SETTINGS = { # We can't seem to alter this one more than once, # so just leave it as is... todo fix this and make it so that # we reset it... 'DATABASE_USER': DEF_USER, # Devstack will also change the root database password to this, # unsure why it desires to do that... # # This may require work... 'DATABASE_PASSWORD': DEF_PW, # This appears to be the default, leave it be... 'RABBIT_USER': 'stackrabbit', } DEF_FLAVORS = { Roles.CAP: 'm1.medium', Roles.DB: 'm1.medium', Roles.MAP: 'm1.large', Roles.RB: 'm1.medium', Roles.HV: 'm1.large', } DEF_TOPO = { 'templates': { Roles.CAP: 'cap-%(rand)s', Roles.MAP: 'map-%(rand)s', Roles.DB: 'db-%(rand)s', Roles.RB: 'rb-%(rand)s', Roles.HV: 'hv-%(rand)s', }, 'control': {}, 'compute': [], } STACK_SH = '/home/%s/devstack/stack.sh' % DEF_USER STACK_SOURCE = 'git://git.openstack.org/openstack-dev/devstack'
<commit_before># The default stack user name and password... # # Someday make this better? DEF_USER, DEF_PW = ('stack', 'stack') <commit_msg>Move various useful constants to this file<commit_after>from builder.roles import Roles # The default stack user name and password... # # Someday make this better? DEF_USER, DEF_PW = ('stack', 'stack') DEF_SETTINGS = { # We can't seem to alter this one more than once, # so just leave it as is... todo fix this and make it so that # we reset it... 'DATABASE_USER': DEF_USER, # Devstack will also change the root database password to this, # unsure why it desires to do that... # # This may require work... 'DATABASE_PASSWORD': DEF_PW, # This appears to be the default, leave it be... 'RABBIT_USER': 'stackrabbit', } DEF_FLAVORS = { Roles.CAP: 'm1.medium', Roles.DB: 'm1.medium', Roles.MAP: 'm1.large', Roles.RB: 'm1.medium', Roles.HV: 'm1.large', } DEF_TOPO = { 'templates': { Roles.CAP: 'cap-%(rand)s', Roles.MAP: 'map-%(rand)s', Roles.DB: 'db-%(rand)s', Roles.RB: 'rb-%(rand)s', Roles.HV: 'hv-%(rand)s', }, 'control': {}, 'compute': [], } STACK_SH = '/home/%s/devstack/stack.sh' % DEF_USER STACK_SOURCE = 'git://git.openstack.org/openstack-dev/devstack'
552d1c02a46d70de92f4af4c77ce60f87d8811cc
mp3-formatter/rename_mp3.py
mp3-formatter/rename_mp3.py
#!/usr/bin/python3 import ID3 import os import sys mp3_extension = ".mp3" names = ["final_name_1", "final_name_2", "final_name_3"] files_all = os.listdir('.') files = [] for f in files_all: # Prune directories if not os.path.isfile(f): continue # Prune non-MP3 files filename, extension = os.path.splitext(f) if extension != mp3_extension: continue # Prune this file f_temp = os.path.abspath(f) if f_temp == os.path.abspath(__file__): continue files.append(f) if len(files) != len(names): raise RuntimeError( str(len(names)) + " file names were given but " + str(len(files)) + " files were found.") sys.exit() files.sort() i = 0 for f in files: os.rename(f, names[i] + mp3_extension) i += 1
#!/usr/bin/python3 import ID3 import os import sys def read_tracklist(): tracklist = [] for line in sys.stdin: tracklist.append(line) return tracklist tracklist = read_tracklist() mp3_extension = ".mp3" files_all = os.listdir('.') files = [] for f in files_all: # Prune directories if not os.path.isfile(f): continue # Prune non-MP3 files filename, extension = os.path.splitext(f) if extension != mp3_extension: continue # Prune this file f_temp = os.path.abspath(f) if f_temp == os.path.abspath(__file__): continue files.append(f) if len(files) != len(tracklist): raise RuntimeError( str(len(tracklist)) + " file names were given but " + str(len(files)) + " files were found.") sys.exit() files.sort() i = 0 for f in files: os.rename(f, tracklist[i] + mp3_extension) i += 1
Replace hardcoded names with tracklist
MP3: Replace hardcoded names with tracklist
Python
mit
jleung51/scripts,jleung51/scripts,jleung51/scripts
#!/usr/bin/python3 import ID3 import os import sys mp3_extension = ".mp3" names = ["final_name_1", "final_name_2", "final_name_3"] files_all = os.listdir('.') files = [] for f in files_all: # Prune directories if not os.path.isfile(f): continue # Prune non-MP3 files filename, extension = os.path.splitext(f) if extension != mp3_extension: continue # Prune this file f_temp = os.path.abspath(f) if f_temp == os.path.abspath(__file__): continue files.append(f) if len(files) != len(names): raise RuntimeError( str(len(names)) + " file names were given but " + str(len(files)) + " files were found.") sys.exit() files.sort() i = 0 for f in files: os.rename(f, names[i] + mp3_extension) i += 1 MP3: Replace hardcoded names with tracklist
#!/usr/bin/python3 import ID3 import os import sys def read_tracklist(): tracklist = [] for line in sys.stdin: tracklist.append(line) return tracklist tracklist = read_tracklist() mp3_extension = ".mp3" files_all = os.listdir('.') files = [] for f in files_all: # Prune directories if not os.path.isfile(f): continue # Prune non-MP3 files filename, extension = os.path.splitext(f) if extension != mp3_extension: continue # Prune this file f_temp = os.path.abspath(f) if f_temp == os.path.abspath(__file__): continue files.append(f) if len(files) != len(tracklist): raise RuntimeError( str(len(tracklist)) + " file names were given but " + str(len(files)) + " files were found.") sys.exit() files.sort() i = 0 for f in files: os.rename(f, tracklist[i] + mp3_extension) i += 1
<commit_before>#!/usr/bin/python3 import ID3 import os import sys mp3_extension = ".mp3" names = ["final_name_1", "final_name_2", "final_name_3"] files_all = os.listdir('.') files = [] for f in files_all: # Prune directories if not os.path.isfile(f): continue # Prune non-MP3 files filename, extension = os.path.splitext(f) if extension != mp3_extension: continue # Prune this file f_temp = os.path.abspath(f) if f_temp == os.path.abspath(__file__): continue files.append(f) if len(files) != len(names): raise RuntimeError( str(len(names)) + " file names were given but " + str(len(files)) + " files were found.") sys.exit() files.sort() i = 0 for f in files: os.rename(f, names[i] + mp3_extension) i += 1 <commit_msg>MP3: Replace hardcoded names with tracklist<commit_after>
#!/usr/bin/python3 import ID3 import os import sys def read_tracklist(): tracklist = [] for line in sys.stdin: tracklist.append(line) return tracklist tracklist = read_tracklist() mp3_extension = ".mp3" files_all = os.listdir('.') files = [] for f in files_all: # Prune directories if not os.path.isfile(f): continue # Prune non-MP3 files filename, extension = os.path.splitext(f) if extension != mp3_extension: continue # Prune this file f_temp = os.path.abspath(f) if f_temp == os.path.abspath(__file__): continue files.append(f) if len(files) != len(tracklist): raise RuntimeError( str(len(tracklist)) + " file names were given but " + str(len(files)) + " files were found.") sys.exit() files.sort() i = 0 for f in files: os.rename(f, tracklist[i] + mp3_extension) i += 1
#!/usr/bin/python3 import ID3 import os import sys mp3_extension = ".mp3" names = ["final_name_1", "final_name_2", "final_name_3"] files_all = os.listdir('.') files = [] for f in files_all: # Prune directories if not os.path.isfile(f): continue # Prune non-MP3 files filename, extension = os.path.splitext(f) if extension != mp3_extension: continue # Prune this file f_temp = os.path.abspath(f) if f_temp == os.path.abspath(__file__): continue files.append(f) if len(files) != len(names): raise RuntimeError( str(len(names)) + " file names were given but " + str(len(files)) + " files were found.") sys.exit() files.sort() i = 0 for f in files: os.rename(f, names[i] + mp3_extension) i += 1 MP3: Replace hardcoded names with tracklist#!/usr/bin/python3 import ID3 import os import sys def read_tracklist(): tracklist = [] for line in sys.stdin: tracklist.append(line) return tracklist tracklist = read_tracklist() mp3_extension = ".mp3" files_all = os.listdir('.') files = [] for f in files_all: # Prune directories if not os.path.isfile(f): continue # Prune non-MP3 files filename, extension = os.path.splitext(f) if extension != mp3_extension: continue # Prune this file f_temp = os.path.abspath(f) if f_temp == os.path.abspath(__file__): continue files.append(f) if len(files) != len(tracklist): raise RuntimeError( str(len(tracklist)) + " file names were given but " + str(len(files)) + " files were found.") sys.exit() files.sort() i = 0 for f in files: os.rename(f, tracklist[i] + mp3_extension) i += 1
<commit_before>#!/usr/bin/python3 import ID3 import os import sys mp3_extension = ".mp3" names = ["final_name_1", "final_name_2", "final_name_3"] files_all = os.listdir('.') files = [] for f in files_all: # Prune directories if not os.path.isfile(f): continue # Prune non-MP3 files filename, extension = os.path.splitext(f) if extension != mp3_extension: continue # Prune this file f_temp = os.path.abspath(f) if f_temp == os.path.abspath(__file__): continue files.append(f) if len(files) != len(names): raise RuntimeError( str(len(names)) + " file names were given but " + str(len(files)) + " files were found.") sys.exit() files.sort() i = 0 for f in files: os.rename(f, names[i] + mp3_extension) i += 1 <commit_msg>MP3: Replace hardcoded names with tracklist<commit_after>#!/usr/bin/python3 import ID3 import os import sys def read_tracklist(): tracklist = [] for line in sys.stdin: tracklist.append(line) return tracklist tracklist = read_tracklist() mp3_extension = ".mp3" files_all = os.listdir('.') files = [] for f in files_all: # Prune directories if not os.path.isfile(f): continue # Prune non-MP3 files filename, extension = os.path.splitext(f) if extension != mp3_extension: continue # Prune this file f_temp = os.path.abspath(f) if f_temp == os.path.abspath(__file__): continue files.append(f) if len(files) != len(tracklist): raise RuntimeError( str(len(tracklist)) + " file names were given but " + str(len(files)) + " files were found.") sys.exit() files.sort() i = 0 for f in files: os.rename(f, tracklist[i] + mp3_extension) i += 1
fb3db1196a48199bc388f97f451098a530822ca7
ecs/models.py
ecs/models.py
"""Entity, Component, and System classes.""" from __future__ import print_function from abc import ABCMeta, abstractmethod class Entity(object): """Encapsulation of a GUID to use in the entity database.""" def __init__(self, guid): """:param guid: globally unique identifier :type guid: :class:`int` """ self._guid = guid def __str__(self): """Stringify. :return: GUID as a string :rtype: :class:`str` """ return str(self._guid) def __hash__(self): """Hash function for this object. :return: the hash value :rtype: :class:`int` """ return self._guid def __eq__(self, other): """Equality method. :param other: other entity :type other: :class:`Entity` :return: ``True`` if equal :rtype: :class:`bool` """ return self._guid == hash(other) class Component(object): """Class from which all components should derive.""" pass class System(object): """An object that represents an operation on a set of objects from the game database. The :meth:`update` method must be implemented. """ __metaclass__ = ABCMeta @abstractmethod def update(self, dt): """Run the system for this frame. This method is called by the system manager, and is where the functionality of the system is implemented. :param dt: delta time, or elapsed time for this frame :type dt: :class:`float` """ print("System's update() method was called " "with time delta of {}".format(dt))
"""Entity, Component, and System classes.""" from __future__ import print_function from abc import ABCMeta, abstractmethod class Entity(object): """Encapsulation of a GUID to use in the entity database.""" def __init__(self, guid): """:param guid: globally unique identifier :type guid: :class:`int` """ self._guid = guid def __repr__(self): return '{0}({1})'.format(type(self).__name__, self._guid) def __hash__(self): return self._guid def __eq__(self, other): return self._guid == hash(other) class Component(object): """Class from which all components should derive.""" pass class System(object): """An object that represents an operation on a set of objects from the game database. The :meth:`update` method must be implemented. """ __metaclass__ = ABCMeta @abstractmethod def update(self, dt): """Run the system for this frame. This method is called by the system manager, and is where the functionality of the system is implemented. :param dt: delta time, or elapsed time for this frame :type dt: :class:`float` """ print("System's update() method was called " "with time delta of {}".format(dt))
Remove unnecessary model docstrings, add repr.
Remove unnecessary model docstrings, add repr.
Python
mit
seanfisk/ecs,seanfisk/ecs
"""Entity, Component, and System classes.""" from __future__ import print_function from abc import ABCMeta, abstractmethod class Entity(object): """Encapsulation of a GUID to use in the entity database.""" def __init__(self, guid): """:param guid: globally unique identifier :type guid: :class:`int` """ self._guid = guid def __str__(self): """Stringify. :return: GUID as a string :rtype: :class:`str` """ return str(self._guid) def __hash__(self): """Hash function for this object. :return: the hash value :rtype: :class:`int` """ return self._guid def __eq__(self, other): """Equality method. :param other: other entity :type other: :class:`Entity` :return: ``True`` if equal :rtype: :class:`bool` """ return self._guid == hash(other) class Component(object): """Class from which all components should derive.""" pass class System(object): """An object that represents an operation on a set of objects from the game database. The :meth:`update` method must be implemented. """ __metaclass__ = ABCMeta @abstractmethod def update(self, dt): """Run the system for this frame. This method is called by the system manager, and is where the functionality of the system is implemented. :param dt: delta time, or elapsed time for this frame :type dt: :class:`float` """ print("System's update() method was called " "with time delta of {}".format(dt)) Remove unnecessary model docstrings, add repr.
"""Entity, Component, and System classes.""" from __future__ import print_function from abc import ABCMeta, abstractmethod class Entity(object): """Encapsulation of a GUID to use in the entity database.""" def __init__(self, guid): """:param guid: globally unique identifier :type guid: :class:`int` """ self._guid = guid def __repr__(self): return '{0}({1})'.format(type(self).__name__, self._guid) def __hash__(self): return self._guid def __eq__(self, other): return self._guid == hash(other) class Component(object): """Class from which all components should derive.""" pass class System(object): """An object that represents an operation on a set of objects from the game database. The :meth:`update` method must be implemented. """ __metaclass__ = ABCMeta @abstractmethod def update(self, dt): """Run the system for this frame. This method is called by the system manager, and is where the functionality of the system is implemented. :param dt: delta time, or elapsed time for this frame :type dt: :class:`float` """ print("System's update() method was called " "with time delta of {}".format(dt))
<commit_before>"""Entity, Component, and System classes.""" from __future__ import print_function from abc import ABCMeta, abstractmethod class Entity(object): """Encapsulation of a GUID to use in the entity database.""" def __init__(self, guid): """:param guid: globally unique identifier :type guid: :class:`int` """ self._guid = guid def __str__(self): """Stringify. :return: GUID as a string :rtype: :class:`str` """ return str(self._guid) def __hash__(self): """Hash function for this object. :return: the hash value :rtype: :class:`int` """ return self._guid def __eq__(self, other): """Equality method. :param other: other entity :type other: :class:`Entity` :return: ``True`` if equal :rtype: :class:`bool` """ return self._guid == hash(other) class Component(object): """Class from which all components should derive.""" pass class System(object): """An object that represents an operation on a set of objects from the game database. The :meth:`update` method must be implemented. """ __metaclass__ = ABCMeta @abstractmethod def update(self, dt): """Run the system for this frame. This method is called by the system manager, and is where the functionality of the system is implemented. :param dt: delta time, or elapsed time for this frame :type dt: :class:`float` """ print("System's update() method was called " "with time delta of {}".format(dt)) <commit_msg>Remove unnecessary model docstrings, add repr.<commit_after>
"""Entity, Component, and System classes.""" from __future__ import print_function from abc import ABCMeta, abstractmethod class Entity(object): """Encapsulation of a GUID to use in the entity database.""" def __init__(self, guid): """:param guid: globally unique identifier :type guid: :class:`int` """ self._guid = guid def __repr__(self): return '{0}({1})'.format(type(self).__name__, self._guid) def __hash__(self): return self._guid def __eq__(self, other): return self._guid == hash(other) class Component(object): """Class from which all components should derive.""" pass class System(object): """An object that represents an operation on a set of objects from the game database. The :meth:`update` method must be implemented. """ __metaclass__ = ABCMeta @abstractmethod def update(self, dt): """Run the system for this frame. This method is called by the system manager, and is where the functionality of the system is implemented. :param dt: delta time, or elapsed time for this frame :type dt: :class:`float` """ print("System's update() method was called " "with time delta of {}".format(dt))
"""Entity, Component, and System classes.""" from __future__ import print_function from abc import ABCMeta, abstractmethod class Entity(object): """Encapsulation of a GUID to use in the entity database.""" def __init__(self, guid): """:param guid: globally unique identifier :type guid: :class:`int` """ self._guid = guid def __str__(self): """Stringify. :return: GUID as a string :rtype: :class:`str` """ return str(self._guid) def __hash__(self): """Hash function for this object. :return: the hash value :rtype: :class:`int` """ return self._guid def __eq__(self, other): """Equality method. :param other: other entity :type other: :class:`Entity` :return: ``True`` if equal :rtype: :class:`bool` """ return self._guid == hash(other) class Component(object): """Class from which all components should derive.""" pass class System(object): """An object that represents an operation on a set of objects from the game database. The :meth:`update` method must be implemented. """ __metaclass__ = ABCMeta @abstractmethod def update(self, dt): """Run the system for this frame. This method is called by the system manager, and is where the functionality of the system is implemented. :param dt: delta time, or elapsed time for this frame :type dt: :class:`float` """ print("System's update() method was called " "with time delta of {}".format(dt)) Remove unnecessary model docstrings, add repr."""Entity, Component, and System classes.""" from __future__ import print_function from abc import ABCMeta, abstractmethod class Entity(object): """Encapsulation of a GUID to use in the entity database.""" def __init__(self, guid): """:param guid: globally unique identifier :type guid: :class:`int` """ self._guid = guid def __repr__(self): return '{0}({1})'.format(type(self).__name__, self._guid) def __hash__(self): return self._guid def __eq__(self, other): return self._guid == hash(other) class Component(object): """Class from which all components should derive.""" pass class System(object): """An object that represents an operation on a set of objects from the game database. The :meth:`update` method must be implemented. """ __metaclass__ = ABCMeta @abstractmethod def update(self, dt): """Run the system for this frame. This method is called by the system manager, and is where the functionality of the system is implemented. :param dt: delta time, or elapsed time for this frame :type dt: :class:`float` """ print("System's update() method was called " "with time delta of {}".format(dt))
<commit_before>"""Entity, Component, and System classes.""" from __future__ import print_function from abc import ABCMeta, abstractmethod class Entity(object): """Encapsulation of a GUID to use in the entity database.""" def __init__(self, guid): """:param guid: globally unique identifier :type guid: :class:`int` """ self._guid = guid def __str__(self): """Stringify. :return: GUID as a string :rtype: :class:`str` """ return str(self._guid) def __hash__(self): """Hash function for this object. :return: the hash value :rtype: :class:`int` """ return self._guid def __eq__(self, other): """Equality method. :param other: other entity :type other: :class:`Entity` :return: ``True`` if equal :rtype: :class:`bool` """ return self._guid == hash(other) class Component(object): """Class from which all components should derive.""" pass class System(object): """An object that represents an operation on a set of objects from the game database. The :meth:`update` method must be implemented. """ __metaclass__ = ABCMeta @abstractmethod def update(self, dt): """Run the system for this frame. This method is called by the system manager, and is where the functionality of the system is implemented. :param dt: delta time, or elapsed time for this frame :type dt: :class:`float` """ print("System's update() method was called " "with time delta of {}".format(dt)) <commit_msg>Remove unnecessary model docstrings, add repr.<commit_after>"""Entity, Component, and System classes.""" from __future__ import print_function from abc import ABCMeta, abstractmethod class Entity(object): """Encapsulation of a GUID to use in the entity database.""" def __init__(self, guid): """:param guid: globally unique identifier :type guid: :class:`int` """ self._guid = guid def __repr__(self): return '{0}({1})'.format(type(self).__name__, self._guid) def __hash__(self): return self._guid def __eq__(self, other): return self._guid == hash(other) class Component(object): """Class from which all components should derive.""" pass class System(object): """An object that represents an operation on a set of objects from the game database. The :meth:`update` method must be implemented. """ __metaclass__ = ABCMeta @abstractmethod def update(self, dt): """Run the system for this frame. This method is called by the system manager, and is where the functionality of the system is implemented. :param dt: delta time, or elapsed time for this frame :type dt: :class:`float` """ print("System's update() method was called " "with time delta of {}".format(dt))
1803aad79b8adfe3df1de7f7a4facee0c79f1d57
test/__init__.py
test/__init__.py
""" MongoAlchemy tests package. There's a bunch of tests here, man. """ from util import get_session def setup(): """ Destroy the whole damn database before running tests! WOO! We would do this afterwards, but sometimes you might need to do a post-mortem on the state of the database. """ with get_session() as s: colls = s.db.collection_names() for coll in colls: if coll == 'system.indexes': continue s.db[coll].drop_indexes() s.db[coll].drop() s.db.command({'dropDatabase':1})
Create test.setup() to tear down the database before starting tests.
Create test.setup() to tear down the database before starting tests.
Python
mit
shakefu/MongoAlchemy,shakefu/MongoAlchemy,shakefu/MongoAlchemy
Create test.setup() to tear down the database before starting tests.
""" MongoAlchemy tests package. There's a bunch of tests here, man. """ from util import get_session def setup(): """ Destroy the whole damn database before running tests! WOO! We would do this afterwards, but sometimes you might need to do a post-mortem on the state of the database. """ with get_session() as s: colls = s.db.collection_names() for coll in colls: if coll == 'system.indexes': continue s.db[coll].drop_indexes() s.db[coll].drop() s.db.command({'dropDatabase':1})
<commit_before><commit_msg>Create test.setup() to tear down the database before starting tests.<commit_after>
""" MongoAlchemy tests package. There's a bunch of tests here, man. """ from util import get_session def setup(): """ Destroy the whole damn database before running tests! WOO! We would do this afterwards, but sometimes you might need to do a post-mortem on the state of the database. """ with get_session() as s: colls = s.db.collection_names() for coll in colls: if coll == 'system.indexes': continue s.db[coll].drop_indexes() s.db[coll].drop() s.db.command({'dropDatabase':1})
Create test.setup() to tear down the database before starting tests.""" MongoAlchemy tests package. There's a bunch of tests here, man. """ from util import get_session def setup(): """ Destroy the whole damn database before running tests! WOO! We would do this afterwards, but sometimes you might need to do a post-mortem on the state of the database. """ with get_session() as s: colls = s.db.collection_names() for coll in colls: if coll == 'system.indexes': continue s.db[coll].drop_indexes() s.db[coll].drop() s.db.command({'dropDatabase':1})
<commit_before><commit_msg>Create test.setup() to tear down the database before starting tests.<commit_after>""" MongoAlchemy tests package. There's a bunch of tests here, man. """ from util import get_session def setup(): """ Destroy the whole damn database before running tests! WOO! We would do this afterwards, but sometimes you might need to do a post-mortem on the state of the database. """ with get_session() as s: colls = s.db.collection_names() for coll in colls: if coll == 'system.indexes': continue s.db[coll].drop_indexes() s.db[coll].drop() s.db.command({'dropDatabase':1})
761d74c25fc54cba0c160380d17ab0ca14838dc9
helpers/suggestions/match_suggestion_accepter.py
helpers/suggestions/match_suggestion_accepter.py
from helpers.match_manipulator import MatchManipulator from models.match import Match class MatchSuggestionAccepter(object): """ Handle accepting Match suggestions. """ @classmethod def accept_suggestions(self, suggestions): matches = map(lambda match_future: match_future.get_result(), [Match.get_by_id_async(suggestion.target_key) for suggestion in suggestions]) pairs = zip(matches, suggestions) for match, suggestion in pairs: self._accept_suggestion(match, suggestion) matches, suggestions = zip(*pairs) matches = MatchManipulator.createOrUpdate(list(matches)) return matches @classmethod def _accept_suggestion(self, match, suggestion): if "youtube_videos" in suggestion.contents: match = self._merge_youtube_videos(match, suggestion.contents["youtube_videos"]) return match @classmethod def _merge_youtube_videos(self, match, youtube_videos): for youtube_video in youtube_videos: if youtube_video not in match.youtube_videos: match.youtube_videos.append(youtube_video) match.dirty = True # This is so hacky. -gregmarra 20130601 return match
from helpers.match_manipulator import MatchManipulator from models.match import Match class MatchSuggestionAccepter(object): """ Handle accepting Match suggestions. """ @classmethod def accept_suggestions(self, suggestions): if (len(suggestions) < 1): return None matches = map(lambda match_future: match_future.get_result(), [Match.get_by_id_async(suggestion.target_key) for suggestion in suggestions]) pairs = zip(matches, suggestions) for match, suggestion in pairs: self._accept_suggestion(match, suggestion) matches, suggestions = zip(*pairs) matches = MatchManipulator.createOrUpdate(list(matches)) return matches @classmethod def _accept_suggestion(self, match, suggestion): if "youtube_videos" in suggestion.contents: match = self._merge_youtube_videos(match, suggestion.contents["youtube_videos"]) return match @classmethod def _merge_youtube_videos(self, match, youtube_videos): for youtube_video in youtube_videos: if youtube_video not in match.youtube_videos: match.youtube_videos.append(youtube_video) match.dirty = True # This is so hacky. -gregmarra 20130601 return match
Fix problem when not accepting any suggestions.
Fix problem when not accepting any suggestions.
Python
mit
the-blue-alliance/the-blue-alliance,josephbisch/the-blue-alliance,josephbisch/the-blue-alliance,the-blue-alliance/the-blue-alliance,nwalters512/the-blue-alliance,tsteward/the-blue-alliance,nwalters512/the-blue-alliance,bdaroz/the-blue-alliance,bdaroz/the-blue-alliance,synth3tk/the-blue-alliance,verycumbersome/the-blue-alliance,1fish2/the-blue-alliance,bdaroz/the-blue-alliance,phil-lopreiato/the-blue-alliance,phil-lopreiato/the-blue-alliance,fangeugene/the-blue-alliance,synth3tk/the-blue-alliance,the-blue-alliance/the-blue-alliance,synth3tk/the-blue-alliance,bvisness/the-blue-alliance,bdaroz/the-blue-alliance,verycumbersome/the-blue-alliance,jaredhasenklein/the-blue-alliance,fangeugene/the-blue-alliance,bvisness/the-blue-alliance,the-blue-alliance/the-blue-alliance,bvisness/the-blue-alliance,jaredhasenklein/the-blue-alliance,fangeugene/the-blue-alliance,josephbisch/the-blue-alliance,bdaroz/the-blue-alliance,synth3tk/the-blue-alliance,phil-lopreiato/the-blue-alliance,1fish2/the-blue-alliance,phil-lopreiato/the-blue-alliance,tsteward/the-blue-alliance,verycumbersome/the-blue-alliance,tsteward/the-blue-alliance,josephbisch/the-blue-alliance,phil-lopreiato/the-blue-alliance,nwalters512/the-blue-alliance,bdaroz/the-blue-alliance,fangeugene/the-blue-alliance,tsteward/the-blue-alliance,verycumbersome/the-blue-alliance,nwalters512/the-blue-alliance,tsteward/the-blue-alliance,josephbisch/the-blue-alliance,nwalters512/the-blue-alliance,verycumbersome/the-blue-alliance,tsteward/the-blue-alliance,the-blue-alliance/the-blue-alliance,bvisness/the-blue-alliance,phil-lopreiato/the-blue-alliance,1fish2/the-blue-alliance,bvisness/the-blue-alliance,josephbisch/the-blue-alliance,nwalters512/the-blue-alliance,synth3tk/the-blue-alliance,verycumbersome/the-blue-alliance,1fish2/the-blue-alliance,1fish2/the-blue-alliance,jaredhasenklein/the-blue-alliance,fangeugene/the-blue-alliance,synth3tk/the-blue-alliance,jaredhasenklein/the-blue-alliance,fangeugene/the-blue-alliance,jaredhasenklein/the-blue-alliance,the-blue-alliance/the-blue-alliance,1fish2/the-blue-alliance,bvisness/the-blue-alliance,jaredhasenklein/the-blue-alliance
from helpers.match_manipulator import MatchManipulator from models.match import Match class MatchSuggestionAccepter(object): """ Handle accepting Match suggestions. """ @classmethod def accept_suggestions(self, suggestions): matches = map(lambda match_future: match_future.get_result(), [Match.get_by_id_async(suggestion.target_key) for suggestion in suggestions]) pairs = zip(matches, suggestions) for match, suggestion in pairs: self._accept_suggestion(match, suggestion) matches, suggestions = zip(*pairs) matches = MatchManipulator.createOrUpdate(list(matches)) return matches @classmethod def _accept_suggestion(self, match, suggestion): if "youtube_videos" in suggestion.contents: match = self._merge_youtube_videos(match, suggestion.contents["youtube_videos"]) return match @classmethod def _merge_youtube_videos(self, match, youtube_videos): for youtube_video in youtube_videos: if youtube_video not in match.youtube_videos: match.youtube_videos.append(youtube_video) match.dirty = True # This is so hacky. -gregmarra 20130601 return match Fix problem when not accepting any suggestions.
from helpers.match_manipulator import MatchManipulator from models.match import Match class MatchSuggestionAccepter(object): """ Handle accepting Match suggestions. """ @classmethod def accept_suggestions(self, suggestions): if (len(suggestions) < 1): return None matches = map(lambda match_future: match_future.get_result(), [Match.get_by_id_async(suggestion.target_key) for suggestion in suggestions]) pairs = zip(matches, suggestions) for match, suggestion in pairs: self._accept_suggestion(match, suggestion) matches, suggestions = zip(*pairs) matches = MatchManipulator.createOrUpdate(list(matches)) return matches @classmethod def _accept_suggestion(self, match, suggestion): if "youtube_videos" in suggestion.contents: match = self._merge_youtube_videos(match, suggestion.contents["youtube_videos"]) return match @classmethod def _merge_youtube_videos(self, match, youtube_videos): for youtube_video in youtube_videos: if youtube_video not in match.youtube_videos: match.youtube_videos.append(youtube_video) match.dirty = True # This is so hacky. -gregmarra 20130601 return match
<commit_before>from helpers.match_manipulator import MatchManipulator from models.match import Match class MatchSuggestionAccepter(object): """ Handle accepting Match suggestions. """ @classmethod def accept_suggestions(self, suggestions): matches = map(lambda match_future: match_future.get_result(), [Match.get_by_id_async(suggestion.target_key) for suggestion in suggestions]) pairs = zip(matches, suggestions) for match, suggestion in pairs: self._accept_suggestion(match, suggestion) matches, suggestions = zip(*pairs) matches = MatchManipulator.createOrUpdate(list(matches)) return matches @classmethod def _accept_suggestion(self, match, suggestion): if "youtube_videos" in suggestion.contents: match = self._merge_youtube_videos(match, suggestion.contents["youtube_videos"]) return match @classmethod def _merge_youtube_videos(self, match, youtube_videos): for youtube_video in youtube_videos: if youtube_video not in match.youtube_videos: match.youtube_videos.append(youtube_video) match.dirty = True # This is so hacky. -gregmarra 20130601 return match <commit_msg>Fix problem when not accepting any suggestions.<commit_after>
from helpers.match_manipulator import MatchManipulator from models.match import Match class MatchSuggestionAccepter(object): """ Handle accepting Match suggestions. """ @classmethod def accept_suggestions(self, suggestions): if (len(suggestions) < 1): return None matches = map(lambda match_future: match_future.get_result(), [Match.get_by_id_async(suggestion.target_key) for suggestion in suggestions]) pairs = zip(matches, suggestions) for match, suggestion in pairs: self._accept_suggestion(match, suggestion) matches, suggestions = zip(*pairs) matches = MatchManipulator.createOrUpdate(list(matches)) return matches @classmethod def _accept_suggestion(self, match, suggestion): if "youtube_videos" in suggestion.contents: match = self._merge_youtube_videos(match, suggestion.contents["youtube_videos"]) return match @classmethod def _merge_youtube_videos(self, match, youtube_videos): for youtube_video in youtube_videos: if youtube_video not in match.youtube_videos: match.youtube_videos.append(youtube_video) match.dirty = True # This is so hacky. -gregmarra 20130601 return match
from helpers.match_manipulator import MatchManipulator from models.match import Match class MatchSuggestionAccepter(object): """ Handle accepting Match suggestions. """ @classmethod def accept_suggestions(self, suggestions): matches = map(lambda match_future: match_future.get_result(), [Match.get_by_id_async(suggestion.target_key) for suggestion in suggestions]) pairs = zip(matches, suggestions) for match, suggestion in pairs: self._accept_suggestion(match, suggestion) matches, suggestions = zip(*pairs) matches = MatchManipulator.createOrUpdate(list(matches)) return matches @classmethod def _accept_suggestion(self, match, suggestion): if "youtube_videos" in suggestion.contents: match = self._merge_youtube_videos(match, suggestion.contents["youtube_videos"]) return match @classmethod def _merge_youtube_videos(self, match, youtube_videos): for youtube_video in youtube_videos: if youtube_video not in match.youtube_videos: match.youtube_videos.append(youtube_video) match.dirty = True # This is so hacky. -gregmarra 20130601 return match Fix problem when not accepting any suggestions.from helpers.match_manipulator import MatchManipulator from models.match import Match class MatchSuggestionAccepter(object): """ Handle accepting Match suggestions. """ @classmethod def accept_suggestions(self, suggestions): if (len(suggestions) < 1): return None matches = map(lambda match_future: match_future.get_result(), [Match.get_by_id_async(suggestion.target_key) for suggestion in suggestions]) pairs = zip(matches, suggestions) for match, suggestion in pairs: self._accept_suggestion(match, suggestion) matches, suggestions = zip(*pairs) matches = MatchManipulator.createOrUpdate(list(matches)) return matches @classmethod def _accept_suggestion(self, match, suggestion): if "youtube_videos" in suggestion.contents: match = self._merge_youtube_videos(match, suggestion.contents["youtube_videos"]) return match @classmethod def _merge_youtube_videos(self, match, youtube_videos): for youtube_video in youtube_videos: if youtube_video not in match.youtube_videos: match.youtube_videos.append(youtube_video) match.dirty = True # This is so hacky. -gregmarra 20130601 return match
<commit_before>from helpers.match_manipulator import MatchManipulator from models.match import Match class MatchSuggestionAccepter(object): """ Handle accepting Match suggestions. """ @classmethod def accept_suggestions(self, suggestions): matches = map(lambda match_future: match_future.get_result(), [Match.get_by_id_async(suggestion.target_key) for suggestion in suggestions]) pairs = zip(matches, suggestions) for match, suggestion in pairs: self._accept_suggestion(match, suggestion) matches, suggestions = zip(*pairs) matches = MatchManipulator.createOrUpdate(list(matches)) return matches @classmethod def _accept_suggestion(self, match, suggestion): if "youtube_videos" in suggestion.contents: match = self._merge_youtube_videos(match, suggestion.contents["youtube_videos"]) return match @classmethod def _merge_youtube_videos(self, match, youtube_videos): for youtube_video in youtube_videos: if youtube_video not in match.youtube_videos: match.youtube_videos.append(youtube_video) match.dirty = True # This is so hacky. -gregmarra 20130601 return match <commit_msg>Fix problem when not accepting any suggestions.<commit_after>from helpers.match_manipulator import MatchManipulator from models.match import Match class MatchSuggestionAccepter(object): """ Handle accepting Match suggestions. """ @classmethod def accept_suggestions(self, suggestions): if (len(suggestions) < 1): return None matches = map(lambda match_future: match_future.get_result(), [Match.get_by_id_async(suggestion.target_key) for suggestion in suggestions]) pairs = zip(matches, suggestions) for match, suggestion in pairs: self._accept_suggestion(match, suggestion) matches, suggestions = zip(*pairs) matches = MatchManipulator.createOrUpdate(list(matches)) return matches @classmethod def _accept_suggestion(self, match, suggestion): if "youtube_videos" in suggestion.contents: match = self._merge_youtube_videos(match, suggestion.contents["youtube_videos"]) return match @classmethod def _merge_youtube_videos(self, match, youtube_videos): for youtube_video in youtube_videos: if youtube_video not in match.youtube_videos: match.youtube_videos.append(youtube_video) match.dirty = True # This is so hacky. -gregmarra 20130601 return match
e513e41dd10df009a3db7641774db1acba60a301
tensormate/graph/__init__.py
tensormate/graph/__init__.py
from tensormate.graph.base import * from tensormate.graph.data_pipeline import *
from tensormate.graph.base import * from tensormate.graph.data_pipeline import * from tensormate.graph.image_graph import *
Add an access from graph
Add an access from graph
Python
apache-2.0
songgc/tensormate
from tensormate.graph.base import * from tensormate.graph.data_pipeline import * Add an access from graph
from tensormate.graph.base import * from tensormate.graph.data_pipeline import * from tensormate.graph.image_graph import *
<commit_before>from tensormate.graph.base import * from tensormate.graph.data_pipeline import * <commit_msg>Add an access from graph<commit_after>
from tensormate.graph.base import * from tensormate.graph.data_pipeline import * from tensormate.graph.image_graph import *
from tensormate.graph.base import * from tensormate.graph.data_pipeline import * Add an access from graphfrom tensormate.graph.base import * from tensormate.graph.data_pipeline import * from tensormate.graph.image_graph import *
<commit_before>from tensormate.graph.base import * from tensormate.graph.data_pipeline import * <commit_msg>Add an access from graph<commit_after>from tensormate.graph.base import * from tensormate.graph.data_pipeline import * from tensormate.graph.image_graph import *
ab342b6a90c34abbfc49c72a9f72251d238589b3
git_helper.py
git_helper.py
import os def git_file_path(view, git_path): if not git_path: return False full_file_path = view.file_name() git_path_to_file = full_file_path.replace(git_path,'') if git_path_to_file[0] == "/": git_path_to_file = git_path_to_file[1:] return git_path_to_file def git_root(directory): if os.path.exists(os.path.join(directory, '.git')): return directory else: parent = os.path.realpath(os.path.join(directory, os.path.pardir)) if parent == directory: # we have reached root dir return False else: return git_root(parent) def git_tree(view): full_file_path = view.file_name() file_parent_dir = os.path.realpath(os.path.dirname(full_file_path)) return git_root(file_parent_dir) def git_dir(directory): if not directory: return False return os.path.join(directory, '.git')
import os def git_file_path(view, git_path): if not git_path: return False full_file_path = os.path.realpath(view.file_name()) git_path_to_file = full_file_path.replace(git_path,'') if git_path_to_file[0] == "/": git_path_to_file = git_path_to_file[1:] return git_path_to_file def git_root(directory): if os.path.exists(os.path.join(directory, '.git')): return directory else: parent = os.path.realpath(os.path.join(directory, os.path.pardir)) if parent == directory: # we have reached root dir return False else: return git_root(parent) def git_tree(view): full_file_path = view.file_name() file_parent_dir = os.path.realpath(os.path.dirname(full_file_path)) return git_root(file_parent_dir) def git_dir(directory): if not directory: return False return os.path.join(directory, '.git')
Resolve symbolic links for filename in Sublime view - without this all files available through symlinks are considered as new to git
Resolve symbolic links for filename in Sublime view - without this all files available through symlinks are considered as new to git
Python
mit
robfrawley/sublime-git-gutter,jisaacks/GitGutter,robfrawley/sublime-git-gutter,akpersad/GitGutter,ariofrio/VcsGutter,natecavanaugh/GitGutter,biodamasceno/GitGutter,tushortz/GitGutter,michaelhogg/GitGutter,bradsokol/VcsGutter,tushortz/GitGutter,michaelhogg/GitGutter,natecavanaugh/GitGutter,akpersad/GitGutter,robfrawley/sublime-git-gutter,bradsokol/VcsGutter,natecavanaugh/GitGutter,akpersad/GitGutter,michaelhogg/GitGutter,akpersad/GitGutter,ariofrio/VcsGutter,natecavanaugh/GitGutter,tushortz/GitGutter,michaelhogg/GitGutter,biodamasceno/GitGutter,tushortz/GitGutter,biodamasceno/GitGutter,robfrawley/sublime-git-gutter,biodamasceno/GitGutter
import os

def git_file_path(view, git_path):
    if not git_path:
        return False
    full_file_path = view.file_name()
    git_path_to_file = full_file_path.replace(git_path,'')
    if git_path_to_file[0] == "/":
        git_path_to_file = git_path_to_file[1:]
    return git_path_to_file

def git_root(directory):
    if os.path.exists(os.path.join(directory, '.git')):
        return directory
    else:
        parent = os.path.realpath(os.path.join(directory, os.path.pardir))
        if parent == directory:
            # we have reached root dir
            return False
        else:
            return git_root(parent)

def git_tree(view):
    full_file_path = view.file_name()
    file_parent_dir = os.path.realpath(os.path.dirname(full_file_path))
    return git_root(file_parent_dir)

def git_dir(directory):
    if not directory:
        return False
    return os.path.join(directory, '.git')
Resolve symbolic links for filename in Sublime view - without this all files available through symlinks are considered as new to git

import os

def git_file_path(view, git_path):
    if not git_path:
        return False
    full_file_path = os.path.realpath(view.file_name())
    git_path_to_file = full_file_path.replace(git_path,'')
    if git_path_to_file[0] == "/":
        git_path_to_file = git_path_to_file[1:]
    return git_path_to_file

def git_root(directory):
    if os.path.exists(os.path.join(directory, '.git')):
        return directory
    else:
        parent = os.path.realpath(os.path.join(directory, os.path.pardir))
        if parent == directory:
            # we have reached root dir
            return False
        else:
            return git_root(parent)

def git_tree(view):
    full_file_path = view.file_name()
    file_parent_dir = os.path.realpath(os.path.dirname(full_file_path))
    return git_root(file_parent_dir)

def git_dir(directory):
    if not directory:
        return False
    return os.path.join(directory, '.git')

<commit_before>import os

def git_file_path(view, git_path):
    if not git_path:
        return False
    full_file_path = view.file_name()
    git_path_to_file = full_file_path.replace(git_path,'')
    if git_path_to_file[0] == "/":
        git_path_to_file = git_path_to_file[1:]
    return git_path_to_file

def git_root(directory):
    if os.path.exists(os.path.join(directory, '.git')):
        return directory
    else:
        parent = os.path.realpath(os.path.join(directory, os.path.pardir))
        if parent == directory:
            # we have reached root dir
            return False
        else:
            return git_root(parent)

def git_tree(view):
    full_file_path = view.file_name()
    file_parent_dir = os.path.realpath(os.path.dirname(full_file_path))
    return git_root(file_parent_dir)

def git_dir(directory):
    if not directory:
        return False
    return os.path.join(directory, '.git')
<commit_msg>Resolve symbolic links for filename in Sublime view - without this all files available through symlinks are considered as new to git<commit_after>

import os

def git_file_path(view, git_path):
    if not git_path:
        return False
    full_file_path = os.path.realpath(view.file_name())
    git_path_to_file = full_file_path.replace(git_path,'')
    if git_path_to_file[0] == "/":
        git_path_to_file = git_path_to_file[1:]
    return git_path_to_file

def git_root(directory):
    if os.path.exists(os.path.join(directory, '.git')):
        return directory
    else:
        parent = os.path.realpath(os.path.join(directory, os.path.pardir))
        if parent == directory:
            # we have reached root dir
            return False
        else:
            return git_root(parent)

def git_tree(view):
    full_file_path = view.file_name()
    file_parent_dir = os.path.realpath(os.path.dirname(full_file_path))
    return git_root(file_parent_dir)

def git_dir(directory):
    if not directory:
        return False
    return os.path.join(directory, '.git')

import os

def git_file_path(view, git_path):
    if not git_path:
        return False
    full_file_path = view.file_name()
    git_path_to_file = full_file_path.replace(git_path,'')
    if git_path_to_file[0] == "/":
        git_path_to_file = git_path_to_file[1:]
    return git_path_to_file

def git_root(directory):
    if os.path.exists(os.path.join(directory, '.git')):
        return directory
    else:
        parent = os.path.realpath(os.path.join(directory, os.path.pardir))
        if parent == directory:
            # we have reached root dir
            return False
        else:
            return git_root(parent)

def git_tree(view):
    full_file_path = view.file_name()
    file_parent_dir = os.path.realpath(os.path.dirname(full_file_path))
    return git_root(file_parent_dir)

def git_dir(directory):
    if not directory:
        return False
    return os.path.join(directory, '.git')
Resolve symbolic links for filename in Sublime view - without this all files available through symlinks are considered as new to git
import os

def git_file_path(view, git_path):
    if not git_path:
        return False
    full_file_path = os.path.realpath(view.file_name())
    git_path_to_file = full_file_path.replace(git_path,'')
    if git_path_to_file[0] == "/":
        git_path_to_file = git_path_to_file[1:]
    return git_path_to_file

def git_root(directory):
    if os.path.exists(os.path.join(directory, '.git')):
        return directory
    else:
        parent = os.path.realpath(os.path.join(directory, os.path.pardir))
        if parent == directory:
            # we have reached root dir
            return False
        else:
            return git_root(parent)

def git_tree(view):
    full_file_path = view.file_name()
    file_parent_dir = os.path.realpath(os.path.dirname(full_file_path))
    return git_root(file_parent_dir)

def git_dir(directory):
    if not directory:
        return False
    return os.path.join(directory, '.git')

<commit_before>import os

def git_file_path(view, git_path):
    if not git_path:
        return False
    full_file_path = view.file_name()
    git_path_to_file = full_file_path.replace(git_path,'')
    if git_path_to_file[0] == "/":
        git_path_to_file = git_path_to_file[1:]
    return git_path_to_file

def git_root(directory):
    if os.path.exists(os.path.join(directory, '.git')):
        return directory
    else:
        parent = os.path.realpath(os.path.join(directory, os.path.pardir))
        if parent == directory:
            # we have reached root dir
            return False
        else:
            return git_root(parent)

def git_tree(view):
    full_file_path = view.file_name()
    file_parent_dir = os.path.realpath(os.path.dirname(full_file_path))
    return git_root(file_parent_dir)

def git_dir(directory):
    if not directory:
        return False
    return os.path.join(directory, '.git')
<commit_msg>Resolve symbolic links for filename in Sublime view - without this all files available through symlinks are considered as new to git<commit_after>import os

def git_file_path(view, git_path):
    if not git_path:
        return False
    full_file_path = os.path.realpath(view.file_name())
    git_path_to_file = full_file_path.replace(git_path,'')
    if git_path_to_file[0] == "/":
        git_path_to_file = git_path_to_file[1:]
    return git_path_to_file

def git_root(directory):
    if os.path.exists(os.path.join(directory, '.git')):
        return directory
    else:
        parent = os.path.realpath(os.path.join(directory, os.path.pardir))
        if parent == directory:
            # we have reached root dir
            return False
        else:
            return git_root(parent)

def git_tree(view):
    full_file_path = view.file_name()
    file_parent_dir = os.path.realpath(os.path.dirname(full_file_path))
    return git_root(file_parent_dir)

def git_dir(directory):
    if not directory:
        return False
    return os.path.join(directory, '.git')
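The fix in the record above relies on os.path.realpath, which canonicalizes a path by resolving symbolic links. Without it, a file opened through a symlink never string-matches the repository root, so the replace() in git_file_path leaves the path untouched and the file looks new to git. A small illustration with a hypothetical layout (/home/user/link pointing at /home/user/projects/repo):

import os

linked = '/home/user/link/git_helper.py'
resolved = os.path.realpath(linked)  # '/home/user/projects/repo/git_helper.py' if the link exists
# Only the resolved form strips cleanly against the repo root:
print(resolved.replace('/home/user/projects/repo', ''))  # '/git_helper.py'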
7b8fc55b9c27b3001709b65b373b37394a9b332e
capstone/player/monte_carlo.py
capstone/player/monte_carlo.py
import random
from collections import defaultdict, Counter
from . import Player
from ..util import utility

class MonteCarlo(Player):

    name = 'MonteCarlo'

    def __init__(self, n_sims=100):
        self.n_sims = n_sims

    def __repr__(self):
        return type(self).name

    def __str__(self):
        return type(self).name

    def move(self, game):
        counter = defaultdict(int)
        for i in range(self.n_sims):
            for move in game.legal_moves():
                new_game = game.copy()
                new_game.make_move(move)
                while not new_game.is_over():
                    rand_move = random.choice(new_game.legal_moves())
                    new_game.make_move(rand_move)
                counter[move] += utility(new_game, game.cur_player())
        m = Counter(counter).most_common(1)
        return m[0][0]

    ##########
    # Player #
    ##########

    def choose_move(self, game):
        return self.move(game)

import random
from collections import defaultdict, Counter
from . import Player
from ..util import utility

class MonteCarlo(Player):

    name = 'MonteCarlo'

    def __init__(self, n_sims=1000):
        self.n_sims = n_sims

    def __repr__(self):
        return type(self).name

    def __str__(self):
        return type(self).name

    def move(self, game):
        counter = defaultdict(int)
        for i in range(self.n_sims):
            for move in game.legal_moves():
                new_game = game.copy()
                new_game.make_move(move)
                while not new_game.is_over():
                    rand_move = random.choice(new_game.legal_moves())
                    new_game.make_move(rand_move)
                counter[move] += utility(new_game, game.cur_player())
        m = Counter(counter).most_common(1)
        return m[0][0]

    ##########
    # Player #
    ##########

    def choose_move(self, game):
        return self.move(game)
Increase default MonteCarlo n_sims from 100 to 1000
Increase default MonteCarlo n_sims from 100 to 1000
Python
mit
davidrobles/mlnd-capstone-code
import random
from collections import defaultdict, Counter
from . import Player
from ..util import utility

class MonteCarlo(Player):

    name = 'MonteCarlo'

    def __init__(self, n_sims=100):
        self.n_sims = n_sims

    def __repr__(self):
        return type(self).name

    def __str__(self):
        return type(self).name

    def move(self, game):
        counter = defaultdict(int)
        for i in range(self.n_sims):
            for move in game.legal_moves():
                new_game = game.copy()
                new_game.make_move(move)
                while not new_game.is_over():
                    rand_move = random.choice(new_game.legal_moves())
                    new_game.make_move(rand_move)
                counter[move] += utility(new_game, game.cur_player())
        m = Counter(counter).most_common(1)
        return m[0][0]

    ##########
    # Player #
    ##########

    def choose_move(self, game):
        return self.move(game)
Increase default MonteCarlo n_sims from 100 to 1000

import random
from collections import defaultdict, Counter
from . import Player
from ..util import utility

class MonteCarlo(Player):

    name = 'MonteCarlo'

    def __init__(self, n_sims=1000):
        self.n_sims = n_sims

    def __repr__(self):
        return type(self).name

    def __str__(self):
        return type(self).name

    def move(self, game):
        counter = defaultdict(int)
        for i in range(self.n_sims):
            for move in game.legal_moves():
                new_game = game.copy()
                new_game.make_move(move)
                while not new_game.is_over():
                    rand_move = random.choice(new_game.legal_moves())
                    new_game.make_move(rand_move)
                counter[move] += utility(new_game, game.cur_player())
        m = Counter(counter).most_common(1)
        return m[0][0]

    ##########
    # Player #
    ##########

    def choose_move(self, game):
        return self.move(game)

<commit_before>import random
from collections import defaultdict, Counter
from . import Player
from ..util import utility

class MonteCarlo(Player):

    name = 'MonteCarlo'

    def __init__(self, n_sims=100):
        self.n_sims = n_sims

    def __repr__(self):
        return type(self).name

    def __str__(self):
        return type(self).name

    def move(self, game):
        counter = defaultdict(int)
        for i in range(self.n_sims):
            for move in game.legal_moves():
                new_game = game.copy()
                new_game.make_move(move)
                while not new_game.is_over():
                    rand_move = random.choice(new_game.legal_moves())
                    new_game.make_move(rand_move)
                counter[move] += utility(new_game, game.cur_player())
        m = Counter(counter).most_common(1)
        return m[0][0]

    ##########
    # Player #
    ##########

    def choose_move(self, game):
        return self.move(game)
<commit_msg>Increase default MonteCarlo n_sims from 100 to 1000<commit_after>

import random
from collections import defaultdict, Counter
from . import Player
from ..util import utility

class MonteCarlo(Player):

    name = 'MonteCarlo'

    def __init__(self, n_sims=1000):
        self.n_sims = n_sims

    def __repr__(self):
        return type(self).name

    def __str__(self):
        return type(self).name

    def move(self, game):
        counter = defaultdict(int)
        for i in range(self.n_sims):
            for move in game.legal_moves():
                new_game = game.copy()
                new_game.make_move(move)
                while not new_game.is_over():
                    rand_move = random.choice(new_game.legal_moves())
                    new_game.make_move(rand_move)
                counter[move] += utility(new_game, game.cur_player())
        m = Counter(counter).most_common(1)
        return m[0][0]

    ##########
    # Player #
    ##########

    def choose_move(self, game):
        return self.move(game)

import random
from collections import defaultdict, Counter
from . import Player
from ..util import utility

class MonteCarlo(Player):

    name = 'MonteCarlo'

    def __init__(self, n_sims=100):
        self.n_sims = n_sims

    def __repr__(self):
        return type(self).name

    def __str__(self):
        return type(self).name

    def move(self, game):
        counter = defaultdict(int)
        for i in range(self.n_sims):
            for move in game.legal_moves():
                new_game = game.copy()
                new_game.make_move(move)
                while not new_game.is_over():
                    rand_move = random.choice(new_game.legal_moves())
                    new_game.make_move(rand_move)
                counter[move] += utility(new_game, game.cur_player())
        m = Counter(counter).most_common(1)
        return m[0][0]

    ##########
    # Player #
    ##########

    def choose_move(self, game):
        return self.move(game)
Increase default MonteCarlo n_sims from 100 to 1000
import random
from collections import defaultdict, Counter
from . import Player
from ..util import utility

class MonteCarlo(Player):

    name = 'MonteCarlo'

    def __init__(self, n_sims=1000):
        self.n_sims = n_sims

    def __repr__(self):
        return type(self).name

    def __str__(self):
        return type(self).name

    def move(self, game):
        counter = defaultdict(int)
        for i in range(self.n_sims):
            for move in game.legal_moves():
                new_game = game.copy()
                new_game.make_move(move)
                while not new_game.is_over():
                    rand_move = random.choice(new_game.legal_moves())
                    new_game.make_move(rand_move)
                counter[move] += utility(new_game, game.cur_player())
        m = Counter(counter).most_common(1)
        return m[0][0]

    ##########
    # Player #
    ##########

    def choose_move(self, game):
        return self.move(game)

<commit_before>import random
from collections import defaultdict, Counter
from . import Player
from ..util import utility

class MonteCarlo(Player):

    name = 'MonteCarlo'

    def __init__(self, n_sims=100):
        self.n_sims = n_sims

    def __repr__(self):
        return type(self).name

    def __str__(self):
        return type(self).name

    def move(self, game):
        counter = defaultdict(int)
        for i in range(self.n_sims):
            for move in game.legal_moves():
                new_game = game.copy()
                new_game.make_move(move)
                while not new_game.is_over():
                    rand_move = random.choice(new_game.legal_moves())
                    new_game.make_move(rand_move)
                counter[move] += utility(new_game, game.cur_player())
        m = Counter(counter).most_common(1)
        return m[0][0]

    ##########
    # Player #
    ##########

    def choose_move(self, game):
        return self.move(game)
<commit_msg>Increase default MonteCarlo n_sims from 100 to 1000<commit_after>import random
from collections import defaultdict, Counter
from . import Player
from ..util import utility

class MonteCarlo(Player):

    name = 'MonteCarlo'

    def __init__(self, n_sims=1000):
        self.n_sims = n_sims

    def __repr__(self):
        return type(self).name

    def __str__(self):
        return type(self).name

    def move(self, game):
        counter = defaultdict(int)
        for i in range(self.n_sims):
            for move in game.legal_moves():
                new_game = game.copy()
                new_game.make_move(move)
                while not new_game.is_over():
                    rand_move = random.choice(new_game.legal_moves())
                    new_game.make_move(rand_move)
                counter[move] += utility(new_game, game.cur_player())
        m = Counter(counter).most_common(1)
        return m[0][0]

    ##########
    # Player #
    ##########

    def choose_move(self, game):
        return self.move(game)
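Some context on the tuning change above: move() runs n_sims full passes, and each pass plays every legal move out to the end of a random game, so the playout count multiplies directly into per-move latency. A rough harness for measuring that trade-off, assuming the same player/game API as the record:

import time

def time_move(player, game):
    # One decision. Raising n_sims from 100 to 1000 makes this roughly
    # 10x slower, in exchange for lower-variance move-value estimates.
    start = time.time()
    chosen = player.choose_move(game)
    return chosen, time.time() - start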
5d5e9ff082eb6f277270045618812c4b2c49daab
31-trinity/tf-31.py
31-trinity/tf-31.py
#!/usr/bin/env python

import sys, re, operator, collections

#
# Model
#
class WordFrequenciesModel:
    """ Models the data. In this case, we're only interested in words and their frequencies as an end result """
    freqs = {}
    def __init__(self, path_to_file):
        stopwords = set(open('../stop_words.txt').read().split(','))
        words = re.findall('[a-z]{2,}', open(path_to_file).read().lower())
        self.freqs = collections.Counter(w for w in words if w not in stopwords)

#
# View
#
class WordFrequenciesView:
    def __init__(self, model):
        self._model = model

    def render(self):
        sorted_freqs = sorted(self._model.freqs.iteritems(), key=operator.itemgetter(1), reverse=True)
        for (w, c) in sorted_freqs[:25]:
            print w, '-', c

#
# Controller
#
class WordFrequencyController:
    def __init__(self, model, view):
        self._model = model
        self._view = view
        view.render()

#
# Main
#
m = WordFrequenciesModel(sys.argv[1])
v = WordFrequenciesView(m)
c = WordFrequencyController(m, v)

#!/usr/bin/env python

import sys, re, operator, collections

class WordFrequenciesModel:
    """ Models the data. In this case, we're only interested in words and their frequencies as an end result """
    freqs = {}
    def __init__(self, path_to_file):
        self.update(path_to_file)

    def update(self, path_to_file):
        try:
            stopwords = set(open('../stop_words.txt').read().split(','))
            words = re.findall('[a-z]{2,}', open(path_to_file).read().lower())
            self.freqs = collections.Counter(w for w in words if w not in stopwords)
        except IOError:
            print "File not found"
            self.freqs = {}

class WordFrequenciesView:
    def __init__(self, model):
        self._model = model

    def render(self):
        sorted_freqs = sorted(self._model.freqs.iteritems(), key=operator.itemgetter(1), reverse=True)
        for (w, c) in sorted_freqs[:25]:
            print w, '-', c

class WordFrequencyController:
    def __init__(self, model, view):
        self._model, self._view = model, view
        view.render()

    def run(self):
        while True:
            print "Next file: "
            sys.stdout.flush()
            filename = sys.stdin.readline().strip()
            self._model.update(filename)
            self._view.render()

m = WordFrequenciesModel(sys.argv[1])
v = WordFrequenciesView(m)
c = WordFrequencyController(m, v)
c.run()
Make the mvc example interactive
Make the mvc example interactive
Python
mit
alex-quiterio/exercises-in-programming-style,alex-quiterio/exercises-in-programming-style,alex-quiterio/exercises-in-programming-style,alex-quiterio/exercises-in-programming-style,alex-quiterio/exercises-in-programming-style
#!/usr/bin/env python

import sys, re, operator, collections

#
# Model
#
class WordFrequenciesModel:
    """ Models the data. In this case, we're only interested in words and their frequencies as an end result """
    freqs = {}
    def __init__(self, path_to_file):
        stopwords = set(open('../stop_words.txt').read().split(','))
        words = re.findall('[a-z]{2,}', open(path_to_file).read().lower())
        self.freqs = collections.Counter(w for w in words if w not in stopwords)

#
# View
#
class WordFrequenciesView:
    def __init__(self, model):
        self._model = model

    def render(self):
        sorted_freqs = sorted(self._model.freqs.iteritems(), key=operator.itemgetter(1), reverse=True)
        for (w, c) in sorted_freqs[:25]:
            print w, '-', c

#
# Controller
#
class WordFrequencyController:
    def __init__(self, model, view):
        self._model = model
        self._view = view
        view.render()

#
# Main
#
m = WordFrequenciesModel(sys.argv[1])
v = WordFrequenciesView(m)
c = WordFrequencyController(m, v)
Make the mvc example interactive

#!/usr/bin/env python

import sys, re, operator, collections

class WordFrequenciesModel:
    """ Models the data. In this case, we're only interested in words and their frequencies as an end result """
    freqs = {}
    def __init__(self, path_to_file):
        self.update(path_to_file)

    def update(self, path_to_file):
        try:
            stopwords = set(open('../stop_words.txt').read().split(','))
            words = re.findall('[a-z]{2,}', open(path_to_file).read().lower())
            self.freqs = collections.Counter(w for w in words if w not in stopwords)
        except IOError:
            print "File not found"
            self.freqs = {}

class WordFrequenciesView:
    def __init__(self, model):
        self._model = model

    def render(self):
        sorted_freqs = sorted(self._model.freqs.iteritems(), key=operator.itemgetter(1), reverse=True)
        for (w, c) in sorted_freqs[:25]:
            print w, '-', c

class WordFrequencyController:
    def __init__(self, model, view):
        self._model, self._view = model, view
        view.render()

    def run(self):
        while True:
            print "Next file: "
            sys.stdout.flush()
            filename = sys.stdin.readline().strip()
            self._model.update(filename)
            self._view.render()

m = WordFrequenciesModel(sys.argv[1])
v = WordFrequenciesView(m)
c = WordFrequencyController(m, v)
c.run()

<commit_before>#!/usr/bin/env python

import sys, re, operator, collections

#
# Model
#
class WordFrequenciesModel:
    """ Models the data. In this case, we're only interested in words and their frequencies as an end result """
    freqs = {}
    def __init__(self, path_to_file):
        stopwords = set(open('../stop_words.txt').read().split(','))
        words = re.findall('[a-z]{2,}', open(path_to_file).read().lower())
        self.freqs = collections.Counter(w for w in words if w not in stopwords)

#
# View
#
class WordFrequenciesView:
    def __init__(self, model):
        self._model = model

    def render(self):
        sorted_freqs = sorted(self._model.freqs.iteritems(), key=operator.itemgetter(1), reverse=True)
        for (w, c) in sorted_freqs[:25]:
            print w, '-', c

#
# Controller
#
class WordFrequencyController:
    def __init__(self, model, view):
        self._model = model
        self._view = view
        view.render()

#
# Main
#
m = WordFrequenciesModel(sys.argv[1])
v = WordFrequenciesView(m)
c = WordFrequencyController(m, v)
<commit_msg>Make the mvc example interactive<commit_after>

#!/usr/bin/env python

import sys, re, operator, collections

class WordFrequenciesModel:
    """ Models the data. In this case, we're only interested in words and their frequencies as an end result """
    freqs = {}
    def __init__(self, path_to_file):
        self.update(path_to_file)

    def update(self, path_to_file):
        try:
            stopwords = set(open('../stop_words.txt').read().split(','))
            words = re.findall('[a-z]{2,}', open(path_to_file).read().lower())
            self.freqs = collections.Counter(w for w in words if w not in stopwords)
        except IOError:
            print "File not found"
            self.freqs = {}

class WordFrequenciesView:
    def __init__(self, model):
        self._model = model

    def render(self):
        sorted_freqs = sorted(self._model.freqs.iteritems(), key=operator.itemgetter(1), reverse=True)
        for (w, c) in sorted_freqs[:25]:
            print w, '-', c

class WordFrequencyController:
    def __init__(self, model, view):
        self._model, self._view = model, view
        view.render()

    def run(self):
        while True:
            print "Next file: "
            sys.stdout.flush()
            filename = sys.stdin.readline().strip()
            self._model.update(filename)
            self._view.render()

m = WordFrequenciesModel(sys.argv[1])
v = WordFrequenciesView(m)
c = WordFrequencyController(m, v)
c.run()

#!/usr/bin/env python

import sys, re, operator, collections

#
# Model
#
class WordFrequenciesModel:
    """ Models the data. In this case, we're only interested in words and their frequencies as an end result """
    freqs = {}
    def __init__(self, path_to_file):
        stopwords = set(open('../stop_words.txt').read().split(','))
        words = re.findall('[a-z]{2,}', open(path_to_file).read().lower())
        self.freqs = collections.Counter(w for w in words if w not in stopwords)

#
# View
#
class WordFrequenciesView:
    def __init__(self, model):
        self._model = model

    def render(self):
        sorted_freqs = sorted(self._model.freqs.iteritems(), key=operator.itemgetter(1), reverse=True)
        for (w, c) in sorted_freqs[:25]:
            print w, '-', c

#
# Controller
#
class WordFrequencyController:
    def __init__(self, model, view):
        self._model = model
        self._view = view
        view.render()

#
# Main
#
m = WordFrequenciesModel(sys.argv[1])
v = WordFrequenciesView(m)
c = WordFrequencyController(m, v)
Make the mvc example interactive
#!/usr/bin/env python

import sys, re, operator, collections

class WordFrequenciesModel:
    """ Models the data. In this case, we're only interested in words and their frequencies as an end result """
    freqs = {}
    def __init__(self, path_to_file):
        self.update(path_to_file)

    def update(self, path_to_file):
        try:
            stopwords = set(open('../stop_words.txt').read().split(','))
            words = re.findall('[a-z]{2,}', open(path_to_file).read().lower())
            self.freqs = collections.Counter(w for w in words if w not in stopwords)
        except IOError:
            print "File not found"
            self.freqs = {}

class WordFrequenciesView:
    def __init__(self, model):
        self._model = model

    def render(self):
        sorted_freqs = sorted(self._model.freqs.iteritems(), key=operator.itemgetter(1), reverse=True)
        for (w, c) in sorted_freqs[:25]:
            print w, '-', c

class WordFrequencyController:
    def __init__(self, model, view):
        self._model, self._view = model, view
        view.render()

    def run(self):
        while True:
            print "Next file: "
            sys.stdout.flush()
            filename = sys.stdin.readline().strip()
            self._model.update(filename)
            self._view.render()

m = WordFrequenciesModel(sys.argv[1])
v = WordFrequenciesView(m)
c = WordFrequencyController(m, v)
c.run()

<commit_before>#!/usr/bin/env python

import sys, re, operator, collections

#
# Model
#
class WordFrequenciesModel:
    """ Models the data. In this case, we're only interested in words and their frequencies as an end result """
    freqs = {}
    def __init__(self, path_to_file):
        stopwords = set(open('../stop_words.txt').read().split(','))
        words = re.findall('[a-z]{2,}', open(path_to_file).read().lower())
        self.freqs = collections.Counter(w for w in words if w not in stopwords)

#
# View
#
class WordFrequenciesView:
    def __init__(self, model):
        self._model = model

    def render(self):
        sorted_freqs = sorted(self._model.freqs.iteritems(), key=operator.itemgetter(1), reverse=True)
        for (w, c) in sorted_freqs[:25]:
            print w, '-', c

#
# Controller
#
class WordFrequencyController:
    def __init__(self, model, view):
        self._model = model
        self._view = view
        view.render()

#
# Main
#
m = WordFrequenciesModel(sys.argv[1])
v = WordFrequenciesView(m)
c = WordFrequencyController(m, v)
<commit_msg>Make the mvc example interactive<commit_after>#!/usr/bin/env python

import sys, re, operator, collections

class WordFrequenciesModel:
    """ Models the data. In this case, we're only interested in words and their frequencies as an end result """
    freqs = {}
    def __init__(self, path_to_file):
        self.update(path_to_file)

    def update(self, path_to_file):
        try:
            stopwords = set(open('../stop_words.txt').read().split(','))
            words = re.findall('[a-z]{2,}', open(path_to_file).read().lower())
            self.freqs = collections.Counter(w for w in words if w not in stopwords)
        except IOError:
            print "File not found"
            self.freqs = {}

class WordFrequenciesView:
    def __init__(self, model):
        self._model = model

    def render(self):
        sorted_freqs = sorted(self._model.freqs.iteritems(), key=operator.itemgetter(1), reverse=True)
        for (w, c) in sorted_freqs[:25]:
            print w, '-', c

class WordFrequencyController:
    def __init__(self, model, view):
        self._model, self._view = model, view
        view.render()

    def run(self):
        while True:
            print "Next file: "
            sys.stdout.flush()
            filename = sys.stdin.readline().strip()
            self._model.update(filename)
            self._view.render()

m = WordFrequenciesModel(sys.argv[1])
v = WordFrequenciesView(m)
c = WordFrequencyController(m, v)
c.run()
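One caveat when reusing the record above: the code is Python 2 only (bare print statements, dict.iteritems). A sketch of the view's rendering logic ported to Python 3, assuming the model keeps the same freqs mapping:

import operator

def render(freqs):
    # Python 3 port of WordFrequenciesView.render: items() replaces
    # iteritems(), and print is a function rather than a statement.
    sorted_freqs = sorted(freqs.items(), key=operator.itemgetter(1), reverse=True)
    for w, c in sorted_freqs[:25]:
        print(w, '-', c)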
78e6aa845fac79d8b0d015840897db1fef5f06d0
polygon2geojson.py
polygon2geojson.py
import argparse
import os

import fiona
from shapely.geometry import Polygon, mapping

def remove_file(file_name):
    try:
        os.remove(file_name)
    except OSError:
        pass

def read_polygon(polygon_filename):
    with open(polygon_filename) as f:
        return f.readlines()

def clean_poylgon(polygon_data):
    coordinates = polygon_data[2:][:-2]
    coordinates = [item.split(' ') for item in coordinates]
    coordinates = [filter(None, item) for item in coordinates]
    coordinates = [(float(item[0]), float(item[1])) for item in coordinates]
    return coordinates

def write_geojson(data):
    geojson_filename = 'activity.geojson'
    remove_file(geojson_filename)
    schema = {'geometry': 'Polygon','properties': {}}
    with fiona.open(geojson_filename, 'w', 'GeoJSON', schema) as output:
        output.write({'geometry':mapping(Polygon(data)), 'properties':{}})

def main(polygon_filename):
    polygon_data = read_polygon(polygon_filename)
    coordinates = clean_poylgon(polygon_data)
    write_geojson(coordinates)

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("polygon_filename")
    args = parser.parse_args()
    main(args.polygon_filename)

import argparse
import os

import fiona
from shapely.geometry import Polygon, mapping

def remove_file(file_name):
    try:
        os.remove(file_name)
    except OSError:
        pass

def read_polygon(polygon_filename):
    with open(polygon_filename) as f:
        return f.readlines()

def clean_poylgon(polygon_data):
    coordinates = polygon_data[2:][:-2]
    coordinates = [item.split(' ') for item in coordinates]
    coordinates = [filter(None, item) for item in coordinates]
    coordinates = [(float(item[0]), float(item[1])) for item in coordinates]
    return coordinates

def write_geojson(data, polygon_filename):
    geojson_filename = polygon_filename.split('.')[0] + ".geojson"
    remove_file(geojson_filename)
    schema = {'geometry': 'Polygon','properties': {}}
    with fiona.open(geojson_filename, 'w', 'GeoJSON', schema) as output:
        output.write({'geometry':mapping(Polygon(data)), 'properties':{}})

def main(polygon_filename):
    polygon_data = read_polygon(polygon_filename)
    coordinates = clean_poylgon(polygon_data)
    write_geojson(coordinates, polygon_filename)

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("polygon_filename")
    args = parser.parse_args()
    main(args.polygon_filename)
Use output name from input name
Use output name from input name
Python
unlicense
ustroetz/polygon2osm
import argparse
import os

import fiona
from shapely.geometry import Polygon, mapping

def remove_file(file_name):
    try:
        os.remove(file_name)
    except OSError:
        pass

def read_polygon(polygon_filename):
    with open(polygon_filename) as f:
        return f.readlines()

def clean_poylgon(polygon_data):
    coordinates = polygon_data[2:][:-2]
    coordinates = [item.split(' ') for item in coordinates]
    coordinates = [filter(None, item) for item in coordinates]
    coordinates = [(float(item[0]), float(item[1])) for item in coordinates]
    return coordinates

def write_geojson(data):
    geojson_filename = 'activity.geojson'
    remove_file(geojson_filename)
    schema = {'geometry': 'Polygon','properties': {}}
    with fiona.open(geojson_filename, 'w', 'GeoJSON', schema) as output:
        output.write({'geometry':mapping(Polygon(data)), 'properties':{}})

def main(polygon_filename):
    polygon_data = read_polygon(polygon_filename)
    coordinates = clean_poylgon(polygon_data)
    write_geojson(coordinates)

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("polygon_filename")
    args = parser.parse_args()
    main(args.polygon_filename)
Use output name from input name

import argparse
import os

import fiona
from shapely.geometry import Polygon, mapping

def remove_file(file_name):
    try:
        os.remove(file_name)
    except OSError:
        pass

def read_polygon(polygon_filename):
    with open(polygon_filename) as f:
        return f.readlines()

def clean_poylgon(polygon_data):
    coordinates = polygon_data[2:][:-2]
    coordinates = [item.split(' ') for item in coordinates]
    coordinates = [filter(None, item) for item in coordinates]
    coordinates = [(float(item[0]), float(item[1])) for item in coordinates]
    return coordinates

def write_geojson(data, polygon_filename):
    geojson_filename = polygon_filename.split('.')[0] + ".geojson"
    remove_file(geojson_filename)
    schema = {'geometry': 'Polygon','properties': {}}
    with fiona.open(geojson_filename, 'w', 'GeoJSON', schema) as output:
        output.write({'geometry':mapping(Polygon(data)), 'properties':{}})

def main(polygon_filename):
    polygon_data = read_polygon(polygon_filename)
    coordinates = clean_poylgon(polygon_data)
    write_geojson(coordinates, polygon_filename)

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("polygon_filename")
    args = parser.parse_args()
    main(args.polygon_filename)

<commit_before>import argparse
import os

import fiona
from shapely.geometry import Polygon, mapping

def remove_file(file_name):
    try:
        os.remove(file_name)
    except OSError:
        pass

def read_polygon(polygon_filename):
    with open(polygon_filename) as f:
        return f.readlines()

def clean_poylgon(polygon_data):
    coordinates = polygon_data[2:][:-2]
    coordinates = [item.split(' ') for item in coordinates]
    coordinates = [filter(None, item) for item in coordinates]
    coordinates = [(float(item[0]), float(item[1])) for item in coordinates]
    return coordinates

def write_geojson(data):
    geojson_filename = 'activity.geojson'
    remove_file(geojson_filename)
    schema = {'geometry': 'Polygon','properties': {}}
    with fiona.open(geojson_filename, 'w', 'GeoJSON', schema) as output:
        output.write({'geometry':mapping(Polygon(data)), 'properties':{}})

def main(polygon_filename):
    polygon_data = read_polygon(polygon_filename)
    coordinates = clean_poylgon(polygon_data)
    write_geojson(coordinates)

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("polygon_filename")
    args = parser.parse_args()
    main(args.polygon_filename)
<commit_msg>Use output name from input name<commit_after>

import argparse
import os

import fiona
from shapely.geometry import Polygon, mapping

def remove_file(file_name):
    try:
        os.remove(file_name)
    except OSError:
        pass

def read_polygon(polygon_filename):
    with open(polygon_filename) as f:
        return f.readlines()

def clean_poylgon(polygon_data):
    coordinates = polygon_data[2:][:-2]
    coordinates = [item.split(' ') for item in coordinates]
    coordinates = [filter(None, item) for item in coordinates]
    coordinates = [(float(item[0]), float(item[1])) for item in coordinates]
    return coordinates

def write_geojson(data, polygon_filename):
    geojson_filename = polygon_filename.split('.')[0] + ".geojson"
    remove_file(geojson_filename)
    schema = {'geometry': 'Polygon','properties': {}}
    with fiona.open(geojson_filename, 'w', 'GeoJSON', schema) as output:
        output.write({'geometry':mapping(Polygon(data)), 'properties':{}})

def main(polygon_filename):
    polygon_data = read_polygon(polygon_filename)
    coordinates = clean_poylgon(polygon_data)
    write_geojson(coordinates, polygon_filename)

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("polygon_filename")
    args = parser.parse_args()
    main(args.polygon_filename)

import argparse
import os

import fiona
from shapely.geometry import Polygon, mapping

def remove_file(file_name):
    try:
        os.remove(file_name)
    except OSError:
        pass

def read_polygon(polygon_filename):
    with open(polygon_filename) as f:
        return f.readlines()

def clean_poylgon(polygon_data):
    coordinates = polygon_data[2:][:-2]
    coordinates = [item.split(' ') for item in coordinates]
    coordinates = [filter(None, item) for item in coordinates]
    coordinates = [(float(item[0]), float(item[1])) for item in coordinates]
    return coordinates

def write_geojson(data):
    geojson_filename = 'activity.geojson'
    remove_file(geojson_filename)
    schema = {'geometry': 'Polygon','properties': {}}
    with fiona.open(geojson_filename, 'w', 'GeoJSON', schema) as output:
        output.write({'geometry':mapping(Polygon(data)), 'properties':{}})

def main(polygon_filename):
    polygon_data = read_polygon(polygon_filename)
    coordinates = clean_poylgon(polygon_data)
    write_geojson(coordinates)

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("polygon_filename")
    args = parser.parse_args()
    main(args.polygon_filename)
Use output name from input name
import argparse
import os

import fiona
from shapely.geometry import Polygon, mapping

def remove_file(file_name):
    try:
        os.remove(file_name)
    except OSError:
        pass

def read_polygon(polygon_filename):
    with open(polygon_filename) as f:
        return f.readlines()

def clean_poylgon(polygon_data):
    coordinates = polygon_data[2:][:-2]
    coordinates = [item.split(' ') for item in coordinates]
    coordinates = [filter(None, item) for item in coordinates]
    coordinates = [(float(item[0]), float(item[1])) for item in coordinates]
    return coordinates

def write_geojson(data, polygon_filename):
    geojson_filename = polygon_filename.split('.')[0] + ".geojson"
    remove_file(geojson_filename)
    schema = {'geometry': 'Polygon','properties': {}}
    with fiona.open(geojson_filename, 'w', 'GeoJSON', schema) as output:
        output.write({'geometry':mapping(Polygon(data)), 'properties':{}})

def main(polygon_filename):
    polygon_data = read_polygon(polygon_filename)
    coordinates = clean_poylgon(polygon_data)
    write_geojson(coordinates, polygon_filename)

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("polygon_filename")
    args = parser.parse_args()
    main(args.polygon_filename)

<commit_before>import argparse
import os

import fiona
from shapely.geometry import Polygon, mapping

def remove_file(file_name):
    try:
        os.remove(file_name)
    except OSError:
        pass

def read_polygon(polygon_filename):
    with open(polygon_filename) as f:
        return f.readlines()

def clean_poylgon(polygon_data):
    coordinates = polygon_data[2:][:-2]
    coordinates = [item.split(' ') for item in coordinates]
    coordinates = [filter(None, item) for item in coordinates]
    coordinates = [(float(item[0]), float(item[1])) for item in coordinates]
    return coordinates

def write_geojson(data):
    geojson_filename = 'activity.geojson'
    remove_file(geojson_filename)
    schema = {'geometry': 'Polygon','properties': {}}
    with fiona.open(geojson_filename, 'w', 'GeoJSON', schema) as output:
        output.write({'geometry':mapping(Polygon(data)), 'properties':{}})

def main(polygon_filename):
    polygon_data = read_polygon(polygon_filename)
    coordinates = clean_poylgon(polygon_data)
    write_geojson(coordinates)

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("polygon_filename")
    args = parser.parse_args()
    main(args.polygon_filename)
<commit_msg>Use output name from input name<commit_after>import argparse
import os

import fiona
from shapely.geometry import Polygon, mapping

def remove_file(file_name):
    try:
        os.remove(file_name)
    except OSError:
        pass

def read_polygon(polygon_filename):
    with open(polygon_filename) as f:
        return f.readlines()

def clean_poylgon(polygon_data):
    coordinates = polygon_data[2:][:-2]
    coordinates = [item.split(' ') for item in coordinates]
    coordinates = [filter(None, item) for item in coordinates]
    coordinates = [(float(item[0]), float(item[1])) for item in coordinates]
    return coordinates

def write_geojson(data, polygon_filename):
    geojson_filename = polygon_filename.split('.')[0] + ".geojson"
    remove_file(geojson_filename)
    schema = {'geometry': 'Polygon','properties': {}}
    with fiona.open(geojson_filename, 'w', 'GeoJSON', schema) as output:
        output.write({'geometry':mapping(Polygon(data)), 'properties':{}})

def main(polygon_filename):
    polygon_data = read_polygon(polygon_filename)
    coordinates = clean_poylgon(polygon_data)
    write_geojson(coordinates, polygon_filename)

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("polygon_filename")
    args = parser.parse_args()
    main(args.polygon_filename)
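One caveat about the naming scheme introduced above: polygon_filename.split('.')[0] truncates at the first dot anywhere in the path, so './data/area.poly' collapses to an empty string and 'area.v2.poly' loses its version suffix. A safer variant of the same idea, assuming the rest of the script is unchanged:

import os

def geojson_name(polygon_filename):
    # os.path.splitext strips only the final extension, leaving dotted
    # directories and multi-dot basenames intact.
    base, _ = os.path.splitext(polygon_filename)
    return base + '.geojson'

print(geojson_name('./data/area.poly'))  # ./data/area.geojson
print(geojson_name('area.v2.poly'))      # area.v2.geojson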
2b7da7ba1ae2eac069762c221c279aa9f204775d
praw/exceptions.py
praw/exceptions.py
"""PRAW exception classes. Includes two main exceptions: :class:`.APIException` for when something goes wrong on the server side, and :class:`.ClientException` when something goes wrong on the client side. Both of these classes extend :class:`.PRAWException`. """ class PRAWException(Exception): """The base PRAW Exception that all other exception classes extend.""" class APIException(PRAWException): """Indicate exception that involve responses from Reddit's API.""" def __init__(self, error_type, message, field): """Initialize an instance of APIException. :param error_type: The error type set on Reddit's end. :param message: The associated message for the error. :param field: The input field associated with the error if available. .. note: Calling `str()` on the instance returns `unicode_escape`d ASCII string because the message may be localized and may contain UNICODE characters. If you want a non-escaped message, access the `message` atribute on the instance. """ error_str = u'{}: \'{}\''.format(error_type, message) if field: error_str += u' on field \'{}\''.format(field) error_str = error_str.encode('unicode_escape').decode('ascii') super(APIException, self).__init__(error_str) self.error_type = error_type self.message = message self.field = field class ClientException(PRAWException): """Indicate exceptions that don't involve interaction with Reddit's API."""
"""PRAW exception classes. Includes two main exceptions: :class:`.APIException` for when something goes wrong on the server side, and :class:`.ClientException` when something goes wrong on the client side. Both of these classes extend :class:`.PRAWException`. """ class PRAWException(Exception): """The base PRAW Exception that all other exception classes extend.""" class APIException(PRAWException): """Indicate exception that involve responses from Reddit's API.""" def __init__(self, error_type, message, field): """Initialize an instance of APIException. :param error_type: The error type set on Reddit's end. :param message: The associated message for the error. :param field: The input field associated with the error if available. .. note:: Calling ``str()`` on the instance returns ``unicode_escape``-d ASCII string because the message may be localized and may contain UNICODE characters. If you want a non-escaped message, access the ``message`` attribute on the instance. """ error_str = u'{}: \'{}\''.format(error_type, message) if field: error_str += u' on field \'{}\''.format(field) error_str = error_str.encode('unicode_escape').decode('ascii') super(APIException, self).__init__(error_str) self.error_type = error_type self.message = message self.field = field class ClientException(PRAWException): """Indicate exceptions that don't involve interaction with Reddit's API."""
Fix a few Sphinx typos
Fix a few Sphinx typos

* `.. note:` -> `.. note::` to prevent the `note` from being interpreted as a comment, which wouldn't show up when the docs are rendered.
* Double backticks for the code bits.
* Correct typo ("atribute" -> "attribute").
* Sphinx doesn't like characters immediately after the backticks, so add a hyphen in to prevent it from being rendered incorrectly.
Python
bsd-2-clause
gschizas/praw,praw-dev/praw,13steinj/praw,leviroth/praw,gschizas/praw,praw-dev/praw,13steinj/praw,leviroth/praw
"""PRAW exception classes. Includes two main exceptions: :class:`.APIException` for when something goes wrong on the server side, and :class:`.ClientException` when something goes wrong on the client side. Both of these classes extend :class:`.PRAWException`. """ class PRAWException(Exception): """The base PRAW Exception that all other exception classes extend.""" class APIException(PRAWException): """Indicate exception that involve responses from Reddit's API.""" def __init__(self, error_type, message, field): """Initialize an instance of APIException. :param error_type: The error type set on Reddit's end. :param message: The associated message for the error. :param field: The input field associated with the error if available. .. note: Calling `str()` on the instance returns `unicode_escape`d ASCII string because the message may be localized and may contain UNICODE characters. If you want a non-escaped message, access the `message` atribute on the instance. """ error_str = u'{}: \'{}\''.format(error_type, message) if field: error_str += u' on field \'{}\''.format(field) error_str = error_str.encode('unicode_escape').decode('ascii') super(APIException, self).__init__(error_str) self.error_type = error_type self.message = message self.field = field class ClientException(PRAWException): """Indicate exceptions that don't involve interaction with Reddit's API.""" Fix a few Sphinx typos * `.. note:` -> `.. note::` to prevent the `note` from being interpreted as a comment, which wouldn't show up when the docs are rendered. * Double backticks for the code bits. * Correct typo ("atribute" -> "attribute"). * Sphinx doesn't like characters immediately after the backticks, so add a hyphen in to prevent it from being rendered incorrectly.
"""PRAW exception classes. Includes two main exceptions: :class:`.APIException` for when something goes wrong on the server side, and :class:`.ClientException` when something goes wrong on the client side. Both of these classes extend :class:`.PRAWException`. """ class PRAWException(Exception): """The base PRAW Exception that all other exception classes extend.""" class APIException(PRAWException): """Indicate exception that involve responses from Reddit's API.""" def __init__(self, error_type, message, field): """Initialize an instance of APIException. :param error_type: The error type set on Reddit's end. :param message: The associated message for the error. :param field: The input field associated with the error if available. .. note:: Calling ``str()`` on the instance returns ``unicode_escape``-d ASCII string because the message may be localized and may contain UNICODE characters. If you want a non-escaped message, access the ``message`` attribute on the instance. """ error_str = u'{}: \'{}\''.format(error_type, message) if field: error_str += u' on field \'{}\''.format(field) error_str = error_str.encode('unicode_escape').decode('ascii') super(APIException, self).__init__(error_str) self.error_type = error_type self.message = message self.field = field class ClientException(PRAWException): """Indicate exceptions that don't involve interaction with Reddit's API."""
<commit_before>"""PRAW exception classes. Includes two main exceptions: :class:`.APIException` for when something goes wrong on the server side, and :class:`.ClientException` when something goes wrong on the client side. Both of these classes extend :class:`.PRAWException`. """ class PRAWException(Exception): """The base PRAW Exception that all other exception classes extend.""" class APIException(PRAWException): """Indicate exception that involve responses from Reddit's API.""" def __init__(self, error_type, message, field): """Initialize an instance of APIException. :param error_type: The error type set on Reddit's end. :param message: The associated message for the error. :param field: The input field associated with the error if available. .. note: Calling `str()` on the instance returns `unicode_escape`d ASCII string because the message may be localized and may contain UNICODE characters. If you want a non-escaped message, access the `message` atribute on the instance. """ error_str = u'{}: \'{}\''.format(error_type, message) if field: error_str += u' on field \'{}\''.format(field) error_str = error_str.encode('unicode_escape').decode('ascii') super(APIException, self).__init__(error_str) self.error_type = error_type self.message = message self.field = field class ClientException(PRAWException): """Indicate exceptions that don't involve interaction with Reddit's API.""" <commit_msg>Fix a few Sphinx typos * `.. note:` -> `.. note::` to prevent the `note` from being interpreted as a comment, which wouldn't show up when the docs are rendered. * Double backticks for the code bits. * Correct typo ("atribute" -> "attribute"). * Sphinx doesn't like characters immediately after the backticks, so add a hyphen in to prevent it from being rendered incorrectly.<commit_after>
"""PRAW exception classes. Includes two main exceptions: :class:`.APIException` for when something goes wrong on the server side, and :class:`.ClientException` when something goes wrong on the client side. Both of these classes extend :class:`.PRAWException`. """ class PRAWException(Exception): """The base PRAW Exception that all other exception classes extend.""" class APIException(PRAWException): """Indicate exception that involve responses from Reddit's API.""" def __init__(self, error_type, message, field): """Initialize an instance of APIException. :param error_type: The error type set on Reddit's end. :param message: The associated message for the error. :param field: The input field associated with the error if available. .. note:: Calling ``str()`` on the instance returns ``unicode_escape``-d ASCII string because the message may be localized and may contain UNICODE characters. If you want a non-escaped message, access the ``message`` attribute on the instance. """ error_str = u'{}: \'{}\''.format(error_type, message) if field: error_str += u' on field \'{}\''.format(field) error_str = error_str.encode('unicode_escape').decode('ascii') super(APIException, self).__init__(error_str) self.error_type = error_type self.message = message self.field = field class ClientException(PRAWException): """Indicate exceptions that don't involve interaction with Reddit's API."""
"""PRAW exception classes. Includes two main exceptions: :class:`.APIException` for when something goes wrong on the server side, and :class:`.ClientException` when something goes wrong on the client side. Both of these classes extend :class:`.PRAWException`. """ class PRAWException(Exception): """The base PRAW Exception that all other exception classes extend.""" class APIException(PRAWException): """Indicate exception that involve responses from Reddit's API.""" def __init__(self, error_type, message, field): """Initialize an instance of APIException. :param error_type: The error type set on Reddit's end. :param message: The associated message for the error. :param field: The input field associated with the error if available. .. note: Calling `str()` on the instance returns `unicode_escape`d ASCII string because the message may be localized and may contain UNICODE characters. If you want a non-escaped message, access the `message` atribute on the instance. """ error_str = u'{}: \'{}\''.format(error_type, message) if field: error_str += u' on field \'{}\''.format(field) error_str = error_str.encode('unicode_escape').decode('ascii') super(APIException, self).__init__(error_str) self.error_type = error_type self.message = message self.field = field class ClientException(PRAWException): """Indicate exceptions that don't involve interaction with Reddit's API.""" Fix a few Sphinx typos * `.. note:` -> `.. note::` to prevent the `note` from being interpreted as a comment, which wouldn't show up when the docs are rendered. * Double backticks for the code bits. * Correct typo ("atribute" -> "attribute"). * Sphinx doesn't like characters immediately after the backticks, so add a hyphen in to prevent it from being rendered incorrectly."""PRAW exception classes. Includes two main exceptions: :class:`.APIException` for when something goes wrong on the server side, and :class:`.ClientException` when something goes wrong on the client side. Both of these classes extend :class:`.PRAWException`. """ class PRAWException(Exception): """The base PRAW Exception that all other exception classes extend.""" class APIException(PRAWException): """Indicate exception that involve responses from Reddit's API.""" def __init__(self, error_type, message, field): """Initialize an instance of APIException. :param error_type: The error type set on Reddit's end. :param message: The associated message for the error. :param field: The input field associated with the error if available. .. note:: Calling ``str()`` on the instance returns ``unicode_escape``-d ASCII string because the message may be localized and may contain UNICODE characters. If you want a non-escaped message, access the ``message`` attribute on the instance. """ error_str = u'{}: \'{}\''.format(error_type, message) if field: error_str += u' on field \'{}\''.format(field) error_str = error_str.encode('unicode_escape').decode('ascii') super(APIException, self).__init__(error_str) self.error_type = error_type self.message = message self.field = field class ClientException(PRAWException): """Indicate exceptions that don't involve interaction with Reddit's API."""
<commit_before>"""PRAW exception classes. Includes two main exceptions: :class:`.APIException` for when something goes wrong on the server side, and :class:`.ClientException` when something goes wrong on the client side. Both of these classes extend :class:`.PRAWException`. """ class PRAWException(Exception): """The base PRAW Exception that all other exception classes extend.""" class APIException(PRAWException): """Indicate exception that involve responses from Reddit's API.""" def __init__(self, error_type, message, field): """Initialize an instance of APIException. :param error_type: The error type set on Reddit's end. :param message: The associated message for the error. :param field: The input field associated with the error if available. .. note: Calling `str()` on the instance returns `unicode_escape`d ASCII string because the message may be localized and may contain UNICODE characters. If you want a non-escaped message, access the `message` atribute on the instance. """ error_str = u'{}: \'{}\''.format(error_type, message) if field: error_str += u' on field \'{}\''.format(field) error_str = error_str.encode('unicode_escape').decode('ascii') super(APIException, self).__init__(error_str) self.error_type = error_type self.message = message self.field = field class ClientException(PRAWException): """Indicate exceptions that don't involve interaction with Reddit's API.""" <commit_msg>Fix a few Sphinx typos * `.. note:` -> `.. note::` to prevent the `note` from being interpreted as a comment, which wouldn't show up when the docs are rendered. * Double backticks for the code bits. * Correct typo ("atribute" -> "attribute"). * Sphinx doesn't like characters immediately after the backticks, so add a hyphen in to prevent it from being rendered incorrectly.<commit_after>"""PRAW exception classes. Includes two main exceptions: :class:`.APIException` for when something goes wrong on the server side, and :class:`.ClientException` when something goes wrong on the client side. Both of these classes extend :class:`.PRAWException`. """ class PRAWException(Exception): """The base PRAW Exception that all other exception classes extend.""" class APIException(PRAWException): """Indicate exception that involve responses from Reddit's API.""" def __init__(self, error_type, message, field): """Initialize an instance of APIException. :param error_type: The error type set on Reddit's end. :param message: The associated message for the error. :param field: The input field associated with the error if available. .. note:: Calling ``str()`` on the instance returns ``unicode_escape``-d ASCII string because the message may be localized and may contain UNICODE characters. If you want a non-escaped message, access the ``message`` attribute on the instance. """ error_str = u'{}: \'{}\''.format(error_type, message) if field: error_str += u' on field \'{}\''.format(field) error_str = error_str.encode('unicode_escape').decode('ascii') super(APIException, self).__init__(error_str) self.error_type = error_type self.message = message self.field = field class ClientException(PRAWException): """Indicate exceptions that don't involve interaction with Reddit's API."""
c19b161442484e1ad58ac5bb1abc074be9e5ac7f
test/testproject/settings.py
test/testproject/settings.py
import os

DEBUG = True

DATABASE_ENGINE = 'sqlite3'
DATABASE_NAME = os.path.join(os.path.dirname(__file__), 'test.db')

ROOT_URLCONF = 'testproject.urls'

TEMPLATE_DIRS = (
    os.path.join(os.path.dirname(__file__), 'templates'),
)

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.sessions',
    'django.contrib.contenttypes',  # Required for auth
    'django.contrib.admin',  # Required by django.contrib.auth tests
    'django.contrib.sites',  # Required by django.contrib.auth tests
    'urldecorators',  # Optional, only for Django test runner
)

SITE_ID = 1

LOGIN_URL = '/login/'

import os

DEBUG = True

# Django 1.2 up
DATABASE_ENGINE = 'django.db.backends.sqlite3'
DATABASE_NAME = os.path.join(os.path.dirname(__file__), 'test.db')

# Django 1.1 and 1.0
DATABASES = {
    'default': {
        'ENGINE': DATABASE_ENGINE,
        'NAME': DATABASE_NAME
    }
}

ROOT_URLCONF = 'testproject.urls'

TEMPLATE_DIRS = (
    os.path.join(os.path.dirname(__file__), 'templates'),
)

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.sessions',
    'django.contrib.contenttypes',  # Required for auth
    'django.contrib.admin',  # Required by django.contrib.auth tests
    'django.contrib.sites',  # Required by django.contrib.auth tests
    'urldecorators',  # Optional, only for Django test runner
)

SITE_ID = 1

LOGIN_URL = '/login/'
Fix deprecation warning when running tests with Django 1.3.
Fix deprecation warning when running tests with Django 1.3.
Python
bsd-3-clause
mila/django-urldecorators,mila/django-urldecorators
import os

DEBUG = True

DATABASE_ENGINE = 'sqlite3'
DATABASE_NAME = os.path.join(os.path.dirname(__file__), 'test.db')

ROOT_URLCONF = 'testproject.urls'

TEMPLATE_DIRS = (
    os.path.join(os.path.dirname(__file__), 'templates'),
)

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.sessions',
    'django.contrib.contenttypes',  # Required for auth
    'django.contrib.admin',  # Required by django.contrib.auth tests
    'django.contrib.sites',  # Required by django.contrib.auth tests
    'urldecorators',  # Optional, only for Django test runner
)

SITE_ID = 1

LOGIN_URL = '/login/'
Fix deprecation warning when running tests with Django 1.3.

import os

DEBUG = True

# Django 1.2 up
DATABASE_ENGINE = 'django.db.backends.sqlite3'
DATABASE_NAME = os.path.join(os.path.dirname(__file__), 'test.db')

# Django 1.1 and 1.0
DATABASES = {
    'default': {
        'ENGINE': DATABASE_ENGINE,
        'NAME': DATABASE_NAME
    }
}

ROOT_URLCONF = 'testproject.urls'

TEMPLATE_DIRS = (
    os.path.join(os.path.dirname(__file__), 'templates'),
)

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.sessions',
    'django.contrib.contenttypes',  # Required for auth
    'django.contrib.admin',  # Required by django.contrib.auth tests
    'django.contrib.sites',  # Required by django.contrib.auth tests
    'urldecorators',  # Optional, only for Django test runner
)

SITE_ID = 1

LOGIN_URL = '/login/'

<commit_before>import os

DEBUG = True

DATABASE_ENGINE = 'sqlite3'
DATABASE_NAME = os.path.join(os.path.dirname(__file__), 'test.db')

ROOT_URLCONF = 'testproject.urls'

TEMPLATE_DIRS = (
    os.path.join(os.path.dirname(__file__), 'templates'),
)

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.sessions',
    'django.contrib.contenttypes',  # Required for auth
    'django.contrib.admin',  # Required by django.contrib.auth tests
    'django.contrib.sites',  # Required by django.contrib.auth tests
    'urldecorators',  # Optional, only for Django test runner
)

SITE_ID = 1

LOGIN_URL = '/login/'
<commit_msg>Fix deprecation warning when running tests with Django 1.3.<commit_after>

import os

DEBUG = True

# Django 1.2 up
DATABASE_ENGINE = 'django.db.backends.sqlite3'
DATABASE_NAME = os.path.join(os.path.dirname(__file__), 'test.db')

# Django 1.1 and 1.0
DATABASES = {
    'default': {
        'ENGINE': DATABASE_ENGINE,
        'NAME': DATABASE_NAME
    }
}

ROOT_URLCONF = 'testproject.urls'

TEMPLATE_DIRS = (
    os.path.join(os.path.dirname(__file__), 'templates'),
)

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.sessions',
    'django.contrib.contenttypes',  # Required for auth
    'django.contrib.admin',  # Required by django.contrib.auth tests
    'django.contrib.sites',  # Required by django.contrib.auth tests
    'urldecorators',  # Optional, only for Django test runner
)

SITE_ID = 1

LOGIN_URL = '/login/'

import os

DEBUG = True

DATABASE_ENGINE = 'sqlite3'
DATABASE_NAME = os.path.join(os.path.dirname(__file__), 'test.db')

ROOT_URLCONF = 'testproject.urls'

TEMPLATE_DIRS = (
    os.path.join(os.path.dirname(__file__), 'templates'),
)

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.sessions',
    'django.contrib.contenttypes',  # Required for auth
    'django.contrib.admin',  # Required by django.contrib.auth tests
    'django.contrib.sites',  # Required by django.contrib.auth tests
    'urldecorators',  # Optional, only for Django test runner
)

SITE_ID = 1

LOGIN_URL = '/login/'
Fix deprecation warning when running tests with Django 1.3.
import os

DEBUG = True

# Django 1.2 up
DATABASE_ENGINE = 'django.db.backends.sqlite3'
DATABASE_NAME = os.path.join(os.path.dirname(__file__), 'test.db')

# Django 1.1 and 1.0
DATABASES = {
    'default': {
        'ENGINE': DATABASE_ENGINE,
        'NAME': DATABASE_NAME
    }
}

ROOT_URLCONF = 'testproject.urls'

TEMPLATE_DIRS = (
    os.path.join(os.path.dirname(__file__), 'templates'),
)

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.sessions',
    'django.contrib.contenttypes',  # Required for auth
    'django.contrib.admin',  # Required by django.contrib.auth tests
    'django.contrib.sites',  # Required by django.contrib.auth tests
    'urldecorators',  # Optional, only for Django test runner
)

SITE_ID = 1

LOGIN_URL = '/login/'

<commit_before>import os

DEBUG = True

DATABASE_ENGINE = 'sqlite3'
DATABASE_NAME = os.path.join(os.path.dirname(__file__), 'test.db')

ROOT_URLCONF = 'testproject.urls'

TEMPLATE_DIRS = (
    os.path.join(os.path.dirname(__file__), 'templates'),
)

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.sessions',
    'django.contrib.contenttypes',  # Required for auth
    'django.contrib.admin',  # Required by django.contrib.auth tests
    'django.contrib.sites',  # Required by django.contrib.auth tests
    'urldecorators',  # Optional, only for Django test runner
)

SITE_ID = 1

LOGIN_URL = '/login/'
<commit_msg>Fix deprecation warning when running tests with Django 1.3.<commit_after>
import os

DEBUG = True

# Django 1.2 up
DATABASE_ENGINE = 'django.db.backends.sqlite3'
DATABASE_NAME = os.path.join(os.path.dirname(__file__), 'test.db')

# Django 1.1 and 1.0
DATABASES = {
    'default': {
        'ENGINE': DATABASE_ENGINE,
        'NAME': DATABASE_NAME
    }
}

ROOT_URLCONF = 'testproject.urls'

TEMPLATE_DIRS = (
    os.path.join(os.path.dirname(__file__), 'templates'),
)

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.sessions',
    'django.contrib.contenttypes',  # Required for auth
    'django.contrib.admin',  # Required by django.contrib.auth tests
    'django.contrib.sites',  # Required by django.contrib.auth tests
    'urldecorators',  # Optional, only for Django test runner
)

SITE_ID = 1

LOGIN_URL = '/login/'
1897daadd1ea435b381c37f845545fad4becc578
censusreporter/config/prod/settings.py
censusreporter/config/prod/settings.py
from censusreporter.config.base.settings import * import os DEBUG = False ROOT_URLCONF = 'censusreporter.config.prod.urls' WSGI_APPLICATION = "censusreporter.config.prod.wsgi.application" ALLOWED_HOSTS = ['*'] CACHES = { 'default': { 'BACKEND': 'redis_cache.RedisCache', 'LOCATION': os.environ.get('REDIS_URL', ''), } }
from censusreporter.config.base.settings import * import os DEBUG = False ROOT_URLCONF = 'censusreporter.config.prod.urls' WSGI_APPLICATION = "censusreporter.config.prod.wsgi.application" ALLOWED_HOSTS = ['*'] CACHES = { 'default': { 'BACKEND': 'redis_cache.RedisCache', 'LOCATION': os.environ.get('REDIS_URL', ''), # This library defaults to using db 1, and I want it in db 0 'OPTIONS': { 'DB': 0, }, } }
Use the same Redis DB as census-api to make stats collection easier
Use the same Redis DB as census-api to make stats collection easier
Python
mit
censusreporter/censusreporter,censusreporter/censusreporter,censusreporter/censusreporter,censusreporter/censusreporter
from censusreporter.config.base.settings import * import os DEBUG = False ROOT_URLCONF = 'censusreporter.config.prod.urls' WSGI_APPLICATION = "censusreporter.config.prod.wsgi.application" ALLOWED_HOSTS = ['*'] CACHES = { 'default': { 'BACKEND': 'redis_cache.RedisCache', 'LOCATION': os.environ.get('REDIS_URL', ''), } } Use the same Redis DB as census-api to make stats collection easier
from censusreporter.config.base.settings import * import os DEBUG = False ROOT_URLCONF = 'censusreporter.config.prod.urls' WSGI_APPLICATION = "censusreporter.config.prod.wsgi.application" ALLOWED_HOSTS = ['*'] CACHES = { 'default': { 'BACKEND': 'redis_cache.RedisCache', 'LOCATION': os.environ.get('REDIS_URL', ''), # This library defaults to using db 1, and I want it in db 0 'OPTIONS': { 'DB': 0, }, } }
<commit_before>from censusreporter.config.base.settings import * import os DEBUG = False ROOT_URLCONF = 'censusreporter.config.prod.urls' WSGI_APPLICATION = "censusreporter.config.prod.wsgi.application" ALLOWED_HOSTS = ['*'] CACHES = { 'default': { 'BACKEND': 'redis_cache.RedisCache', 'LOCATION': os.environ.get('REDIS_URL', ''), } } <commit_msg>Use the same Redis DB as census-api to make stats collection easier<commit_after>
from censusreporter.config.base.settings import * import os DEBUG = False ROOT_URLCONF = 'censusreporter.config.prod.urls' WSGI_APPLICATION = "censusreporter.config.prod.wsgi.application" ALLOWED_HOSTS = ['*'] CACHES = { 'default': { 'BACKEND': 'redis_cache.RedisCache', 'LOCATION': os.environ.get('REDIS_URL', ''), # This library defaults to using db 1, and I want it in db 0 'OPTIONS': { 'DB': 0, }, } }
from censusreporter.config.base.settings import * import os DEBUG = False ROOT_URLCONF = 'censusreporter.config.prod.urls' WSGI_APPLICATION = "censusreporter.config.prod.wsgi.application" ALLOWED_HOSTS = ['*'] CACHES = { 'default': { 'BACKEND': 'redis_cache.RedisCache', 'LOCATION': os.environ.get('REDIS_URL', ''), } } Use the same Redis DB as census-api to make stats collection easierfrom censusreporter.config.base.settings import * import os DEBUG = False ROOT_URLCONF = 'censusreporter.config.prod.urls' WSGI_APPLICATION = "censusreporter.config.prod.wsgi.application" ALLOWED_HOSTS = ['*'] CACHES = { 'default': { 'BACKEND': 'redis_cache.RedisCache', 'LOCATION': os.environ.get('REDIS_URL', ''), # This library defaults to using db 1, and I want it in db 0 'OPTIONS': { 'DB': 0, }, } }
<commit_before>from censusreporter.config.base.settings import * import os DEBUG = False ROOT_URLCONF = 'censusreporter.config.prod.urls' WSGI_APPLICATION = "censusreporter.config.prod.wsgi.application" ALLOWED_HOSTS = ['*'] CACHES = { 'default': { 'BACKEND': 'redis_cache.RedisCache', 'LOCATION': os.environ.get('REDIS_URL', ''), } } <commit_msg>Use the same Redis DB as census-api to make stats collection easier<commit_after>from censusreporter.config.base.settings import * import os DEBUG = False ROOT_URLCONF = 'censusreporter.config.prod.urls' WSGI_APPLICATION = "censusreporter.config.prod.wsgi.application" ALLOWED_HOSTS = ['*'] CACHES = { 'default': { 'BACKEND': 'redis_cache.RedisCache', 'LOCATION': os.environ.get('REDIS_URL', ''), # This library defaults to using db 1, and I want it in db 0 'OPTIONS': { 'DB': 0, }, } }
ca1b92118d0c432484b3ac7f59924a1a65a59e17
irco/utils.py
irco/utils.py
import os import glob from irco import parser, tabular def get_file_list(sources): for source in sources: if os.path.isdir(source): for path in glob.glob(os.path.join(source, '*.txt')): yield path elif os.path.isfile(source): yield source def get_dataset(source, records=None): table = tabular.Table(notset=None) for path in get_file_list(source): with open(path) as fh: for record in parser.parse(fh, records): table.add(record) return table.dataset()
import os from irco import parser, tabular def get_file_list(sources): for source in sources: if os.path.isdir(source): for path in sorted(os.listdir(source)): _, ext = os.path.splitext(path) if ext not in ('.txt', '.csv', '.tsv'): continue path = os.path.join(source, path) yield path elif os.path.isfile(source): yield source def get_dataset(source, records=None): table = tabular.Table(notset=None) for path in get_file_list(source): with open(path) as fh: for record in parser.parse(fh, records): table.add(record) return table.dataset()
Support recursive import of CSV and TSV files as well as TXT ones.
Support recursive import of CSV and TSV files as well as TXT ones.
Python
mit
GaretJax/irco,GaretJax/irco,GaretJax/irco,GaretJax/irco
import os import glob from irco import parser, tabular def get_file_list(sources): for source in sources: if os.path.isdir(source): for path in glob.glob(os.path.join(source, '*.txt')): yield path elif os.path.isfile(source): yield source def get_dataset(source, records=None): table = tabular.Table(notset=None) for path in get_file_list(source): with open(path) as fh: for record in parser.parse(fh, records): table.add(record) return table.dataset() Support recursive import of CSV and TSV files as well as TXT ones.
import os from irco import parser, tabular def get_file_list(sources): for source in sources: if os.path.isdir(source): for path in sorted(os.listdir(source)): _, ext = os.path.splitext(path) if ext not in ('.txt', '.csv', '.tsv'): continue path = os.path.join(source, path) yield path elif os.path.isfile(source): yield source def get_dataset(source, records=None): table = tabular.Table(notset=None) for path in get_file_list(source): with open(path) as fh: for record in parser.parse(fh, records): table.add(record) return table.dataset()
<commit_before>import os import glob from irco import parser, tabular def get_file_list(sources): for source in sources: if os.path.isdir(source): for path in glob.glob(os.path.join(source, '*.txt')): yield path elif os.path.isfile(source): yield source def get_dataset(source, records=None): table = tabular.Table(notset=None) for path in get_file_list(source): with open(path) as fh: for record in parser.parse(fh, records): table.add(record) return table.dataset() <commit_msg>Support recursive import of CSV and TSV files as well as TXT ones.<commit_after>
import os from irco import parser, tabular def get_file_list(sources): for source in sources: if os.path.isdir(source): for path in sorted(os.listdir(source)): _, ext = os.path.splitext(path) if ext not in ('.txt', '.csv', '.tsv'): continue path = os.path.join(source, path) yield path elif os.path.isfile(source): yield source def get_dataset(source, records=None): table = tabular.Table(notset=None) for path in get_file_list(source): with open(path) as fh: for record in parser.parse(fh, records): table.add(record) return table.dataset()
import os import glob from irco import parser, tabular def get_file_list(sources): for source in sources: if os.path.isdir(source): for path in glob.glob(os.path.join(source, '*.txt')): yield path elif os.path.isfile(source): yield source def get_dataset(source, records=None): table = tabular.Table(notset=None) for path in get_file_list(source): with open(path) as fh: for record in parser.parse(fh, records): table.add(record) return table.dataset() Support recursive import of CSV and TSV files as well as TXT ones.import os from irco import parser, tabular def get_file_list(sources): for source in sources: if os.path.isdir(source): for path in sorted(os.listdir(source)): _, ext = os.path.splitext(path) if ext not in ('.txt', '.csv', '.tsv'): continue path = os.path.join(source, path) yield path elif os.path.isfile(source): yield source def get_dataset(source, records=None): table = tabular.Table(notset=None) for path in get_file_list(source): with open(path) as fh: for record in parser.parse(fh, records): table.add(record) return table.dataset()
<commit_before>import os import glob from irco import parser, tabular def get_file_list(sources): for source in sources: if os.path.isdir(source): for path in glob.glob(os.path.join(source, '*.txt')): yield path elif os.path.isfile(source): yield source def get_dataset(source, records=None): table = tabular.Table(notset=None) for path in get_file_list(source): with open(path) as fh: for record in parser.parse(fh, records): table.add(record) return table.dataset() <commit_msg>Support recursive import of CSV and TSV files as well as TXT ones.<commit_after>import os from irco import parser, tabular def get_file_list(sources): for source in sources: if os.path.isdir(source): for path in sorted(os.listdir(source)): _, ext = os.path.splitext(path) if ext not in ('.txt', '.csv', '.tsv'): continue path = os.path.join(source, path) yield path elif os.path.isfile(source): yield source def get_dataset(source, records=None): table = tabular.Table(notset=None) for path in get_file_list(source): with open(path) as fh: for record in parser.parse(fh, records): table.add(record) return table.dataset()
bc6392560ea87c74d6c6a94812b6caba7d6c2954
django_elect/settings.py
django_elect/settings.py
from django.conf import settings


"""
A string that corresponds to the path to the model that should be used for the
Election.allowed_voters and Vote.account foreign keys. This is mainly for
sites that extend the User model via inheritance, as detailed at
http://scottbarnham.com/blog/2008/08/21/extending-the-django-user-model-with-inheritance/
"""
DJANGO_ELECT_USER_MODEL = getattr(settings, 'DJANGO_ELECT_USER_MODEL',
    'auth.User')

"""
List of tuples to pass to Migration.dependencies for django_elect migrations
"""
DJANGO_ELECT_MIGRATION_DEPENDENCIES = getattr(settings,
    'DJANGO_ELECT_MIGRATION_DEPENDENCIES', [('auth', '0001_initial')])

"""
URL to redirect voters to who are not logged in.
"""
LOGIN_URL = getattr(settings, 'LOGIN_URL', '/account/')
from django.conf import settings


"""
A string that corresponds to the path to the model that should be used for the
Election.allowed_voters and Vote.account foreign keys. This is mainly for
sites that extend the User model via inheritance, as detailed at
http://scottbarnham.com/blog/2008/08/21/extending-the-django-user-model-with-inheritance/
"""
DJANGO_ELECT_USER_MODEL = getattr(settings, 'DJANGO_ELECT_USER_MODEL',
    settings.AUTH_USER_MODEL)

"""
List of tuples to pass to Migration.dependencies for django_elect migrations
"""
DJANGO_ELECT_MIGRATION_DEPENDENCIES = getattr(settings,
    'DJANGO_ELECT_MIGRATION_DEPENDENCIES', [('auth', '0001_initial')])

"""
URL to redirect voters to who are not logged in.
"""
LOGIN_URL = getattr(settings, 'LOGIN_URL', '/account/')
Change DJANGO_ELECT_USER_MODEL to default to AUTH_USER_MODEL
Change DJANGO_ELECT_USER_MODEL to default to AUTH_USER_MODEL
Python
bsd-3-clause
MasonM/django-elect,MasonM/django-elect,MasonM/django-elect
from django.conf import settings


"""
A string that corresponds to the path to the model that should be used for the
Election.allowed_voters and Vote.account foreign keys. This is mainly for
sites that extend the User model via inheritance, as detailed at
http://scottbarnham.com/blog/2008/08/21/extending-the-django-user-model-with-inheritance/
"""
DJANGO_ELECT_USER_MODEL = getattr(settings, 'DJANGO_ELECT_USER_MODEL',
    'auth.User')

"""
List of tuples to pass to Migration.dependencies for django_elect migrations
"""
DJANGO_ELECT_MIGRATION_DEPENDENCIES = getattr(settings,
    'DJANGO_ELECT_MIGRATION_DEPENDENCIES', [('auth', '0001_initial')])

"""
URL to redirect voters to who are not logged in.
"""
LOGIN_URL = getattr(settings, 'LOGIN_URL', '/account/')
Change DJANGO_ELECT_USER_MODEL to default to AUTH_USER_MODEL
from django.conf import settings


"""
A string that corresponds to the path to the model that should be used for the
Election.allowed_voters and Vote.account foreign keys. This is mainly for
sites that extend the User model via inheritance, as detailed at
http://scottbarnham.com/blog/2008/08/21/extending-the-django-user-model-with-inheritance/
"""
DJANGO_ELECT_USER_MODEL = getattr(settings, 'DJANGO_ELECT_USER_MODEL',
    settings.AUTH_USER_MODEL)

"""
List of tuples to pass to Migration.dependencies for django_elect migrations
"""
DJANGO_ELECT_MIGRATION_DEPENDENCIES = getattr(settings,
    'DJANGO_ELECT_MIGRATION_DEPENDENCIES', [('auth', '0001_initial')])

"""
URL to redirect voters to who are not logged in.
"""
LOGIN_URL = getattr(settings, 'LOGIN_URL', '/account/')
<commit_before>from django.conf import settings


"""
A string that corresponds to the path to the model that should be used for the
Election.allowed_voters and Vote.account foreign keys. This is mainly for
sites that extend the User model via inheritance, as detailed at
http://scottbarnham.com/blog/2008/08/21/extending-the-django-user-model-with-inheritance/
"""
DJANGO_ELECT_USER_MODEL = getattr(settings, 'DJANGO_ELECT_USER_MODEL',
    'auth.User')

"""
List of tuples to pass to Migration.dependencies for django_elect migrations
"""
DJANGO_ELECT_MIGRATION_DEPENDENCIES = getattr(settings,
    'DJANGO_ELECT_MIGRATION_DEPENDENCIES', [('auth', '0001_initial')])

"""
URL to redirect voters to who are not logged in.
"""
LOGIN_URL = getattr(settings, 'LOGIN_URL', '/account/')
<commit_msg>Change DJANGO_ELECT_USER_MODEL to default to AUTH_USER_MODEL<commit_after>
from django.conf import settings


"""
A string that corresponds to the path to the model that should be used for the
Election.allowed_voters and Vote.account foreign keys. This is mainly for
sites that extend the User model via inheritance, as detailed at
http://scottbarnham.com/blog/2008/08/21/extending-the-django-user-model-with-inheritance/
"""
DJANGO_ELECT_USER_MODEL = getattr(settings, 'DJANGO_ELECT_USER_MODEL',
    settings.AUTH_USER_MODEL)

"""
List of tuples to pass to Migration.dependencies for django_elect migrations
"""
DJANGO_ELECT_MIGRATION_DEPENDENCIES = getattr(settings,
    'DJANGO_ELECT_MIGRATION_DEPENDENCIES', [('auth', '0001_initial')])

"""
URL to redirect voters to who are not logged in.
"""
LOGIN_URL = getattr(settings, 'LOGIN_URL', '/account/')
from django.conf import settings


"""
A string that corresponds to the path to the model that should be used for the
Election.allowed_voters and Vote.account foreign keys. This is mainly for
sites that extend the User model via inheritance, as detailed at
http://scottbarnham.com/blog/2008/08/21/extending-the-django-user-model-with-inheritance/
"""
DJANGO_ELECT_USER_MODEL = getattr(settings, 'DJANGO_ELECT_USER_MODEL',
    'auth.User')

"""
List of tuples to pass to Migration.dependencies for django_elect migrations
"""
DJANGO_ELECT_MIGRATION_DEPENDENCIES = getattr(settings,
    'DJANGO_ELECT_MIGRATION_DEPENDENCIES', [('auth', '0001_initial')])

"""
URL to redirect voters to who are not logged in.
"""
LOGIN_URL = getattr(settings, 'LOGIN_URL', '/account/')
Change DJANGO_ELECT_USER_MODEL to default to AUTH_USER_MODELfrom django.conf import settings


"""
A string that corresponds to the path to the model that should be used for the
Election.allowed_voters and Vote.account foreign keys. This is mainly for
sites that extend the User model via inheritance, as detailed at
http://scottbarnham.com/blog/2008/08/21/extending-the-django-user-model-with-inheritance/
"""
DJANGO_ELECT_USER_MODEL = getattr(settings, 'DJANGO_ELECT_USER_MODEL',
    settings.AUTH_USER_MODEL)

"""
List of tuples to pass to Migration.dependencies for django_elect migrations
"""
DJANGO_ELECT_MIGRATION_DEPENDENCIES = getattr(settings,
    'DJANGO_ELECT_MIGRATION_DEPENDENCIES', [('auth', '0001_initial')])

"""
URL to redirect voters to who are not logged in.
"""
LOGIN_URL = getattr(settings, 'LOGIN_URL', '/account/')
<commit_before>from django.conf import settings


"""
A string that corresponds to the path to the model that should be used for the
Election.allowed_voters and Vote.account foreign keys. This is mainly for
sites that extend the User model via inheritance, as detailed at
http://scottbarnham.com/blog/2008/08/21/extending-the-django-user-model-with-inheritance/
"""
DJANGO_ELECT_USER_MODEL = getattr(settings, 'DJANGO_ELECT_USER_MODEL',
    'auth.User')

"""
List of tuples to pass to Migration.dependencies for django_elect migrations
"""
DJANGO_ELECT_MIGRATION_DEPENDENCIES = getattr(settings,
    'DJANGO_ELECT_MIGRATION_DEPENDENCIES', [('auth', '0001_initial')])

"""
URL to redirect voters to who are not logged in.
"""
LOGIN_URL = getattr(settings, 'LOGIN_URL', '/account/')
<commit_msg>Change DJANGO_ELECT_USER_MODEL to default to AUTH_USER_MODEL<commit_after>from django.conf import settings


"""
A string that corresponds to the path to the model that should be used for the
Election.allowed_voters and Vote.account foreign keys. This is mainly for
sites that extend the User model via inheritance, as detailed at
http://scottbarnham.com/blog/2008/08/21/extending-the-django-user-model-with-inheritance/
"""
DJANGO_ELECT_USER_MODEL = getattr(settings, 'DJANGO_ELECT_USER_MODEL',
    settings.AUTH_USER_MODEL)

"""
List of tuples to pass to Migration.dependencies for django_elect migrations
"""
DJANGO_ELECT_MIGRATION_DEPENDENCIES = getattr(settings,
    'DJANGO_ELECT_MIGRATION_DEPENDENCIES', [('auth', '0001_initial')])

"""
URL to redirect voters to who are not logged in.
"""
LOGIN_URL = getattr(settings, 'LOGIN_URL', '/account/')
dcaf7b95264c0d8678bc36e47a14fa6f15175e40
pylearn2/tests/test_dbm_metrics.py
pylearn2/tests/test_dbm_metrics.py
""" Test dbm_metrics script """ from pylearn2.scripts.dbm import dbm_metrics from pylearn2.datasets.mnist import MNIST def test_ais(): """ Test ais computation """ w_list = [None] b_list = [] # Add parameters import trainset = MNIST(which_set='train') testset = MNIST(which_set='test') train_ll, test_ll, log_z = dbm_metrics.estimate_likelihood(w_list, b_list, trainset, testset, pos_mf_steps=5) # Add log_z, test_ll import russ_log_z = 100. russ_train_ll = -100. russ_test_ll = -100. assert log_z == russ_log_z assert train_ll == russ_train_ll assert test_ll == russ_test_ll
""" Test dbm_metrics script """ from pylearn2.scripts.dbm import dbm_metrics from pylearn2.datasets.mnist import MNIST def test_ais(): """ Test ais computation by comparing the output of estimate_likelihood to Russ's code's output for the same parameters. """ w_list = [None] b_list = [] # Add parameters import trainset = MNIST(which_set='train') testset = MNIST(which_set='test') train_ll, test_ll, log_z = dbm_metrics.estimate_likelihood(w_list, b_list, trainset, testset, pos_mf_steps=5) # Add log_z, test_ll import russ_log_z = 100. russ_train_ll = -100. russ_test_ll = -100. assert log_z == russ_log_z assert train_ll == russ_train_ll assert test_ll == russ_test_ll
Add more info to test_ais docstring
Add more info to test_ais docstring
Python
bsd-3-clause
woozzu/pylearn2,JesseLivezey/plankton,fulmicoton/pylearn2,caidongyun/pylearn2,sandeepkbhat/pylearn2,cosmoharrigan/pylearn2,mkraemer67/pylearn2,ddboline/pylearn2,kastnerkyle/pylearn2,pkainz/pylearn2,lisa-lab/pylearn2,hantek/pylearn2,bartvm/pylearn2,mclaughlin6464/pylearn2,aalmah/pylearn2,fishcorn/pylearn2,fyffyt/pylearn2,sandeepkbhat/pylearn2,pombredanne/pylearn2,lunyang/pylearn2,daemonmaker/pylearn2,sandeepkbhat/pylearn2,lancezlin/pylearn2,fishcorn/pylearn2,pkainz/pylearn2,ashhher3/pylearn2,JesseLivezey/pylearn2,skearnes/pylearn2,theoryno3/pylearn2,alexjc/pylearn2,TNick/pylearn2,bartvm/pylearn2,hyqneuron/pylearn2-maxsom,CIFASIS/pylearn2,ashhher3/pylearn2,abergeron/pylearn2,matrogers/pylearn2,jeremyfix/pylearn2,theoryno3/pylearn2,mkraemer67/pylearn2,kose-y/pylearn2,alexjc/pylearn2,lancezlin/pylearn2,lisa-lab/pylearn2,w1kke/pylearn2,abergeron/pylearn2,kastnerkyle/pylearn2,w1kke/pylearn2,KennethPierce/pylearnk,lunyang/pylearn2,woozzu/pylearn2,msingh172/pylearn2,kose-y/pylearn2,fulmicoton/pylearn2,bartvm/pylearn2,pombredanne/pylearn2,jeremyfix/pylearn2,hyqneuron/pylearn2-maxsom,theoryno3/pylearn2,se4u/pylearn2,lancezlin/pylearn2,nouiz/pylearn2,fyffyt/pylearn2,fyffyt/pylearn2,chrish42/pylearn,msingh172/pylearn2,JesseLivezey/plankton,shiquanwang/pylearn2,lamblin/pylearn2,JesseLivezey/pylearn2,sandeepkbhat/pylearn2,ddboline/pylearn2,lancezlin/pylearn2,ddboline/pylearn2,jeremyfix/pylearn2,junbochen/pylearn2,jamessergeant/pylearn2,goodfeli/pylearn2,jamessergeant/pylearn2,jamessergeant/pylearn2,mclaughlin6464/pylearn2,jeremyfix/pylearn2,shiquanwang/pylearn2,shiquanwang/pylearn2,Refefer/pylearn2,fishcorn/pylearn2,pombredanne/pylearn2,caidongyun/pylearn2,hantek/pylearn2,junbochen/pylearn2,hantek/pylearn2,daemonmaker/pylearn2,JesseLivezey/plankton,kastnerkyle/pylearn2,JesseLivezey/plankton,skearnes/pylearn2,JesseLivezey/pylearn2,ashhher3/pylearn2,matrogers/pylearn2,lunyang/pylearn2,w1kke/pylearn2,CIFASIS/pylearn2,mclaughlin6464/pylearn2,hantek/pylearn2,pkainz/pylearn2,Refefer/pylearn2,bartvm/pylearn2,TNick/pylearn2,daemonmaker/pylearn2,goodfeli/pylearn2,shiquanwang/pylearn2,skearnes/pylearn2,lisa-lab/pylearn2,goodfeli/pylearn2,aalmah/pylearn2,fulmicoton/pylearn2,jamessergeant/pylearn2,msingh172/pylearn2,TNick/pylearn2,nouiz/pylearn2,ddboline/pylearn2,skearnes/pylearn2,fishcorn/pylearn2,KennethPierce/pylearnk,kose-y/pylearn2,daemonmaker/pylearn2,KennethPierce/pylearnk,chrish42/pylearn,junbochen/pylearn2,Refefer/pylearn2,alexjc/pylearn2,woozzu/pylearn2,woozzu/pylearn2,nouiz/pylearn2,TNick/pylearn2,abergeron/pylearn2,chrish42/pylearn,lamblin/pylearn2,pombredanne/pylearn2,lisa-lab/pylearn2,goodfeli/pylearn2,lamblin/pylearn2,kose-y/pylearn2,caidongyun/pylearn2,cosmoharrigan/pylearn2,CIFASIS/pylearn2,cosmoharrigan/pylearn2,w1kke/pylearn2,matrogers/pylearn2,lamblin/pylearn2,fulmicoton/pylearn2,fyffyt/pylearn2,JesseLivezey/pylearn2,aalmah/pylearn2,msingh172/pylearn2,nouiz/pylearn2,theoryno3/pylearn2,se4u/pylearn2,aalmah/pylearn2,se4u/pylearn2,pkainz/pylearn2,Refefer/pylearn2,CIFASIS/pylearn2,cosmoharrigan/pylearn2,junbochen/pylearn2,mkraemer67/pylearn2,mclaughlin6464/pylearn2,chrish42/pylearn,KennethPierce/pylearnk,matrogers/pylearn2,caidongyun/pylearn2,hyqneuron/pylearn2-maxsom,hyqneuron/pylearn2-maxsom,ashhher3/pylearn2,kastnerkyle/pylearn2,lunyang/pylearn2,mkraemer67/pylearn2,alexjc/pylearn2,se4u/pylearn2,abergeron/pylearn2
""" Test dbm_metrics script """ from pylearn2.scripts.dbm import dbm_metrics from pylearn2.datasets.mnist import MNIST def test_ais(): """ Test ais computation """ w_list = [None] b_list = [] # Add parameters import trainset = MNIST(which_set='train') testset = MNIST(which_set='test') train_ll, test_ll, log_z = dbm_metrics.estimate_likelihood(w_list, b_list, trainset, testset, pos_mf_steps=5) # Add log_z, test_ll import russ_log_z = 100. russ_train_ll = -100. russ_test_ll = -100. assert log_z == russ_log_z assert train_ll == russ_train_ll assert test_ll == russ_test_ll Add more info to test_ais docstring
""" Test dbm_metrics script """ from pylearn2.scripts.dbm import dbm_metrics from pylearn2.datasets.mnist import MNIST def test_ais(): """ Test ais computation by comparing the output of estimate_likelihood to Russ's code's output for the same parameters. """ w_list = [None] b_list = [] # Add parameters import trainset = MNIST(which_set='train') testset = MNIST(which_set='test') train_ll, test_ll, log_z = dbm_metrics.estimate_likelihood(w_list, b_list, trainset, testset, pos_mf_steps=5) # Add log_z, test_ll import russ_log_z = 100. russ_train_ll = -100. russ_test_ll = -100. assert log_z == russ_log_z assert train_ll == russ_train_ll assert test_ll == russ_test_ll
<commit_before>""" Test dbm_metrics script """ from pylearn2.scripts.dbm import dbm_metrics from pylearn2.datasets.mnist import MNIST def test_ais(): """ Test ais computation """ w_list = [None] b_list = [] # Add parameters import trainset = MNIST(which_set='train') testset = MNIST(which_set='test') train_ll, test_ll, log_z = dbm_metrics.estimate_likelihood(w_list, b_list, trainset, testset, pos_mf_steps=5) # Add log_z, test_ll import russ_log_z = 100. russ_train_ll = -100. russ_test_ll = -100. assert log_z == russ_log_z assert train_ll == russ_train_ll assert test_ll == russ_test_ll <commit_msg>Add more info to test_ais docstring<commit_after>
""" Test dbm_metrics script """ from pylearn2.scripts.dbm import dbm_metrics from pylearn2.datasets.mnist import MNIST def test_ais(): """ Test ais computation by comparing the output of estimate_likelihood to Russ's code's output for the same parameters. """ w_list = [None] b_list = [] # Add parameters import trainset = MNIST(which_set='train') testset = MNIST(which_set='test') train_ll, test_ll, log_z = dbm_metrics.estimate_likelihood(w_list, b_list, trainset, testset, pos_mf_steps=5) # Add log_z, test_ll import russ_log_z = 100. russ_train_ll = -100. russ_test_ll = -100. assert log_z == russ_log_z assert train_ll == russ_train_ll assert test_ll == russ_test_ll
""" Test dbm_metrics script """ from pylearn2.scripts.dbm import dbm_metrics from pylearn2.datasets.mnist import MNIST def test_ais(): """ Test ais computation """ w_list = [None] b_list = [] # Add parameters import trainset = MNIST(which_set='train') testset = MNIST(which_set='test') train_ll, test_ll, log_z = dbm_metrics.estimate_likelihood(w_list, b_list, trainset, testset, pos_mf_steps=5) # Add log_z, test_ll import russ_log_z = 100. russ_train_ll = -100. russ_test_ll = -100. assert log_z == russ_log_z assert train_ll == russ_train_ll assert test_ll == russ_test_ll Add more info to test_ais docstring""" Test dbm_metrics script """ from pylearn2.scripts.dbm import dbm_metrics from pylearn2.datasets.mnist import MNIST def test_ais(): """ Test ais computation by comparing the output of estimate_likelihood to Russ's code's output for the same parameters. """ w_list = [None] b_list = [] # Add parameters import trainset = MNIST(which_set='train') testset = MNIST(which_set='test') train_ll, test_ll, log_z = dbm_metrics.estimate_likelihood(w_list, b_list, trainset, testset, pos_mf_steps=5) # Add log_z, test_ll import russ_log_z = 100. russ_train_ll = -100. russ_test_ll = -100. assert log_z == russ_log_z assert train_ll == russ_train_ll assert test_ll == russ_test_ll
<commit_before>""" Test dbm_metrics script """ from pylearn2.scripts.dbm import dbm_metrics from pylearn2.datasets.mnist import MNIST def test_ais(): """ Test ais computation """ w_list = [None] b_list = [] # Add parameters import trainset = MNIST(which_set='train') testset = MNIST(which_set='test') train_ll, test_ll, log_z = dbm_metrics.estimate_likelihood(w_list, b_list, trainset, testset, pos_mf_steps=5) # Add log_z, test_ll import russ_log_z = 100. russ_train_ll = -100. russ_test_ll = -100. assert log_z == russ_log_z assert train_ll == russ_train_ll assert test_ll == russ_test_ll <commit_msg>Add more info to test_ais docstring<commit_after>""" Test dbm_metrics script """ from pylearn2.scripts.dbm import dbm_metrics from pylearn2.datasets.mnist import MNIST def test_ais(): """ Test ais computation by comparing the output of estimate_likelihood to Russ's code's output for the same parameters. """ w_list = [None] b_list = [] # Add parameters import trainset = MNIST(which_set='train') testset = MNIST(which_set='test') train_ll, test_ll, log_z = dbm_metrics.estimate_likelihood(w_list, b_list, trainset, testset, pos_mf_steps=5) # Add log_z, test_ll import russ_log_z = 100. russ_train_ll = -100. russ_test_ll = -100. assert log_z == russ_log_z assert train_ll == russ_train_ll assert test_ll == russ_test_ll
3225c14ed1c3d09a68d6cde8af6d83d54a6f5f76
simple_history/__init__.py
simple_history/__init__.py
from __future__ import unicode_literals __version__ = '1.5.1' def register( model, app=None, manager_name='history', records_class=None, **records_config): """ Create historical model for `model` and attach history manager to `model`. Keyword arguments: app -- App to install historical model into (defaults to model.__module__) manager_name -- class attribute name to use for historical manager records_class -- class to use for history relation (defaults to HistoricalRecords) This method should be used as an alternative to attaching an `HistoricalManager` instance directly to `model`. """ from . import models if model._meta.db_table not in models.registered_models: records_class = records_class or models.HistoricalRecords records = records_class(**records_config) records.manager_name = manager_name records.module = app and ("%s.models" % app) or model.__module__ records.add_extra_methods(model) records.finalize(model) models.registered_models[model._meta.db_table] = model
from __future__ import unicode_literals __version__ = '1.5.1' def register( model, app=None, manager_name='history', records_class=None, **records_config): """ Create historical model for `model` and attach history manager to `model`. Keyword arguments: app -- App to install historical model into (defaults to model.__module__) manager_name -- class attribute name to use for historical manager records_class -- class to use for history relation (defaults to HistoricalRecords) This method should be used as an alternative to attaching an `HistoricalManager` instance directly to `model`. """ from . import models if model._meta.db_table not in models.registered_models: if records_class is None: records_class = models.HistoricalRecords records = records_class(**records_config) records.manager_name = manager_name records.module = app and ("%s.models" % app) or model.__module__ records.add_extra_methods(model) records.finalize(model) models.registered_models[model._meta.db_table] = model
Change style of setting records_class default
Change style of setting records_class default
Python
bsd-3-clause
emergence/django-simple-history,luzfcb/django-simple-history,treyhunner/django-simple-history,pombredanne/django-simple-history,pombredanne/django-simple-history,treyhunner/django-simple-history,emergence/django-simple-history,luzfcb/django-simple-history
from __future__ import unicode_literals __version__ = '1.5.1' def register( model, app=None, manager_name='history', records_class=None, **records_config): """ Create historical model for `model` and attach history manager to `model`. Keyword arguments: app -- App to install historical model into (defaults to model.__module__) manager_name -- class attribute name to use for historical manager records_class -- class to use for history relation (defaults to HistoricalRecords) This method should be used as an alternative to attaching an `HistoricalManager` instance directly to `model`. """ from . import models if model._meta.db_table not in models.registered_models: records_class = records_class or models.HistoricalRecords records = records_class(**records_config) records.manager_name = manager_name records.module = app and ("%s.models" % app) or model.__module__ records.add_extra_methods(model) records.finalize(model) models.registered_models[model._meta.db_table] = model Change style of setting records_class default
from __future__ import unicode_literals __version__ = '1.5.1' def register( model, app=None, manager_name='history', records_class=None, **records_config): """ Create historical model for `model` and attach history manager to `model`. Keyword arguments: app -- App to install historical model into (defaults to model.__module__) manager_name -- class attribute name to use for historical manager records_class -- class to use for history relation (defaults to HistoricalRecords) This method should be used as an alternative to attaching an `HistoricalManager` instance directly to `model`. """ from . import models if model._meta.db_table not in models.registered_models: if records_class is None: records_class = models.HistoricalRecords records = records_class(**records_config) records.manager_name = manager_name records.module = app and ("%s.models" % app) or model.__module__ records.add_extra_methods(model) records.finalize(model) models.registered_models[model._meta.db_table] = model
<commit_before>from __future__ import unicode_literals __version__ = '1.5.1' def register( model, app=None, manager_name='history', records_class=None, **records_config): """ Create historical model for `model` and attach history manager to `model`. Keyword arguments: app -- App to install historical model into (defaults to model.__module__) manager_name -- class attribute name to use for historical manager records_class -- class to use for history relation (defaults to HistoricalRecords) This method should be used as an alternative to attaching an `HistoricalManager` instance directly to `model`. """ from . import models if model._meta.db_table not in models.registered_models: records_class = records_class or models.HistoricalRecords records = records_class(**records_config) records.manager_name = manager_name records.module = app and ("%s.models" % app) or model.__module__ records.add_extra_methods(model) records.finalize(model) models.registered_models[model._meta.db_table] = model <commit_msg>Change style of setting records_class default<commit_after>
from __future__ import unicode_literals __version__ = '1.5.1' def register( model, app=None, manager_name='history', records_class=None, **records_config): """ Create historical model for `model` and attach history manager to `model`. Keyword arguments: app -- App to install historical model into (defaults to model.__module__) manager_name -- class attribute name to use for historical manager records_class -- class to use for history relation (defaults to HistoricalRecords) This method should be used as an alternative to attaching an `HistoricalManager` instance directly to `model`. """ from . import models if model._meta.db_table not in models.registered_models: if records_class is None: records_class = models.HistoricalRecords records = records_class(**records_config) records.manager_name = manager_name records.module = app and ("%s.models" % app) or model.__module__ records.add_extra_methods(model) records.finalize(model) models.registered_models[model._meta.db_table] = model
from __future__ import unicode_literals __version__ = '1.5.1' def register( model, app=None, manager_name='history', records_class=None, **records_config): """ Create historical model for `model` and attach history manager to `model`. Keyword arguments: app -- App to install historical model into (defaults to model.__module__) manager_name -- class attribute name to use for historical manager records_class -- class to use for history relation (defaults to HistoricalRecords) This method should be used as an alternative to attaching an `HistoricalManager` instance directly to `model`. """ from . import models if model._meta.db_table not in models.registered_models: records_class = records_class or models.HistoricalRecords records = records_class(**records_config) records.manager_name = manager_name records.module = app and ("%s.models" % app) or model.__module__ records.add_extra_methods(model) records.finalize(model) models.registered_models[model._meta.db_table] = model Change style of setting records_class defaultfrom __future__ import unicode_literals __version__ = '1.5.1' def register( model, app=None, manager_name='history', records_class=None, **records_config): """ Create historical model for `model` and attach history manager to `model`. Keyword arguments: app -- App to install historical model into (defaults to model.__module__) manager_name -- class attribute name to use for historical manager records_class -- class to use for history relation (defaults to HistoricalRecords) This method should be used as an alternative to attaching an `HistoricalManager` instance directly to `model`. """ from . import models if model._meta.db_table not in models.registered_models: if records_class is None: records_class = models.HistoricalRecords records = records_class(**records_config) records.manager_name = manager_name records.module = app and ("%s.models" % app) or model.__module__ records.add_extra_methods(model) records.finalize(model) models.registered_models[model._meta.db_table] = model
<commit_before>from __future__ import unicode_literals __version__ = '1.5.1' def register( model, app=None, manager_name='history', records_class=None, **records_config): """ Create historical model for `model` and attach history manager to `model`. Keyword arguments: app -- App to install historical model into (defaults to model.__module__) manager_name -- class attribute name to use for historical manager records_class -- class to use for history relation (defaults to HistoricalRecords) This method should be used as an alternative to attaching an `HistoricalManager` instance directly to `model`. """ from . import models if model._meta.db_table not in models.registered_models: records_class = records_class or models.HistoricalRecords records = records_class(**records_config) records.manager_name = manager_name records.module = app and ("%s.models" % app) or model.__module__ records.add_extra_methods(model) records.finalize(model) models.registered_models[model._meta.db_table] = model <commit_msg>Change style of setting records_class default<commit_after>from __future__ import unicode_literals __version__ = '1.5.1' def register( model, app=None, manager_name='history', records_class=None, **records_config): """ Create historical model for `model` and attach history manager to `model`. Keyword arguments: app -- App to install historical model into (defaults to model.__module__) manager_name -- class attribute name to use for historical manager records_class -- class to use for history relation (defaults to HistoricalRecords) This method should be used as an alternative to attaching an `HistoricalManager` instance directly to `model`. """ from . import models if model._meta.db_table not in models.registered_models: if records_class is None: records_class = models.HistoricalRecords records = records_class(**records_config) records.manager_name = manager_name records.module = app and ("%s.models" % app) or model.__module__ records.add_extra_methods(model) records.finalize(model) models.registered_models[model._meta.db_table] = model
e7bbfb94aed0109ccf1609333b8990f21e5f561c
pyhpeimc/__init__.py
pyhpeimc/__init__.py
#!/usr/bin/python3 '''Copyright 2015 Hewlett Packard Enterprise Development LP Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.'''
#!/usr/bin/env python # -*- coding: <encoding-name> -*- ''' Copyright 2015 Hewlett Packard Enterprise Development LP Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. '''
Fix in groups.py for get_custom_views function.
Fix in groups.py for get_custom_views function.
Python
apache-2.0
HPNetworking/HP-Intelligent-Management-Center,netmanchris/PYHPEIMC,HPENetworking/PYHPEIMC
#!/usr/bin/python3 '''Copyright 2015 Hewlett Packard Enterprise Development LP Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.'''Fix in groups.py for get_custom_views function.
#!/usr/bin/env python # -*- coding: <encoding-name> -*- ''' Copyright 2015 Hewlett Packard Enterprise Development LP Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. '''
<commit_before>#!/usr/bin/python3 '''Copyright 2015 Hewlett Packard Enterprise Development LP Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.'''<commit_msg>Fix in groups.py for get_custom_views function.<commit_after>
#!/usr/bin/env python # -*- coding: <encoding-name> -*- ''' Copyright 2015 Hewlett Packard Enterprise Development LP Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. '''
#!/usr/bin/python3 '''Copyright 2015 Hewlett Packard Enterprise Development LP Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.'''Fix in groups.py for get_custom_views function.#!/usr/bin/env python # -*- coding: <encoding-name> -*- ''' Copyright 2015 Hewlett Packard Enterprise Development LP Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. '''
<commit_before>#!/usr/bin/python3 '''Copyright 2015 Hewlett Packard Enterprise Development LP Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.'''<commit_msg>Fix in groups.py for get_custom_views function.<commit_after>#!/usr/bin/env python # -*- coding: <encoding-name> -*- ''' Copyright 2015 Hewlett Packard Enterprise Development LP Licensed under the Apache License, Version 2.0 (the “License”); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. '''
522edf619f1bebf855f24da6f84c90b10f866745
slack_to_habitica/views.py
slack_to_habitica/views.py
import os import json import requests from django.http import HttpResponse from django.views.decorators.csrf import csrf_exempt @csrf_exempt def process_slack_message(request): send_message( request.POST.get('user_name'), request.POST.get('text')) return HttpResponse() def send_message(user, text): api_user = os.environ['HABITICA_APIUSER'] api_key = os.environ['HABITICA_APIKEY'] group_id = os.environ['HABITICA_GROUPID'] habitica_url = 'https://habitica.com/api/v3/groups/%s/chat' % group_id headers = { 'x-api-user': api_user, 'x-api-key': api_key } data = { 'groupId': group_id, 'message': '<%s says> %s' % (user, text) } response = requests.post(habitica_url, headers=headers, data=data)
import os import json import requests from django.http import HttpResponse from django.views.decorators.csrf import csrf_exempt @csrf_exempt def process_slack_message(request): send_message( request.POST.get('user_name'), request.POST.get('text')) return HttpResponse() def send_message(user, text): api_user = os.environ['HABITICA_APIUSER'] api_key = os.environ['HABITICA_APIKEY'] group_id = os.environ['HABITICA_GROUPID'] habitica_url = 'https://habitica.com/api/v3/groups/%s/chat' % group_id headers = { 'x-api-user': api_user, 'x-api-key': api_key } data = { 'groupId': group_id, 'message': '[%s says] %s' % (user, text) } response = requests.post(habitica_url, headers=headers, data=data)
Use square brackets instead of angle brackets in messages, as the latter are getting stripped out
Use square brackets instead of angle brackets in messages, as the latter are getting stripped out
Python
mit
niteshpatel/habitica-slack
import os import json import requests from django.http import HttpResponse from django.views.decorators.csrf import csrf_exempt @csrf_exempt def process_slack_message(request): send_message( request.POST.get('user_name'), request.POST.get('text')) return HttpResponse() def send_message(user, text): api_user = os.environ['HABITICA_APIUSER'] api_key = os.environ['HABITICA_APIKEY'] group_id = os.environ['HABITICA_GROUPID'] habitica_url = 'https://habitica.com/api/v3/groups/%s/chat' % group_id headers = { 'x-api-user': api_user, 'x-api-key': api_key } data = { 'groupId': group_id, 'message': '<%s says> %s' % (user, text) } response = requests.post(habitica_url, headers=headers, data=data) Use square brackets instead of angle brackets in messages, as the latter are getting stripped out
import os import json import requests from django.http import HttpResponse from django.views.decorators.csrf import csrf_exempt @csrf_exempt def process_slack_message(request): send_message( request.POST.get('user_name'), request.POST.get('text')) return HttpResponse() def send_message(user, text): api_user = os.environ['HABITICA_APIUSER'] api_key = os.environ['HABITICA_APIKEY'] group_id = os.environ['HABITICA_GROUPID'] habitica_url = 'https://habitica.com/api/v3/groups/%s/chat' % group_id headers = { 'x-api-user': api_user, 'x-api-key': api_key } data = { 'groupId': group_id, 'message': '[%s says] %s' % (user, text) } response = requests.post(habitica_url, headers=headers, data=data)
<commit_before>import os import json import requests from django.http import HttpResponse from django.views.decorators.csrf import csrf_exempt @csrf_exempt def process_slack_message(request): send_message( request.POST.get('user_name'), request.POST.get('text')) return HttpResponse() def send_message(user, text): api_user = os.environ['HABITICA_APIUSER'] api_key = os.environ['HABITICA_APIKEY'] group_id = os.environ['HABITICA_GROUPID'] habitica_url = 'https://habitica.com/api/v3/groups/%s/chat' % group_id headers = { 'x-api-user': api_user, 'x-api-key': api_key } data = { 'groupId': group_id, 'message': '<%s says> %s' % (user, text) } response = requests.post(habitica_url, headers=headers, data=data) <commit_msg>Use square brackets instead of angle brackets in messages, as the latter are getting stripped out<commit_after>
import os import json import requests from django.http import HttpResponse from django.views.decorators.csrf import csrf_exempt @csrf_exempt def process_slack_message(request): send_message( request.POST.get('user_name'), request.POST.get('text')) return HttpResponse() def send_message(user, text): api_user = os.environ['HABITICA_APIUSER'] api_key = os.environ['HABITICA_APIKEY'] group_id = os.environ['HABITICA_GROUPID'] habitica_url = 'https://habitica.com/api/v3/groups/%s/chat' % group_id headers = { 'x-api-user': api_user, 'x-api-key': api_key } data = { 'groupId': group_id, 'message': '[%s says] %s' % (user, text) } response = requests.post(habitica_url, headers=headers, data=data)
import os import json import requests from django.http import HttpResponse from django.views.decorators.csrf import csrf_exempt @csrf_exempt def process_slack_message(request): send_message( request.POST.get('user_name'), request.POST.get('text')) return HttpResponse() def send_message(user, text): api_user = os.environ['HABITICA_APIUSER'] api_key = os.environ['HABITICA_APIKEY'] group_id = os.environ['HABITICA_GROUPID'] habitica_url = 'https://habitica.com/api/v3/groups/%s/chat' % group_id headers = { 'x-api-user': api_user, 'x-api-key': api_key } data = { 'groupId': group_id, 'message': '<%s says> %s' % (user, text) } response = requests.post(habitica_url, headers=headers, data=data) Use square brackets instead of angle brackets in messages, as the latter are getting stripped outimport os import json import requests from django.http import HttpResponse from django.views.decorators.csrf import csrf_exempt @csrf_exempt def process_slack_message(request): send_message( request.POST.get('user_name'), request.POST.get('text')) return HttpResponse() def send_message(user, text): api_user = os.environ['HABITICA_APIUSER'] api_key = os.environ['HABITICA_APIKEY'] group_id = os.environ['HABITICA_GROUPID'] habitica_url = 'https://habitica.com/api/v3/groups/%s/chat' % group_id headers = { 'x-api-user': api_user, 'x-api-key': api_key } data = { 'groupId': group_id, 'message': '[%s says] %s' % (user, text) } response = requests.post(habitica_url, headers=headers, data=data)
<commit_before>import os import json import requests from django.http import HttpResponse from django.views.decorators.csrf import csrf_exempt @csrf_exempt def process_slack_message(request): send_message( request.POST.get('user_name'), request.POST.get('text')) return HttpResponse() def send_message(user, text): api_user = os.environ['HABITICA_APIUSER'] api_key = os.environ['HABITICA_APIKEY'] group_id = os.environ['HABITICA_GROUPID'] habitica_url = 'https://habitica.com/api/v3/groups/%s/chat' % group_id headers = { 'x-api-user': api_user, 'x-api-key': api_key } data = { 'groupId': group_id, 'message': '<%s says> %s' % (user, text) } response = requests.post(habitica_url, headers=headers, data=data) <commit_msg>Use square brackets instead of angle brackets in messages, as the latter are getting stripped out<commit_after>import os import json import requests from django.http import HttpResponse from django.views.decorators.csrf import csrf_exempt @csrf_exempt def process_slack_message(request): send_message( request.POST.get('user_name'), request.POST.get('text')) return HttpResponse() def send_message(user, text): api_user = os.environ['HABITICA_APIUSER'] api_key = os.environ['HABITICA_APIKEY'] group_id = os.environ['HABITICA_GROUPID'] habitica_url = 'https://habitica.com/api/v3/groups/%s/chat' % group_id headers = { 'x-api-user': api_user, 'x-api-key': api_key } data = { 'groupId': group_id, 'message': '[%s says] %s' % (user, text) } response = requests.post(habitica_url, headers=headers, data=data)
783f59a41cde3c887968920251aa34b6a59c941b
source/cytoplasm/errors.py
source/cytoplasm/errors.py
class ControllerError(StandardError): pass
class InterpreterError(StandardError): pass
class CytoplasmError(Exception): pass
class ControllerError(CytoplasmError): pass
class InterpreterError(CytoplasmError): pass
Use Exception instead of StandardError
Use Exception instead of StandardError

Python 3 doesn't have StandardError...
Python
mit
startling/cytoplasm
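Why the commit above introduces a shared base class, in one hedged sketch; it assumes only the post-commit classes from this record:

# One except clause now catches every cytoplasm-specific error.
try:
    raise InterpreterError("template could not be interpreted")
except CytoplasmError as exc:
    print("cytoplasm failed:", exc)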
f1af343bf48843c8298ef6f07227402be1f4e511
angr/engines/soot/values/thisref.py
angr/engines/soot/values/thisref.py
from .base import SimSootValue
from .local import SimSootValue_Local


class SimSootValue_ThisRef(SimSootValue):

    __slots__ = [ 'id', 'type', 'heap_alloc_id' ]

    def __init__(self, heap_alloc_id, type_):
        self.id = self._create_unique_id(heap_alloc_id, type_)
        self.heap_alloc_id = heap_alloc_id
        self.type = type_

    @staticmethod
    def _create_unique_id(heap_alloc_id, class_name):
        return "%s.%s.this" % (heap_alloc_id, class_name)

    @classmethod
    def from_sootvalue(cls, soot_value, state):
        local = SimSootValue_Local("%s.this" % soot_value.type, soot_value.type)
        return state.memory.load(local)

    def __repr__(self):
        return self.id
from .base import SimSootValue
from .local import SimSootValue_Local


class SimSootValue_ThisRef(SimSootValue):

    __slots__ = [ 'id', 'type', 'heap_alloc_id' ]

    def __init__(self, heap_alloc_id, type_):
        self.id = self._create_unique_id(heap_alloc_id, type_)
        self.heap_alloc_id = heap_alloc_id
        self.type = type_

    @staticmethod
    def _create_unique_id(heap_alloc_id, class_name):
        return "%s.%s.this" % (heap_alloc_id, class_name)

    @classmethod
    def from_sootvalue(cls, soot_value, state):
        local = SimSootValue_Local("this", soot_value.type)
        return state.memory.load(local)

    def __repr__(self):
        return self.id
Fix naming of 'this' reference
Fix naming of 'this' reference
Python
bsd-2-clause
schieb/angr,schieb/angr,angr/angr,angr/angr,iamahuman/angr,iamahuman/angr,schieb/angr,angr/angr,iamahuman/angr
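For reference, a tiny sketch of how the unique id in this record is composed; the inputs here are made up:

heap_alloc_id = 3
class_name = 'java.lang.String'
ref_id = "%s.%s.this" % (heap_alloc_id, class_name)
assert ref_id == '3.java.lang.String.this'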
0c753e644068439376493e4b23a1060d742770ae
tests/__main__.py
tests/__main__.py
import unittest

if __name__ == '__main__':
    all_tests = unittest.TestLoader().discover('./', pattern='*_tests.py')
    unittest.TextTestRunner().run(all_tests)
import sys
import unittest

if __name__ == '__main__':
    all_tests = unittest.TestLoader().discover('./', pattern='*_tests.py')
    ret = unittest.TextTestRunner().run(all_tests).wasSuccessful()
    sys.exit(ret)
Fix an issue when unit tests always return 0 status.
Fix an issue when unit tests always return 0 status.
Python
mit
sergeymironov0001/twitch-chat-bot
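One caveat on the fix above: wasSuccessful() returns True when all tests pass, and sys.exit(True) exits with status 1, so the committed code still inverts the status. A variant that maps the result explicitly (an editorial sketch, not the project's code):

import sys
import unittest

if __name__ == '__main__':
    suite = unittest.TestLoader().discover('./', pattern='*_tests.py')
    ok = unittest.TextTestRunner().run(suite).wasSuccessful()
    sys.exit(0 if ok else 1)  # success -> 0, failure -> 1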
966c22d4bae270a14176ae1c7b9887eb55743612
tests/conftest.py
tests/conftest.py
import datetime

import odin.datetimeutil

ARE_YOU_EXPERIENCED = datetime.date(1967, 5, 12)
MWT = odin.datetimeutil.FixedTimezone(-6, 'Mountain War Time')
BOOM = datetime.datetime(1945, 7, 16, 5, 29, 45, 0, MWT)
import os
import sys

import datetime

import odin.datetimeutil

HERE = os.path.abspath(os.path.dirname(__file__))
SRC = os.path.normpath(os.path.join(HERE, "..", "src"))
sys.path.insert(0, SRC)

ARE_YOU_EXPERIENCED = datetime.date(1967, 5, 12)
MWT = odin.datetimeutil.FixedTimezone(-6, "Mountain War Time")
BOOM = datetime.datetime(1945, 7, 16, 5, 29, 45, 0, MWT)
Update tests to find source path
Update tests to find source path
Python
bsd-3-clause
python-odin/odin
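The same source-path trick, sketched with pathlib as a hypothetical equivalent (not taken from the repo):

import sys
from pathlib import Path

SRC = (Path(__file__).resolve().parent.parent / "src").resolve()
sys.path.insert(0, str(SRC))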
59120e42b73c5ff064301d1c9e2d5667903d0642
tests/settings.py
tests/settings.py
from django import VERSION

DEBUG = True
USE_TZ = True

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = "something-not-secret"

DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.sqlite3",
        "NAME": ":memory:",
    }
}

ROOT_URLCONF = "tests.urls"

INSTALLED_APPS = [
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "django.contrib.messages",
    "django.contrib.sessions",
    "django.contrib.sites",
    "django.contrib.admin",
    "rest_framework",
    "donations",
    "tests",
]

SITE_ID = 1

_MIDDLEWARE = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
)

if VERSION < (1, 10):
    MIDDLEWARE_CLASSES = _MIDDLEWARE
else:
    MIDDLEWARE = _MIDDLEWARE

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ]
        }
    },
]
DEBUG = True
USE_TZ = True

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = "something-not-secret"

DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.sqlite3",
        "NAME": ":memory:",
    }
}

ROOT_URLCONF = "tests.urls"

INSTALLED_APPS = [
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "django.contrib.messages",
    "django.contrib.sessions",
    "django.contrib.sites",
    "django.contrib.admin",
    "rest_framework",
    "donations",
    "tests",
]

SITE_ID = 1

MIDDLEWARE = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
)

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ]
        }
    },
]
Drop a special case for Django <1.10
Drop a special case for Django <1.10
Python
bsd-3-clause
founders4schools/django-donations,founders4schools/django-donations
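A hypothetical smoke test for the cleanup above — it assumes tests.settings is importable from the repo root and simply checks that only the new-style setting name survives:

import importlib

settings = importlib.import_module('tests.settings')
assert hasattr(settings, 'MIDDLEWARE')
assert not hasattr(settings, 'MIDDLEWARE_CLASSES')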
37e569bed66e18e0ae80222f2988277023e19916
tests/test_cli.py
tests/test_cli.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

import mock
import pytest

import pypi_cli as pypi


@pytest.mark.usefixtures('mock_api')
class TestStat:

    def test_missing_package_arg(self, runner):
        result = runner.invoke(pypi.cli, ['stat'])
        assert result.exit_code > 0

    def test_with_package(self, runner):
        result = runner.invoke(pypi.cli, ['stat', 'webargs'])
        assert result.exit_code == 0
        assert 'Download statistics for webargs' in result.output


class TestBrowse:

    def test_missing_package_arg(self, runner):
        result = runner.invoke(pypi.cli, ['browse'])
        assert result.exit_code > 0

    @mock.patch('pypi_cli.click.termui.launch')
    def test_with_package(self, mock_launch, runner):
        result = runner.invoke(pypi.cli, ['browse', 'webargs'])
        assert result.exit_code == 0
        assert mock_launch.called is True


def test_version(runner):
    result = runner.invoke(pypi.cli, ['-v'])
    assert result.output == pypi.__version__ + '\n'
    result = runner.invoke(pypi.cli, ['--version'])
    assert result.output == pypi.__version__ + '\n'
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

import mock
import pytest

import pypi_cli as pypi


@pytest.mark.usefixtures('mock_api')
class TestStat:

    def test_missing_package_arg(self, runner):
        result = runner.invoke(pypi.cli, ['stat'])
        assert result.exit_code > 0

    def test_with_package(self, runner):
        result = runner.invoke(pypi.cli, ['stat', 'webargs'])
        assert result.exit_code == 0
        assert 'Download statistics for webargs' in result.output

    def test_with_package_url(self, runner):
        result = runner.invoke(pypi.cli, ['stat', 'http://pypi.python.org/pypi/webargs'])
        assert result.exit_code == 0
        assert 'Download statistics for webargs' in result.output


@pytest.mark.usefixtures('mock_api')
class TestBrowse:

    def test_missing_package_arg(self, runner):
        result = runner.invoke(pypi.cli, ['browse'])
        assert result.exit_code > 0

    @mock.patch('pypi_cli.click.termui.launch')
    def test_with_package(self, mock_launch, runner):
        result = runner.invoke(pypi.cli, ['browse', 'webargs'])
        assert result.exit_code == 0
        assert mock_launch.called is True


def test_version(runner):
    result = runner.invoke(pypi.cli, ['-v'])
    assert result.output == pypi.__version__ + '\n'
    result = runner.invoke(pypi.cli, ['--version'])
    assert result.output == pypi.__version__ + '\n'
Add test for inputting package URL
Add test for inputting package URL
Python
mit
pombredanne/pypi-cli,sloria/pypi-cli,mindw/pypi-cli
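The runner fixture these tests rely on is not part of this record; a typical definition (an assumption, not code from the repo) would be:

import pytest
from click.testing import CliRunner


@pytest.fixture
def runner():
    return CliRunner()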
6c211bce96eaca17de770b82aab8dac07ff0c2fd
src/dictsdiff/cli.py
src/dictsdiff/cli.py
""" Compare multiple similar dictionary data in JSON/YAML/Pickle files. """ from __future__ import print_function import sys def dictsdiff_cli(files): import pandas from .loader import diff_files, diff_ndjson if files: dd = diff_files(files) else: dd = diff_ndjson(sys.stdin) with pandas.option_context('display.max_rows', None, 'display.max_columns', None): print(dd.pretty_diff()) def make_parser(doc=__doc__): import argparse parser = argparse.ArgumentParser( formatter_class=type('FormatterClass', (argparse.RawDescriptionHelpFormatter, argparse.ArgumentDefaultsHelpFormatter), {}), description=doc) parser.add_argument( 'files', metavar='FILE', nargs='*', ) return parser def main(args=None): parser = make_parser() ns = parser.parse_args(args) dictsdiff_cli(**vars(ns))
""" Compare multiple similar dictionary data in JSON/YAML/Pickle files. """ from __future__ import print_function import sys try: from shutil import get_terminal_size except ImportError: def get_terminal_size(): from subprocess import check_output out = check_output(['stty', 'size'], universal_newlines=True) rows, columns = map(int, out.strip().split()) return columns, rows def dictsdiff_cli(files): import pandas from .loader import diff_files, diff_ndjson if files: dd = diff_files(files) else: dd = diff_ndjson(sys.stdin) # Manually detect terminal size, since passing "'display.width', # None" does not detect terminal size (as advertised in # https://pandas.pydata.org/pandas-docs/stable/options.html): width, _ = get_terminal_size() with pandas.option_context('display.max_rows', None, 'display.max_columns', None, 'display.width', width): print(dd.pretty_diff()) def make_parser(doc=__doc__): import argparse parser = argparse.ArgumentParser( formatter_class=type('FormatterClass', (argparse.RawDescriptionHelpFormatter, argparse.ArgumentDefaultsHelpFormatter), {}), description=doc) parser.add_argument( 'files', metavar='FILE', nargs='*', ) return parser def main(args=None): parser = make_parser() ns = parser.parse_args(args) dictsdiff_cli(**vars(ns))
Set display.width based on terminal size
Set display.width based on terminal size
Python
bsd-2-clause
tkf/dictsdiff
""" Compare multiple similar dictionary data in JSON/YAML/Pickle files. """ from __future__ import print_function import sys def dictsdiff_cli(files): import pandas from .loader import diff_files, diff_ndjson if files: dd = diff_files(files) else: dd = diff_ndjson(sys.stdin) with pandas.option_context('display.max_rows', None, 'display.max_columns', None): print(dd.pretty_diff()) def make_parser(doc=__doc__): import argparse parser = argparse.ArgumentParser( formatter_class=type('FormatterClass', (argparse.RawDescriptionHelpFormatter, argparse.ArgumentDefaultsHelpFormatter), {}), description=doc) parser.add_argument( 'files', metavar='FILE', nargs='*', ) return parser def main(args=None): parser = make_parser() ns = parser.parse_args(args) dictsdiff_cli(**vars(ns)) Set display.width based on terminal size
""" Compare multiple similar dictionary data in JSON/YAML/Pickle files. """ from __future__ import print_function import sys try: from shutil import get_terminal_size except ImportError: def get_terminal_size(): from subprocess import check_output out = check_output(['stty', 'size'], universal_newlines=True) rows, columns = map(int, out.strip().split()) return columns, rows def dictsdiff_cli(files): import pandas from .loader import diff_files, diff_ndjson if files: dd = diff_files(files) else: dd = diff_ndjson(sys.stdin) # Manually detect terminal size, since passing "'display.width', # None" does not detect terminal size (as advertised in # https://pandas.pydata.org/pandas-docs/stable/options.html): width, _ = get_terminal_size() with pandas.option_context('display.max_rows', None, 'display.max_columns', None, 'display.width', width): print(dd.pretty_diff()) def make_parser(doc=__doc__): import argparse parser = argparse.ArgumentParser( formatter_class=type('FormatterClass', (argparse.RawDescriptionHelpFormatter, argparse.ArgumentDefaultsHelpFormatter), {}), description=doc) parser.add_argument( 'files', metavar='FILE', nargs='*', ) return parser def main(args=None): parser = make_parser() ns = parser.parse_args(args) dictsdiff_cli(**vars(ns))
<commit_before>""" Compare multiple similar dictionary data in JSON/YAML/Pickle files. """ from __future__ import print_function import sys def dictsdiff_cli(files): import pandas from .loader import diff_files, diff_ndjson if files: dd = diff_files(files) else: dd = diff_ndjson(sys.stdin) with pandas.option_context('display.max_rows', None, 'display.max_columns', None): print(dd.pretty_diff()) def make_parser(doc=__doc__): import argparse parser = argparse.ArgumentParser( formatter_class=type('FormatterClass', (argparse.RawDescriptionHelpFormatter, argparse.ArgumentDefaultsHelpFormatter), {}), description=doc) parser.add_argument( 'files', metavar='FILE', nargs='*', ) return parser def main(args=None): parser = make_parser() ns = parser.parse_args(args) dictsdiff_cli(**vars(ns)) <commit_msg>Set display.width based on terminal size<commit_after>
""" Compare multiple similar dictionary data in JSON/YAML/Pickle files. """ from __future__ import print_function import sys try: from shutil import get_terminal_size except ImportError: def get_terminal_size(): from subprocess import check_output out = check_output(['stty', 'size'], universal_newlines=True) rows, columns = map(int, out.strip().split()) return columns, rows def dictsdiff_cli(files): import pandas from .loader import diff_files, diff_ndjson if files: dd = diff_files(files) else: dd = diff_ndjson(sys.stdin) # Manually detect terminal size, since passing "'display.width', # None" does not detect terminal size (as advertised in # https://pandas.pydata.org/pandas-docs/stable/options.html): width, _ = get_terminal_size() with pandas.option_context('display.max_rows', None, 'display.max_columns', None, 'display.width', width): print(dd.pretty_diff()) def make_parser(doc=__doc__): import argparse parser = argparse.ArgumentParser( formatter_class=type('FormatterClass', (argparse.RawDescriptionHelpFormatter, argparse.ArgumentDefaultsHelpFormatter), {}), description=doc) parser.add_argument( 'files', metavar='FILE', nargs='*', ) return parser def main(args=None): parser = make_parser() ns = parser.parse_args(args) dictsdiff_cli(**vars(ns))
""" Compare multiple similar dictionary data in JSON/YAML/Pickle files. """ from __future__ import print_function import sys def dictsdiff_cli(files): import pandas from .loader import diff_files, diff_ndjson if files: dd = diff_files(files) else: dd = diff_ndjson(sys.stdin) with pandas.option_context('display.max_rows', None, 'display.max_columns', None): print(dd.pretty_diff()) def make_parser(doc=__doc__): import argparse parser = argparse.ArgumentParser( formatter_class=type('FormatterClass', (argparse.RawDescriptionHelpFormatter, argparse.ArgumentDefaultsHelpFormatter), {}), description=doc) parser.add_argument( 'files', metavar='FILE', nargs='*', ) return parser def main(args=None): parser = make_parser() ns = parser.parse_args(args) dictsdiff_cli(**vars(ns)) Set display.width based on terminal size""" Compare multiple similar dictionary data in JSON/YAML/Pickle files. """ from __future__ import print_function import sys try: from shutil import get_terminal_size except ImportError: def get_terminal_size(): from subprocess import check_output out = check_output(['stty', 'size'], universal_newlines=True) rows, columns = map(int, out.strip().split()) return columns, rows def dictsdiff_cli(files): import pandas from .loader import diff_files, diff_ndjson if files: dd = diff_files(files) else: dd = diff_ndjson(sys.stdin) # Manually detect terminal size, since passing "'display.width', # None" does not detect terminal size (as advertised in # https://pandas.pydata.org/pandas-docs/stable/options.html): width, _ = get_terminal_size() with pandas.option_context('display.max_rows', None, 'display.max_columns', None, 'display.width', width): print(dd.pretty_diff()) def make_parser(doc=__doc__): import argparse parser = argparse.ArgumentParser( formatter_class=type('FormatterClass', (argparse.RawDescriptionHelpFormatter, argparse.ArgumentDefaultsHelpFormatter), {}), description=doc) parser.add_argument( 'files', metavar='FILE', nargs='*', ) return parser def main(args=None): parser = make_parser() ns = parser.parse_args(args) dictsdiff_cli(**vars(ns))
<commit_before>""" Compare multiple similar dictionary data in JSON/YAML/Pickle files. """ from __future__ import print_function import sys def dictsdiff_cli(files): import pandas from .loader import diff_files, diff_ndjson if files: dd = diff_files(files) else: dd = diff_ndjson(sys.stdin) with pandas.option_context('display.max_rows', None, 'display.max_columns', None): print(dd.pretty_diff()) def make_parser(doc=__doc__): import argparse parser = argparse.ArgumentParser( formatter_class=type('FormatterClass', (argparse.RawDescriptionHelpFormatter, argparse.ArgumentDefaultsHelpFormatter), {}), description=doc) parser.add_argument( 'files', metavar='FILE', nargs='*', ) return parser def main(args=None): parser = make_parser() ns = parser.parse_args(args) dictsdiff_cli(**vars(ns)) <commit_msg>Set display.width based on terminal size<commit_after>""" Compare multiple similar dictionary data in JSON/YAML/Pickle files. """ from __future__ import print_function import sys try: from shutil import get_terminal_size except ImportError: def get_terminal_size(): from subprocess import check_output out = check_output(['stty', 'size'], universal_newlines=True) rows, columns = map(int, out.strip().split()) return columns, rows def dictsdiff_cli(files): import pandas from .loader import diff_files, diff_ndjson if files: dd = diff_files(files) else: dd = diff_ndjson(sys.stdin) # Manually detect terminal size, since passing "'display.width', # None" does not detect terminal size (as advertised in # https://pandas.pydata.org/pandas-docs/stable/options.html): width, _ = get_terminal_size() with pandas.option_context('display.max_rows', None, 'display.max_columns', None, 'display.width', width): print(dd.pretty_diff()) def make_parser(doc=__doc__): import argparse parser = argparse.ArgumentParser( formatter_class=type('FormatterClass', (argparse.RawDescriptionHelpFormatter, argparse.ArgumentDefaultsHelpFormatter), {}), description=doc) parser.add_argument( 'files', metavar='FILE', nargs='*', ) return parser def main(args=None): parser = make_parser() ns = parser.parse_args(args) dictsdiff_cli(**vars(ns))
10bbc402a46e2832f8e62359fd2d86b7ebf7fd84
cloudbot/symfony.py
cloudbot/symfony.py
from util import hook

from elasticutils import S


@hook.command
@hook.command('sf')
def symfony(inp):
    search = S().indexes('doc-index').doctypes('doc-section-type')

    # cant fit more than 3 links into 1 irs message
    results = search.query(tags__match=inp, title__match=inp, content__match=inp, should=True)[:3].execute()

    if not len(results):
        return "Sorry, seems like I can't help you with that."

    topScore = results.results[0]['_score']
    matches = []
    print topScore
    for result in results:
        if result._score + 1 >= topScore:
            matches.append(result.id)

    if len(matches) > 1:
        responseText = "These are the docs I found most relevant for you: %s"
    else:
        responseText = "This is what I found most relevant for you: %s"

    return responseText % ', '.join(matches)
from util import hook

import elasticutils
from elasticutils import S


@hook.command
@hook.command('sf')
def symfony(inp):
    if not elasticutils.get_es().indices.exists('doc-index'):
        return "Index currently unavailable. Try again in a bit."

    search = S().indexes('doc-index').doctypes('doc-section-type')

    # cant fit more than 3 links into 1 irs message
    results = search.query(tags__match=inp, title__match=inp, content__match=inp, should=True)[:3].execute()

    if not len(results):
        return "Sorry, seems like I can't help you with that."

    topScore = results.results[0]['_score']
    matches = []
    print topScore
    for result in results:
        if result._score + 1 >= topScore:
            matches.append(result.id)

    if len(matches) > 1:
        responseText = "These are the docs I found most relevant for you: %s"
    else:
        responseText = "This is what I found most relevant for you: %s"

    return responseText % ', '.join(matches)
Check if the index is available before calling the query.
Check if the index is available before calling the query.
Python
mit
mitom/symfony-doc-bot,mitom/symfony-doc-bot
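Roughly what the elasticutils guard above resolves to, sketched against the elasticsearch-py client directly (the host is made up):

from elasticsearch import Elasticsearch

es = Elasticsearch(['http://localhost:9200'])
if not es.indices.exists(index='doc-index'):
    print('Index currently unavailable. Try again in a bit.')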
f48554bcc5ac1161314592cb43ba65701d387289
tests/test_check_endpoint.py
tests/test_check_endpoint.py
import pytest


def test_get_connection():
    assert False


def test_verify_hostname_with_valid_hostname():
    assert False


def test_verify_hostname_with_valid_altname():
    assert False


def test_verify_hostname_with_invalid_hostname():
    assert False


def test_expiring_certificate_with_good_cert():
    assert False


def test_expiring_certificate_with_bad_cert():
    assert false


def test_send_email():
    assert False
import pytest


# We're going to fake a connection for purposes of testing.
# So far all we use is getpeercert method, so that's all we need to fake
class fake_connection(object):
    def __init__(self):
        pass

    def getpeercert(self):
        cert_details = {'notAfter': 'Dec 31 00:00:00 2015 GMT',
                        'subjectAltName': (('DNS', 'www.fake.com'),),
                        'subject': ((('countryName', u'US'),),
                                    (('stateOrProvinceName', u'Oregon'),),
                                    (('localityName', u'Springfield'),),
                                    (('organizationName', u'FakeCompany'),),
                                    (('commonName', u'fake.com'),))}
        return cert_details


def test_get_connection():
    assert False


def test_verify_hostname_with_valid_hostname():
    assert False


def test_verify_hostname_with_valid_altname():
    assert False


def test_verify_hostname_with_invalid_hostname():
    assert False


def test_expiring_certificate_with_good_cert():
    assert False


def test_expiring_certificate_with_bad_cert():
    assert False


def test_send_email():
    assert False
Add fake connection class, PEP8 changes
Add fake connection class, PEP8 changes

Also had a bad assert in there
Python
mit
twirrim/checkendpoint
<commit_before>import pytest def test_get_connection(): assert False def test_verify_hostname_with_valid_hostname(): assert False def test_verify_hostname_with_valid_altname(): assert False def test_verify_hostname_with_invalid_hostname(): assert False def test_expiring_certificate_with_good_cert(): assert False def test_expiring_certificate_with_bad_cert(): assert false def test_send_email(): assert False<commit_msg>Add fake connection class, PEP8 changes Also had a bad assert in there<commit_after>import pytest # We're going to fake a connection for purposes of testing. # So far all we use is getpeercert method, so that's all we need to fake class fake_connection(object): def __init__(self): pass def getpeercert(self): cert_details = {'notAfter': 'Dec 31 00:00:00 2015 GMT', 'subjectAltName': (('DNS', 'www.fake.com'),), 'subject': ((('countryName', u'US'),), (('stateOrProvinceName', u'Oregon'),), (('localityName', u'Springfield'),), (('organizationName', u'FakeCompany'),), (('commonName', u'fake.com'),))} return cert_details def test_get_connection(): assert False def test_verify_hostname_with_valid_hostname(): assert False def test_verify_hostname_with_valid_altname(): assert False def test_verify_hostname_with_invalid_hostname(): assert False def test_expiring_certificate_with_good_cert(): assert False def test_expiring_certificate_with_bad_cert(): assert False def test_send_email(): assert False
4c07583ff5bd3e4e7d542b085807c4547328f1e8
settings/travis.py
settings/travis.py
from defaults import * DEBUG=False DATABASES = { 'default': { #'ENGINE': 'django.db.backends.postgresql_psycopg2', 'ENGINE': 'django.contrib.gis.db.backends.postgis', # 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'atlas_travis', 'USER': 'postgres', 'PASSWORD': '', 'HOST': '', # Set to empty string for localhost. Not used with sqlite3. 'PORT': '', # Set to empty string for default. Not used with sqlite3. } } # Haystack doesn't correctly hook up RealTimeIndex signals when # migrations are enabled, so disable migrations. # See https://github.com/toastdriven/django-haystack/issues/599 SOUTH_TESTS_MIGRATE = False HAYSTACK_CONNECTIONS = { 'default': { 'ENGINE': 'storybase_geo.search.backends.Solr2155Engine', 'URL': 'http://localhost:8983/solr/travis', # If the Solr/Jetty install on Travis CI is broken, use the # mock backend which will cause some tests to be skipped #'ENGINE': 'haystack.backends.simple_backend.SimpleEngine', }, } EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
from defaults import * DEBUG=False SECRET_KEY='+9*_1$hry$2r5#723%_a@uju&-skn)^042r+d_eupq*az8o^(w' DATABASES = { 'default': { #'ENGINE': 'django.db.backends.postgresql_psycopg2', 'ENGINE': 'django.contrib.gis.db.backends.postgis', # 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'atlas_travis', 'USER': 'postgres', 'PASSWORD': '', 'HOST': '', # Set to empty string for localhost. Not used with sqlite3. 'PORT': '', # Set to empty string for default. Not used with sqlite3. } } # Haystack doesn't correctly hook up RealTimeIndex signals when # migrations are enabled, so disable migrations. # See https://github.com/toastdriven/django-haystack/issues/599 SOUTH_TESTS_MIGRATE = False HAYSTACK_CONNECTIONS = { 'default': { 'ENGINE': 'storybase_geo.search.backends.Solr2155Engine', 'URL': 'http://localhost:8983/solr/travis', # If the Solr/Jetty install on Travis CI is broken, use the # mock backend which will cause some tests to be skipped #'ENGINE': 'haystack.backends.simple_backend.SimpleEngine', }, } EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
Add SECRET_KEY to Travis CI settings
Add SECRET_KEY to Travis CI settings ``settings.SECRET_KEY`` is required in Django 1.5 Addresses #660
Python
mit
denverfoundation/storybase,denverfoundation/storybase,denverfoundation/storybase,denverfoundation/storybase
from defaults import * DEBUG=False DATABASES = { 'default': { #'ENGINE': 'django.db.backends.postgresql_psycopg2', 'ENGINE': 'django.contrib.gis.db.backends.postgis', # 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'atlas_travis', 'USER': 'postgres', 'PASSWORD': '', 'HOST': '', # Set to empty string for localhost. Not used with sqlite3. 'PORT': '', # Set to empty string for default. Not used with sqlite3. } } # Haystack doesn't correctly hook up RealTimeIndex signals when # migrations are enabled, so disable migrations. # See https://github.com/toastdriven/django-haystack/issues/599 SOUTH_TESTS_MIGRATE = False HAYSTACK_CONNECTIONS = { 'default': { 'ENGINE': 'storybase_geo.search.backends.Solr2155Engine', 'URL': 'http://localhost:8983/solr/travis', # If the Solr/Jetty install on Travis CI is broken, use the # mock backend which will cause some tests to be skipped #'ENGINE': 'haystack.backends.simple_backend.SimpleEngine', }, } EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend' Add SECRET_KEY to Travis CI settings ``settings.SECRET_KEY`` is required in Django 1.5 Addresses #660
from defaults import * DEBUG=False SECRET_KEY='+9*_1$hry$2r5#723%_a@uju&-skn)^042r+d_eupq*az8o^(w' DATABASES = { 'default': { #'ENGINE': 'django.db.backends.postgresql_psycopg2', 'ENGINE': 'django.contrib.gis.db.backends.postgis', # 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'atlas_travis', 'USER': 'postgres', 'PASSWORD': '', 'HOST': '', # Set to empty string for localhost. Not used with sqlite3. 'PORT': '', # Set to empty string for default. Not used with sqlite3. } } # Haystack doesn't correctly hook up RealTimeIndex signals when # migrations are enabled, so disable migrations. # See https://github.com/toastdriven/django-haystack/issues/599 SOUTH_TESTS_MIGRATE = False HAYSTACK_CONNECTIONS = { 'default': { 'ENGINE': 'storybase_geo.search.backends.Solr2155Engine', 'URL': 'http://localhost:8983/solr/travis', # If the Solr/Jetty install on Travis CI is broken, use the # mock backend which will cause some tests to be skipped #'ENGINE': 'haystack.backends.simple_backend.SimpleEngine', }, } EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
<commit_before>from defaults import * DEBUG=False DATABASES = { 'default': { #'ENGINE': 'django.db.backends.postgresql_psycopg2', 'ENGINE': 'django.contrib.gis.db.backends.postgis', # 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'atlas_travis', 'USER': 'postgres', 'PASSWORD': '', 'HOST': '', # Set to empty string for localhost. Not used with sqlite3. 'PORT': '', # Set to empty string for default. Not used with sqlite3. } } # Haystack doesn't correctly hook up RealTimeIndex signals when # migrations are enabled, so disable migrations. # See https://github.com/toastdriven/django-haystack/issues/599 SOUTH_TESTS_MIGRATE = False HAYSTACK_CONNECTIONS = { 'default': { 'ENGINE': 'storybase_geo.search.backends.Solr2155Engine', 'URL': 'http://localhost:8983/solr/travis', # If the Solr/Jetty install on Travis CI is broken, use the # mock backend which will cause some tests to be skipped #'ENGINE': 'haystack.backends.simple_backend.SimpleEngine', }, } EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend' <commit_msg>Add SECRET_KEY to Travis CI settings ``settings.SECRET_KEY`` is required in Django 1.5 Addresses #660<commit_after>
from defaults import * DEBUG=False SECRET_KEY='+9*_1$hry$2r5#723%_a@uju&-skn)^042r+d_eupq*az8o^(w' DATABASES = { 'default': { #'ENGINE': 'django.db.backends.postgresql_psycopg2', 'ENGINE': 'django.contrib.gis.db.backends.postgis', # 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'atlas_travis', 'USER': 'postgres', 'PASSWORD': '', 'HOST': '', # Set to empty string for localhost. Not used with sqlite3. 'PORT': '', # Set to empty string for default. Not used with sqlite3. } } # Haystack doesn't correctly hook up RealTimeIndex signals when # migrations are enabled, so disable migrations. # See https://github.com/toastdriven/django-haystack/issues/599 SOUTH_TESTS_MIGRATE = False HAYSTACK_CONNECTIONS = { 'default': { 'ENGINE': 'storybase_geo.search.backends.Solr2155Engine', 'URL': 'http://localhost:8983/solr/travis', # If the Solr/Jetty install on Travis CI is broken, use the # mock backend which will cause some tests to be skipped #'ENGINE': 'haystack.backends.simple_backend.SimpleEngine', }, } EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
from defaults import * DEBUG=False DATABASES = { 'default': { #'ENGINE': 'django.db.backends.postgresql_psycopg2', 'ENGINE': 'django.contrib.gis.db.backends.postgis', # 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'atlas_travis', 'USER': 'postgres', 'PASSWORD': '', 'HOST': '', # Set to empty string for localhost. Not used with sqlite3. 'PORT': '', # Set to empty string for default. Not used with sqlite3. } } # Haystack doesn't correctly hook up RealTimeIndex signals when # migrations are enabled, so disable migrations. # See https://github.com/toastdriven/django-haystack/issues/599 SOUTH_TESTS_MIGRATE = False HAYSTACK_CONNECTIONS = { 'default': { 'ENGINE': 'storybase_geo.search.backends.Solr2155Engine', 'URL': 'http://localhost:8983/solr/travis', # If the Solr/Jetty install on Travis CI is broken, use the # mock backend which will cause some tests to be skipped #'ENGINE': 'haystack.backends.simple_backend.SimpleEngine', }, } EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend' Add SECRET_KEY to Travis CI settings ``settings.SECRET_KEY`` is required in Django 1.5 Addresses #660from defaults import * DEBUG=False SECRET_KEY='+9*_1$hry$2r5#723%_a@uju&-skn)^042r+d_eupq*az8o^(w' DATABASES = { 'default': { #'ENGINE': 'django.db.backends.postgresql_psycopg2', 'ENGINE': 'django.contrib.gis.db.backends.postgis', # 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'atlas_travis', 'USER': 'postgres', 'PASSWORD': '', 'HOST': '', # Set to empty string for localhost. Not used with sqlite3. 'PORT': '', # Set to empty string for default. Not used with sqlite3. } } # Haystack doesn't correctly hook up RealTimeIndex signals when # migrations are enabled, so disable migrations. # See https://github.com/toastdriven/django-haystack/issues/599 SOUTH_TESTS_MIGRATE = False HAYSTACK_CONNECTIONS = { 'default': { 'ENGINE': 'storybase_geo.search.backends.Solr2155Engine', 'URL': 'http://localhost:8983/solr/travis', # If the Solr/Jetty install on Travis CI is broken, use the # mock backend which will cause some tests to be skipped #'ENGINE': 'haystack.backends.simple_backend.SimpleEngine', }, } EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
<commit_before>from defaults import * DEBUG=False DATABASES = { 'default': { #'ENGINE': 'django.db.backends.postgresql_psycopg2', 'ENGINE': 'django.contrib.gis.db.backends.postgis', # 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'atlas_travis', 'USER': 'postgres', 'PASSWORD': '', 'HOST': '', # Set to empty string for localhost. Not used with sqlite3. 'PORT': '', # Set to empty string for default. Not used with sqlite3. } } # Haystack doesn't correctly hook up RealTimeIndex signals when # migrations are enabled, so disable migrations. # See https://github.com/toastdriven/django-haystack/issues/599 SOUTH_TESTS_MIGRATE = False HAYSTACK_CONNECTIONS = { 'default': { 'ENGINE': 'storybase_geo.search.backends.Solr2155Engine', 'URL': 'http://localhost:8983/solr/travis', # If the Solr/Jetty install on Travis CI is broken, use the # mock backend which will cause some tests to be skipped #'ENGINE': 'haystack.backends.simple_backend.SimpleEngine', }, } EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend' <commit_msg>Add SECRET_KEY to Travis CI settings ``settings.SECRET_KEY`` is required in Django 1.5 Addresses #660<commit_after>from defaults import * DEBUG=False SECRET_KEY='+9*_1$hry$2r5#723%_a@uju&-skn)^042r+d_eupq*az8o^(w' DATABASES = { 'default': { #'ENGINE': 'django.db.backends.postgresql_psycopg2', 'ENGINE': 'django.contrib.gis.db.backends.postgis', # 'ENGINE': 'django.db.backends.sqlite3', 'NAME': 'atlas_travis', 'USER': 'postgres', 'PASSWORD': '', 'HOST': '', # Set to empty string for localhost. Not used with sqlite3. 'PORT': '', # Set to empty string for default. Not used with sqlite3. } } # Haystack doesn't correctly hook up RealTimeIndex signals when # migrations are enabled, so disable migrations. # See https://github.com/toastdriven/django-haystack/issues/599 SOUTH_TESTS_MIGRATE = False HAYSTACK_CONNECTIONS = { 'default': { 'ENGINE': 'storybase_geo.search.backends.Solr2155Engine', 'URL': 'http://localhost:8983/solr/travis', # If the Solr/Jetty install on Travis CI is broken, use the # mock backend which will cause some tests to be skipped #'ENGINE': 'haystack.backends.simple_backend.SimpleEngine', }, } EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
d7025f92a240284d130ce455b6975ede42d0228e
chalice/cli/filewatch/eventbased.py
chalice/cli/filewatch/eventbased.py
import threading # noqa from typing import Callable, Optional # noqa import watchdog.observers from watchdog.events import FileSystemEventHandler from watchdog.events import FileSystemEvent # noqa from chalice.cli.filewatch import FileWatcher, WorkerProcess class WatchdogWorkerProcess(WorkerProcess): """Worker that runs the chalice dev server.""" def _start_file_watcher(self, project_dir): # type: (str) -> None restart_callback = WatchdogRestarter(self._restart_event) watcher = WatchdogFileWatcher() watcher.watch_for_file_changes( project_dir, restart_callback) class WatchdogFileWatcher(FileWatcher): def watch_for_file_changes(self, root_dir, callback): # type: (str, Callable[[], None]) -> None observer = watchdog.observers.Observer() observer.schedule(callback, root_dir, recursive=True) observer.start() class WatchdogRestarter(FileSystemEventHandler): def __init__(self, restart_event): # type: (threading.Event) -> None # The reason we're using threading self.restart_event = restart_event def on_any_event(self, event): # type: (FileSystemEvent) -> None # If we modify a file we'll get a FileModifiedEvent # as well as a DirectoryModifiedEvent. # We only care about reloading is a file is modified. if event.is_directory: return self() def __call__(self): # type: () -> None self.restart_event.set()
import threading # noqa from typing import Callable, Optional # noqa import watchdog.observers # pylint: disable=import-error from watchdog import events # pylint: disable=import-error from chalice.cli.filewatch import FileWatcher, WorkerProcess class WatchdogWorkerProcess(WorkerProcess): """Worker that runs the chalice dev server.""" def _start_file_watcher(self, project_dir): # type: (str) -> None restart_callback = WatchdogRestarter(self._restart_event) watcher = WatchdogFileWatcher() watcher.watch_for_file_changes( project_dir, restart_callback) class WatchdogFileWatcher(FileWatcher): def watch_for_file_changes(self, root_dir, callback): # type: (str, Callable[[], None]) -> None observer = watchdog.observers.Observer() observer.schedule(callback, root_dir, recursive=True) observer.start() class WatchdogRestarter(events.FileSystemEventHandler): def __init__(self, restart_event): # type: (threading.Event) -> None # The reason we're using threading self.restart_event = restart_event def on_any_event(self, event): # type: (events.FileSystemEvent) -> None # If we modify a file we'll get a FileModifiedEvent # as well as a DirectoryModifiedEvent. # We only care about reloading is a file is modified. if event.is_directory: return self() def __call__(self): # type: () -> None self.restart_event.set()
Make prcheck pass without needing cond deps
Make prcheck pass without needing cond deps
Python
apache-2.0
awslabs/chalice
import threading # noqa from typing import Callable, Optional # noqa import watchdog.observers from watchdog.events import FileSystemEventHandler from watchdog.events import FileSystemEvent # noqa from chalice.cli.filewatch import FileWatcher, WorkerProcess class WatchdogWorkerProcess(WorkerProcess): """Worker that runs the chalice dev server.""" def _start_file_watcher(self, project_dir): # type: (str) -> None restart_callback = WatchdogRestarter(self._restart_event) watcher = WatchdogFileWatcher() watcher.watch_for_file_changes( project_dir, restart_callback) class WatchdogFileWatcher(FileWatcher): def watch_for_file_changes(self, root_dir, callback): # type: (str, Callable[[], None]) -> None observer = watchdog.observers.Observer() observer.schedule(callback, root_dir, recursive=True) observer.start() class WatchdogRestarter(FileSystemEventHandler): def __init__(self, restart_event): # type: (threading.Event) -> None # The reason we're using threading self.restart_event = restart_event def on_any_event(self, event): # type: (FileSystemEvent) -> None # If we modify a file we'll get a FileModifiedEvent # as well as a DirectoryModifiedEvent. # We only care about reloading is a file is modified. if event.is_directory: return self() def __call__(self): # type: () -> None self.restart_event.set() Make prcheck pass without needing cond deps
import threading # noqa from typing import Callable, Optional # noqa import watchdog.observers # pylint: disable=import-error from watchdog import events # pylint: disable=import-error from chalice.cli.filewatch import FileWatcher, WorkerProcess class WatchdogWorkerProcess(WorkerProcess): """Worker that runs the chalice dev server.""" def _start_file_watcher(self, project_dir): # type: (str) -> None restart_callback = WatchdogRestarter(self._restart_event) watcher = WatchdogFileWatcher() watcher.watch_for_file_changes( project_dir, restart_callback) class WatchdogFileWatcher(FileWatcher): def watch_for_file_changes(self, root_dir, callback): # type: (str, Callable[[], None]) -> None observer = watchdog.observers.Observer() observer.schedule(callback, root_dir, recursive=True) observer.start() class WatchdogRestarter(events.FileSystemEventHandler): def __init__(self, restart_event): # type: (threading.Event) -> None # The reason we're using threading self.restart_event = restart_event def on_any_event(self, event): # type: (events.FileSystemEvent) -> None # If we modify a file we'll get a FileModifiedEvent # as well as a DirectoryModifiedEvent. # We only care about reloading is a file is modified. if event.is_directory: return self() def __call__(self): # type: () -> None self.restart_event.set()
<commit_before>import threading # noqa from typing import Callable, Optional # noqa import watchdog.observers from watchdog.events import FileSystemEventHandler from watchdog.events import FileSystemEvent # noqa from chalice.cli.filewatch import FileWatcher, WorkerProcess class WatchdogWorkerProcess(WorkerProcess): """Worker that runs the chalice dev server.""" def _start_file_watcher(self, project_dir): # type: (str) -> None restart_callback = WatchdogRestarter(self._restart_event) watcher = WatchdogFileWatcher() watcher.watch_for_file_changes( project_dir, restart_callback) class WatchdogFileWatcher(FileWatcher): def watch_for_file_changes(self, root_dir, callback): # type: (str, Callable[[], None]) -> None observer = watchdog.observers.Observer() observer.schedule(callback, root_dir, recursive=True) observer.start() class WatchdogRestarter(FileSystemEventHandler): def __init__(self, restart_event): # type: (threading.Event) -> None # The reason we're using threading self.restart_event = restart_event def on_any_event(self, event): # type: (FileSystemEvent) -> None # If we modify a file we'll get a FileModifiedEvent # as well as a DirectoryModifiedEvent. # We only care about reloading is a file is modified. if event.is_directory: return self() def __call__(self): # type: () -> None self.restart_event.set() <commit_msg>Make prcheck pass without needing cond deps<commit_after>
import threading # noqa from typing import Callable, Optional # noqa import watchdog.observers # pylint: disable=import-error from watchdog import events # pylint: disable=import-error from chalice.cli.filewatch import FileWatcher, WorkerProcess class WatchdogWorkerProcess(WorkerProcess): """Worker that runs the chalice dev server.""" def _start_file_watcher(self, project_dir): # type: (str) -> None restart_callback = WatchdogRestarter(self._restart_event) watcher = WatchdogFileWatcher() watcher.watch_for_file_changes( project_dir, restart_callback) class WatchdogFileWatcher(FileWatcher): def watch_for_file_changes(self, root_dir, callback): # type: (str, Callable[[], None]) -> None observer = watchdog.observers.Observer() observer.schedule(callback, root_dir, recursive=True) observer.start() class WatchdogRestarter(events.FileSystemEventHandler): def __init__(self, restart_event): # type: (threading.Event) -> None # The reason we're using threading self.restart_event = restart_event def on_any_event(self, event): # type: (events.FileSystemEvent) -> None # If we modify a file we'll get a FileModifiedEvent # as well as a DirectoryModifiedEvent. # We only care about reloading is a file is modified. if event.is_directory: return self() def __call__(self): # type: () -> None self.restart_event.set()
import threading # noqa from typing import Callable, Optional # noqa import watchdog.observers from watchdog.events import FileSystemEventHandler from watchdog.events import FileSystemEvent # noqa from chalice.cli.filewatch import FileWatcher, WorkerProcess class WatchdogWorkerProcess(WorkerProcess): """Worker that runs the chalice dev server.""" def _start_file_watcher(self, project_dir): # type: (str) -> None restart_callback = WatchdogRestarter(self._restart_event) watcher = WatchdogFileWatcher() watcher.watch_for_file_changes( project_dir, restart_callback) class WatchdogFileWatcher(FileWatcher): def watch_for_file_changes(self, root_dir, callback): # type: (str, Callable[[], None]) -> None observer = watchdog.observers.Observer() observer.schedule(callback, root_dir, recursive=True) observer.start() class WatchdogRestarter(FileSystemEventHandler): def __init__(self, restart_event): # type: (threading.Event) -> None # The reason we're using threading self.restart_event = restart_event def on_any_event(self, event): # type: (FileSystemEvent) -> None # If we modify a file we'll get a FileModifiedEvent # as well as a DirectoryModifiedEvent. # We only care about reloading is a file is modified. if event.is_directory: return self() def __call__(self): # type: () -> None self.restart_event.set() Make prcheck pass without needing cond depsimport threading # noqa from typing import Callable, Optional # noqa import watchdog.observers # pylint: disable=import-error from watchdog import events # pylint: disable=import-error from chalice.cli.filewatch import FileWatcher, WorkerProcess class WatchdogWorkerProcess(WorkerProcess): """Worker that runs the chalice dev server.""" def _start_file_watcher(self, project_dir): # type: (str) -> None restart_callback = WatchdogRestarter(self._restart_event) watcher = WatchdogFileWatcher() watcher.watch_for_file_changes( project_dir, restart_callback) class WatchdogFileWatcher(FileWatcher): def watch_for_file_changes(self, root_dir, callback): # type: (str, Callable[[], None]) -> None observer = watchdog.observers.Observer() observer.schedule(callback, root_dir, recursive=True) observer.start() class WatchdogRestarter(events.FileSystemEventHandler): def __init__(self, restart_event): # type: (threading.Event) -> None # The reason we're using threading self.restart_event = restart_event def on_any_event(self, event): # type: (events.FileSystemEvent) -> None # If we modify a file we'll get a FileModifiedEvent # as well as a DirectoryModifiedEvent. # We only care about reloading is a file is modified. if event.is_directory: return self() def __call__(self): # type: () -> None self.restart_event.set()
<commit_before>import threading # noqa from typing import Callable, Optional # noqa import watchdog.observers from watchdog.events import FileSystemEventHandler from watchdog.events import FileSystemEvent # noqa from chalice.cli.filewatch import FileWatcher, WorkerProcess class WatchdogWorkerProcess(WorkerProcess): """Worker that runs the chalice dev server.""" def _start_file_watcher(self, project_dir): # type: (str) -> None restart_callback = WatchdogRestarter(self._restart_event) watcher = WatchdogFileWatcher() watcher.watch_for_file_changes( project_dir, restart_callback) class WatchdogFileWatcher(FileWatcher): def watch_for_file_changes(self, root_dir, callback): # type: (str, Callable[[], None]) -> None observer = watchdog.observers.Observer() observer.schedule(callback, root_dir, recursive=True) observer.start() class WatchdogRestarter(FileSystemEventHandler): def __init__(self, restart_event): # type: (threading.Event) -> None # The reason we're using threading self.restart_event = restart_event def on_any_event(self, event): # type: (FileSystemEvent) -> None # If we modify a file we'll get a FileModifiedEvent # as well as a DirectoryModifiedEvent. # We only care about reloading is a file is modified. if event.is_directory: return self() def __call__(self): # type: () -> None self.restart_event.set() <commit_msg>Make prcheck pass without needing cond deps<commit_after>import threading # noqa from typing import Callable, Optional # noqa import watchdog.observers # pylint: disable=import-error from watchdog import events # pylint: disable=import-error from chalice.cli.filewatch import FileWatcher, WorkerProcess class WatchdogWorkerProcess(WorkerProcess): """Worker that runs the chalice dev server.""" def _start_file_watcher(self, project_dir): # type: (str) -> None restart_callback = WatchdogRestarter(self._restart_event) watcher = WatchdogFileWatcher() watcher.watch_for_file_changes( project_dir, restart_callback) class WatchdogFileWatcher(FileWatcher): def watch_for_file_changes(self, root_dir, callback): # type: (str, Callable[[], None]) -> None observer = watchdog.observers.Observer() observer.schedule(callback, root_dir, recursive=True) observer.start() class WatchdogRestarter(events.FileSystemEventHandler): def __init__(self, restart_event): # type: (threading.Event) -> None # The reason we're using threading self.restart_event = restart_event def on_any_event(self, event): # type: (events.FileSystemEvent) -> None # If we modify a file we'll get a FileModifiedEvent # as well as a DirectoryModifiedEvent. # We only care about reloading is a file is modified. if event.is_directory: return self() def __call__(self): # type: () -> None self.restart_event.set()
1d302213b76cac74afaf71eb799ece022883877f
trakt/__init__.py
trakt/__init__.py
from trakt.core.errors import ERRORS from trakt.core.exceptions import RequestError, ClientError, ServerError from trakt.client import TraktClient from trakt.helpers import has_attribute from trakt.version import __version__ from six import add_metaclass __all__ = [ 'Trakt', 'RequestError', 'ClientError', 'ServerError', 'ERRORS' ] class TraktMeta(type): def __getattr__(self, name): if has_attribute(self, name): return super(TraktMeta, self).__getattribute__(name) if self.client is None: self.construct() return getattr(self.client, name) def __setattr__(self, name, value): if has_attribute(self, name): return super(TraktMeta, self).__setattr__(name, value) if self.client is None: self.construct() setattr(self.client, name, value) def __getitem__(self, key): if self.client is None: self.construct() return self.client[key] @add_metaclass(TraktMeta) class Trakt(object): client = None @classmethod def construct(cls): cls.client = TraktClient() # Set default logging handler to avoid "No handler found" warnings. import logging try: # Python 2.7+ from logging import NullHandler except ImportError: class NullHandler(logging.Handler): def emit(self, record): pass logging.getLogger(__name__).addHandler(NullHandler())
from trakt.core.errors import ERRORS from trakt.core.exceptions import RequestError, ClientError, ServerError from trakt.client import TraktClient from trakt.helpers import has_attribute from trakt.version import __version__ # NOQA from six import add_metaclass __all__ = [ 'Trakt', 'RequestError', 'ClientError', 'ServerError', 'ERRORS' ] class TraktMeta(type): def __getattr__(self, name): if has_attribute(self, name): return super(TraktMeta, self).__getattribute__(name) if self.client is None: self.construct() return getattr(self.client, name) def __setattr__(self, name, value): if has_attribute(self, name): return super(TraktMeta, self).__setattr__(name, value) if self.client is None: self.construct() setattr(self.client, name, value) def __getitem__(self, key): if self.client is None: self.construct() return self.client[key] @add_metaclass(TraktMeta) class Trakt(object): client = None @classmethod def construct(cls): cls.client = TraktClient() # Set default logging handler to avoid "No handler found" warnings. import logging try: # Python 2.7+ from logging import NullHandler except ImportError: class NullHandler(logging.Handler): def emit(self, record): pass logging.getLogger(__name__).addHandler(NullHandler())
Disable flake8 warning on `__version__` import
Disable flake8 warning on `__version__` import
Python
mit
fuzeman/trakt.py
from trakt.core.errors import ERRORS from trakt.core.exceptions import RequestError, ClientError, ServerError from trakt.client import TraktClient from trakt.helpers import has_attribute from trakt.version import __version__ from six import add_metaclass __all__ = [ 'Trakt', 'RequestError', 'ClientError', 'ServerError', 'ERRORS' ] class TraktMeta(type): def __getattr__(self, name): if has_attribute(self, name): return super(TraktMeta, self).__getattribute__(name) if self.client is None: self.construct() return getattr(self.client, name) def __setattr__(self, name, value): if has_attribute(self, name): return super(TraktMeta, self).__setattr__(name, value) if self.client is None: self.construct() setattr(self.client, name, value) def __getitem__(self, key): if self.client is None: self.construct() return self.client[key] @add_metaclass(TraktMeta) class Trakt(object): client = None @classmethod def construct(cls): cls.client = TraktClient() # Set default logging handler to avoid "No handler found" warnings. import logging try: # Python 2.7+ from logging import NullHandler except ImportError: class NullHandler(logging.Handler): def emit(self, record): pass logging.getLogger(__name__).addHandler(NullHandler()) Disable flake8 warning on `__version__` import
from trakt.core.errors import ERRORS from trakt.core.exceptions import RequestError, ClientError, ServerError from trakt.client import TraktClient from trakt.helpers import has_attribute from trakt.version import __version__ # NOQA from six import add_metaclass __all__ = [ 'Trakt', 'RequestError', 'ClientError', 'ServerError', 'ERRORS' ] class TraktMeta(type): def __getattr__(self, name): if has_attribute(self, name): return super(TraktMeta, self).__getattribute__(name) if self.client is None: self.construct() return getattr(self.client, name) def __setattr__(self, name, value): if has_attribute(self, name): return super(TraktMeta, self).__setattr__(name, value) if self.client is None: self.construct() setattr(self.client, name, value) def __getitem__(self, key): if self.client is None: self.construct() return self.client[key] @add_metaclass(TraktMeta) class Trakt(object): client = None @classmethod def construct(cls): cls.client = TraktClient() # Set default logging handler to avoid "No handler found" warnings. import logging try: # Python 2.7+ from logging import NullHandler except ImportError: class NullHandler(logging.Handler): def emit(self, record): pass logging.getLogger(__name__).addHandler(NullHandler())
<commit_before>from trakt.core.errors import ERRORS from trakt.core.exceptions import RequestError, ClientError, ServerError from trakt.client import TraktClient from trakt.helpers import has_attribute from trakt.version import __version__ from six import add_metaclass __all__ = [ 'Trakt', 'RequestError', 'ClientError', 'ServerError', 'ERRORS' ] class TraktMeta(type): def __getattr__(self, name): if has_attribute(self, name): return super(TraktMeta, self).__getattribute__(name) if self.client is None: self.construct() return getattr(self.client, name) def __setattr__(self, name, value): if has_attribute(self, name): return super(TraktMeta, self).__setattr__(name, value) if self.client is None: self.construct() setattr(self.client, name, value) def __getitem__(self, key): if self.client is None: self.construct() return self.client[key] @add_metaclass(TraktMeta) class Trakt(object): client = None @classmethod def construct(cls): cls.client = TraktClient() # Set default logging handler to avoid "No handler found" warnings. import logging try: # Python 2.7+ from logging import NullHandler except ImportError: class NullHandler(logging.Handler): def emit(self, record): pass logging.getLogger(__name__).addHandler(NullHandler()) <commit_msg>Disable flake8 warning on `__version__` import<commit_after>
from trakt.core.errors import ERRORS from trakt.core.exceptions import RequestError, ClientError, ServerError from trakt.client import TraktClient from trakt.helpers import has_attribute from trakt.version import __version__ # NOQA from six import add_metaclass __all__ = [ 'Trakt', 'RequestError', 'ClientError', 'ServerError', 'ERRORS' ] class TraktMeta(type): def __getattr__(self, name): if has_attribute(self, name): return super(TraktMeta, self).__getattribute__(name) if self.client is None: self.construct() return getattr(self.client, name) def __setattr__(self, name, value): if has_attribute(self, name): return super(TraktMeta, self).__setattr__(name, value) if self.client is None: self.construct() setattr(self.client, name, value) def __getitem__(self, key): if self.client is None: self.construct() return self.client[key] @add_metaclass(TraktMeta) class Trakt(object): client = None @classmethod def construct(cls): cls.client = TraktClient() # Set default logging handler to avoid "No handler found" warnings. import logging try: # Python 2.7+ from logging import NullHandler except ImportError: class NullHandler(logging.Handler): def emit(self, record): pass logging.getLogger(__name__).addHandler(NullHandler())
from trakt.core.errors import ERRORS from trakt.core.exceptions import RequestError, ClientError, ServerError from trakt.client import TraktClient from trakt.helpers import has_attribute from trakt.version import __version__ from six import add_metaclass __all__ = [ 'Trakt', 'RequestError', 'ClientError', 'ServerError', 'ERRORS' ] class TraktMeta(type): def __getattr__(self, name): if has_attribute(self, name): return super(TraktMeta, self).__getattribute__(name) if self.client is None: self.construct() return getattr(self.client, name) def __setattr__(self, name, value): if has_attribute(self, name): return super(TraktMeta, self).__setattr__(name, value) if self.client is None: self.construct() setattr(self.client, name, value) def __getitem__(self, key): if self.client is None: self.construct() return self.client[key] @add_metaclass(TraktMeta) class Trakt(object): client = None @classmethod def construct(cls): cls.client = TraktClient() # Set default logging handler to avoid "No handler found" warnings. import logging try: # Python 2.7+ from logging import NullHandler except ImportError: class NullHandler(logging.Handler): def emit(self, record): pass logging.getLogger(__name__).addHandler(NullHandler()) Disable flake8 warning on `__version__` importfrom trakt.core.errors import ERRORS from trakt.core.exceptions import RequestError, ClientError, ServerError from trakt.client import TraktClient from trakt.helpers import has_attribute from trakt.version import __version__ # NOQA from six import add_metaclass __all__ = [ 'Trakt', 'RequestError', 'ClientError', 'ServerError', 'ERRORS' ] class TraktMeta(type): def __getattr__(self, name): if has_attribute(self, name): return super(TraktMeta, self).__getattribute__(name) if self.client is None: self.construct() return getattr(self.client, name) def __setattr__(self, name, value): if has_attribute(self, name): return super(TraktMeta, self).__setattr__(name, value) if self.client is None: self.construct() setattr(self.client, name, value) def __getitem__(self, key): if self.client is None: self.construct() return self.client[key] @add_metaclass(TraktMeta) class Trakt(object): client = None @classmethod def construct(cls): cls.client = TraktClient() # Set default logging handler to avoid "No handler found" warnings. import logging try: # Python 2.7+ from logging import NullHandler except ImportError: class NullHandler(logging.Handler): def emit(self, record): pass logging.getLogger(__name__).addHandler(NullHandler())
<commit_before>from trakt.core.errors import ERRORS from trakt.core.exceptions import RequestError, ClientError, ServerError from trakt.client import TraktClient from trakt.helpers import has_attribute from trakt.version import __version__ from six import add_metaclass __all__ = [ 'Trakt', 'RequestError', 'ClientError', 'ServerError', 'ERRORS' ] class TraktMeta(type): def __getattr__(self, name): if has_attribute(self, name): return super(TraktMeta, self).__getattribute__(name) if self.client is None: self.construct() return getattr(self.client, name) def __setattr__(self, name, value): if has_attribute(self, name): return super(TraktMeta, self).__setattr__(name, value) if self.client is None: self.construct() setattr(self.client, name, value) def __getitem__(self, key): if self.client is None: self.construct() return self.client[key] @add_metaclass(TraktMeta) class Trakt(object): client = None @classmethod def construct(cls): cls.client = TraktClient() # Set default logging handler to avoid "No handler found" warnings. import logging try: # Python 2.7+ from logging import NullHandler except ImportError: class NullHandler(logging.Handler): def emit(self, record): pass logging.getLogger(__name__).addHandler(NullHandler()) <commit_msg>Disable flake8 warning on `__version__` import<commit_after>from trakt.core.errors import ERRORS from trakt.core.exceptions import RequestError, ClientError, ServerError from trakt.client import TraktClient from trakt.helpers import has_attribute from trakt.version import __version__ # NOQA from six import add_metaclass __all__ = [ 'Trakt', 'RequestError', 'ClientError', 'ServerError', 'ERRORS' ] class TraktMeta(type): def __getattr__(self, name): if has_attribute(self, name): return super(TraktMeta, self).__getattribute__(name) if self.client is None: self.construct() return getattr(self.client, name) def __setattr__(self, name, value): if has_attribute(self, name): return super(TraktMeta, self).__setattr__(name, value) if self.client is None: self.construct() setattr(self.client, name, value) def __getitem__(self, key): if self.client is None: self.construct() return self.client[key] @add_metaclass(TraktMeta) class Trakt(object): client = None @classmethod def construct(cls): cls.client = TraktClient() # Set default logging handler to avoid "No handler found" warnings. import logging try: # Python 2.7+ from logging import NullHandler except ImportError: class NullHandler(logging.Handler): def emit(self, record): pass logging.getLogger(__name__).addHandler(NullHandler())
08bb24ad80db72457c87533288b97942cc178dd6
src/kanboard/urls.py
src/kanboard/urls.py
import os from django.conf.urls.defaults import patterns, url import kanboard urlpatterns = patterns('kanboard.views', url(r'^board/(?P<board_slug>[\w-]+)/$', 'board'), url(r'^board/(?P<board_slug>[\w-]+)/update/$', 'update'), ) # Serve static content static_root = os.path.join(os.path.dirname(kanboard.__file__), 'static') urlpatterns += patterns('', (r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': static_root}) )
from django.conf.urls.defaults import patterns, url urlpatterns = patterns('kanboard.views', url(r'^board/(?P<board_slug>[\w-]+)/$', 'board'), url(r'^board/(?P<board_slug>[\w-]+)/update/$', 'update'), )
Remove static file serving (using django-staticfiles instead is recommended)
Remove static file serving (using django-staticfiles instead is recommended)
Python
bsd-3-clause
zellyn/django-kanboard,zellyn/django-kanboard
import os from django.conf.urls.defaults import patterns, url import kanboard urlpatterns = patterns('kanboard.views', url(r'^board/(?P<board_slug>[\w-]+)/$', 'board'), url(r'^board/(?P<board_slug>[\w-]+)/update/$', 'update'), ) # Serve static content static_root = os.path.join(os.path.dirname(kanboard.__file__), 'static') urlpatterns += patterns('', (r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': static_root}) ) Remove static file serving (using django-staticfiles instead is recommended)
from django.conf.urls.defaults import patterns, url urlpatterns = patterns('kanboard.views', url(r'^board/(?P<board_slug>[\w-]+)/$', 'board'), url(r'^board/(?P<board_slug>[\w-]+)/update/$', 'update'), )
<commit_before>import os from django.conf.urls.defaults import patterns, url import kanboard urlpatterns = patterns('kanboard.views', url(r'^board/(?P<board_slug>[\w-]+)/$', 'board'), url(r'^board/(?P<board_slug>[\w-]+)/update/$', 'update'), ) # Serve static content static_root = os.path.join(os.path.dirname(kanboard.__file__), 'static') urlpatterns += patterns('', (r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': static_root}) ) <commit_msg>Remove static file serving (using django-staticfiles instead is recommended)<commit_after>
from django.conf.urls.defaults import patterns, url urlpatterns = patterns('kanboard.views', url(r'^board/(?P<board_slug>[\w-]+)/$', 'board'), url(r'^board/(?P<board_slug>[\w-]+)/update/$', 'update'), )
import os from django.conf.urls.defaults import patterns, url import kanboard urlpatterns = patterns('kanboard.views', url(r'^board/(?P<board_slug>[\w-]+)/$', 'board'), url(r'^board/(?P<board_slug>[\w-]+)/update/$', 'update'), ) # Serve static content static_root = os.path.join(os.path.dirname(kanboard.__file__), 'static') urlpatterns += patterns('', (r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': static_root}) ) Remove static file serving (using django-staticfiles instead is recommended)from django.conf.urls.defaults import patterns, url urlpatterns = patterns('kanboard.views', url(r'^board/(?P<board_slug>[\w-]+)/$', 'board'), url(r'^board/(?P<board_slug>[\w-]+)/update/$', 'update'), )
<commit_before>import os from django.conf.urls.defaults import patterns, url import kanboard urlpatterns = patterns('kanboard.views', url(r'^board/(?P<board_slug>[\w-]+)/$', 'board'), url(r'^board/(?P<board_slug>[\w-]+)/update/$', 'update'), ) # Serve static content static_root = os.path.join(os.path.dirname(kanboard.__file__), 'static') urlpatterns += patterns('', (r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': static_root}) ) <commit_msg>Remove static file serving (using django-staticfiles instead is recommended)<commit_after>from django.conf.urls.defaults import patterns, url urlpatterns = patterns('kanboard.views', url(r'^board/(?P<board_slug>[\w-]+)/$', 'board'), url(r'^board/(?P<board_slug>[\w-]+)/update/$', 'update'), )
d63d6070576bf22d60bf9684e417163201814353
webapp/worker.py
webapp/worker.py
#!/usr/bin/env python # -*- coding: utf-8 -*- """Run a worker for the job queue.""" import os import sys from redis import StrictRedis from rq import Connection, Queue, Worker from bootstrap.util import app_context if __name__ == '__main__': config_name = os.environ.get('ENVIRONMENT') if config_name is None: sys.stderr.write("Environment variable 'ENVIRONMENT' must be set but isn't.") sys.exit() with app_context(config_name) as app: redis = StrictRedis(app.config['REDIS_URL']) with Connection(redis): queues = [Queue()] worker = Worker(queues) worker.work()
#!/usr/bin/env python # -*- coding: utf-8 -*- """Run a worker for the job queue.""" import sys from redis import StrictRedis from rq import Connection, Queue, Worker from bootstrap.util import app_context, get_config_name_from_env if __name__ == '__main__': try: config_name = get_config_name_from_env() except Exception as e: sys.stderr.write(str(e) + '\n') sys.exit() with app_context(config_name) as app: redis = StrictRedis(app.config['REDIS_URL']) with Connection(redis): queues = [Queue()] worker = Worker(queues) worker.work()
Use bootstrap utility to retrieve the configuration name from the environment.
Use bootstrap utility to retrieve the configuration name from the environment.
Python
bsd-3-clause
m-ober/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps
#!/usr/bin/env python # -*- coding: utf-8 -*- """Run a worker for the job queue.""" import os import sys from redis import StrictRedis from rq import Connection, Queue, Worker from bootstrap.util import app_context if __name__ == '__main__': config_name = os.environ.get('ENVIRONMENT') if config_name is None: sys.stderr.write("Environment variable 'ENVIRONMENT' must be set but isn't.") sys.exit() with app_context(config_name) as app: redis = StrictRedis(app.config['REDIS_URL']) with Connection(redis): queues = [Queue()] worker = Worker(queues) worker.work() Use bootstrap utility to retrieve the configuration name from the environment.
#!/usr/bin/env python # -*- coding: utf-8 -*- """Run a worker for the job queue.""" import sys from redis import StrictRedis from rq import Connection, Queue, Worker from bootstrap.util import app_context, get_config_name_from_env if __name__ == '__main__': try: config_name = get_config_name_from_env() except Exception as e: sys.stderr.write(str(e) + '\n') sys.exit() with app_context(config_name) as app: redis = StrictRedis(app.config['REDIS_URL']) with Connection(redis): queues = [Queue()] worker = Worker(queues) worker.work()
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- """Run a worker for the job queue.""" import os import sys from redis import StrictRedis from rq import Connection, Queue, Worker from bootstrap.util import app_context if __name__ == '__main__': config_name = os.environ.get('ENVIRONMENT') if config_name is None: sys.stderr.write("Environment variable 'ENVIRONMENT' must be set but isn't.") sys.exit() with app_context(config_name) as app: redis = StrictRedis(app.config['REDIS_URL']) with Connection(redis): queues = [Queue()] worker = Worker(queues) worker.work() <commit_msg>Use bootstrap utility to retrieve the configuration name from the environment.<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- """Run a worker for the job queue.""" import sys from redis import StrictRedis from rq import Connection, Queue, Worker from bootstrap.util import app_context, get_config_name_from_env if __name__ == '__main__': try: config_name = get_config_name_from_env() except Exception as e: sys.stderr.write(str(e) + '\n') sys.exit() with app_context(config_name) as app: redis = StrictRedis(app.config['REDIS_URL']) with Connection(redis): queues = [Queue()] worker = Worker(queues) worker.work()
#!/usr/bin/env python # -*- coding: utf-8 -*- """Run a worker for the job queue.""" import os import sys from redis import StrictRedis from rq import Connection, Queue, Worker from bootstrap.util import app_context if __name__ == '__main__': config_name = os.environ.get('ENVIRONMENT') if config_name is None: sys.stderr.write("Environment variable 'ENVIRONMENT' must be set but isn't.") sys.exit() with app_context(config_name) as app: redis = StrictRedis(app.config['REDIS_URL']) with Connection(redis): queues = [Queue()] worker = Worker(queues) worker.work() Use bootstrap utility to retrieve the configuration name from the environment.#!/usr/bin/env python # -*- coding: utf-8 -*- """Run a worker for the job queue.""" import sys from redis import StrictRedis from rq import Connection, Queue, Worker from bootstrap.util import app_context, get_config_name_from_env if __name__ == '__main__': try: config_name = get_config_name_from_env() except Exception as e: sys.stderr.write(str(e) + '\n') sys.exit() with app_context(config_name) as app: redis = StrictRedis(app.config['REDIS_URL']) with Connection(redis): queues = [Queue()] worker = Worker(queues) worker.work()
<commit_before>#!/usr/bin/env python # -*- coding: utf-8 -*- """Run a worker for the job queue.""" import os import sys from redis import StrictRedis from rq import Connection, Queue, Worker from bootstrap.util import app_context if __name__ == '__main__': config_name = os.environ.get('ENVIRONMENT') if config_name is None: sys.stderr.write("Environment variable 'ENVIRONMENT' must be set but isn't.") sys.exit() with app_context(config_name) as app: redis = StrictRedis(app.config['REDIS_URL']) with Connection(redis): queues = [Queue()] worker = Worker(queues) worker.work() <commit_msg>Use bootstrap utility to retrieve the configuration name from the environment.<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- """Run a worker for the job queue.""" import sys from redis import StrictRedis from rq import Connection, Queue, Worker from bootstrap.util import app_context, get_config_name_from_env if __name__ == '__main__': try: config_name = get_config_name_from_env() except Exception as e: sys.stderr.write(str(e) + '\n') sys.exit() with app_context(config_name) as app: redis = StrictRedis(app.config['REDIS_URL']) with Connection(redis): queues = [Queue()] worker = Worker(queues) worker.work()
dec2222cde98b395aac303af4e005937f4085b89
src/ggrc_workflows/migrations/versions/20140804203436_32221e9f330c_remove_prohibitive_foreign_key_.py
src/ggrc_workflows/migrations/versions/20140804203436_32221e9f330c_remove_prohibitive_foreign_key_.py
"""Remove prohibitive foreign key constraints Revision ID: 32221e9f330c Revises: 235b7b9989be Create Date: 2014-08-04 20:34:36.697866 """ # revision identifiers, used by Alembic. revision = '32221e9f330c' down_revision = '235b7b9989be' from alembic import op import sqlalchemy as sa def upgrade(): op.drop_constraint( 'cycle_task_group_object_tasks_ibfk_4', table_name='cycle_task_group_object_tasks', type_='foreignkey' ) op.drop_constraint( 'cycle_task_group_objects_ibfk_4', table_name='cycle_task_group_objects', type_='foreignkey' ) def downgrade(): pass
"""Remove prohibitive foreign key constraints Revision ID: 32221e9f330c Revises: 235b7b9989be Create Date: 2014-08-04 20:34:36.697866 """ # revision identifiers, used by Alembic. revision = '32221e9f330c' down_revision = '235b7b9989be' from alembic import op import sqlalchemy as sa def upgrade(): op.drop_constraint( 'uq_t_workflows', table_name='workflows', type_='unique') op.drop_constraint( 'uq_t_task_groups', table_name='task_groups', type_='unique') op.drop_constraint( 'cycle_task_group_object_tasks_ibfk_4', table_name='cycle_task_group_object_tasks', type_='foreignkey' ) op.drop_constraint( 'cycle_task_group_objects_ibfk_4', table_name='cycle_task_group_objects', type_='foreignkey' ) def downgrade(): pass
Remove uniqueness constraints on Workflow and TaskGroup
Remove uniqueness constraints on Workflow and TaskGroup * Titles need not be unique anymore
Python
apache-2.0
vladan-m/ggrc-core,hyperNURb/ggrc-core,NejcZupec/ggrc-core,prasannav7/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,hyperNURb/ggrc-core,vladan-m/ggrc-core,plamut/ggrc-core,hasanalom/ggrc-core,prasannav7/ggrc-core,jmakov/ggrc-core,VinnieJohns/ggrc-core,vladan-m/ggrc-core,prasannav7/ggrc-core,plamut/ggrc-core,NejcZupec/ggrc-core,AleksNeStu/ggrc-core,vladan-m/ggrc-core,AleksNeStu/ggrc-core,hasanalom/ggrc-core,uskudnik/ggrc-core,j0gurt/ggrc-core,edofic/ggrc-core,hyperNURb/ggrc-core,andrei-karalionak/ggrc-core,josthkko/ggrc-core,josthkko/ggrc-core,AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,kr41/ggrc-core,hasanalom/ggrc-core,uskudnik/ggrc-core,j0gurt/ggrc-core,jmakov/ggrc-core,jmakov/ggrc-core,josthkko/ggrc-core,kr41/ggrc-core,edofic/ggrc-core,andrei-karalionak/ggrc-core,VinnieJohns/ggrc-core,edofic/ggrc-core,uskudnik/ggrc-core,jmakov/ggrc-core,prasannav7/ggrc-core,vladan-m/ggrc-core,edofic/ggrc-core,hyperNURb/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,jmakov/ggrc-core,NejcZupec/ggrc-core,selahssea/ggrc-core,uskudnik/ggrc-core,j0gurt/ggrc-core,kr41/ggrc-core,j0gurt/ggrc-core,hyperNURb/ggrc-core,uskudnik/ggrc-core,josthkko/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,andrei-karalionak/ggrc-core,hasanalom/ggrc-core,hasanalom/ggrc-core,kr41/ggrc-core,andrei-karalionak/ggrc-core,NejcZupec/ggrc-core
"""Remove prohibitive foreign key constraints Revision ID: 32221e9f330c Revises: 235b7b9989be Create Date: 2014-08-04 20:34:36.697866 """ # revision identifiers, used by Alembic. revision = '32221e9f330c' down_revision = '235b7b9989be' from alembic import op import sqlalchemy as sa def upgrade(): op.drop_constraint( 'cycle_task_group_object_tasks_ibfk_4', table_name='cycle_task_group_object_tasks', type_='foreignkey' ) op.drop_constraint( 'cycle_task_group_objects_ibfk_4', table_name='cycle_task_group_objects', type_='foreignkey' ) def downgrade(): pass Remove uniqueness constraints on Workflow and TaskGroup * Titles need not be unique anymore
"""Remove prohibitive foreign key constraints Revision ID: 32221e9f330c Revises: 235b7b9989be Create Date: 2014-08-04 20:34:36.697866 """ # revision identifiers, used by Alembic. revision = '32221e9f330c' down_revision = '235b7b9989be' from alembic import op import sqlalchemy as sa def upgrade(): op.drop_constraint( 'uq_t_workflows', table_name='workflows', type_='unique') op.drop_constraint( 'uq_t_task_groups', table_name='task_groups', type_='unique') op.drop_constraint( 'cycle_task_group_object_tasks_ibfk_4', table_name='cycle_task_group_object_tasks', type_='foreignkey' ) op.drop_constraint( 'cycle_task_group_objects_ibfk_4', table_name='cycle_task_group_objects', type_='foreignkey' ) def downgrade(): pass
<commit_before> """Remove prohibitive foreign key constraints Revision ID: 32221e9f330c Revises: 235b7b9989be Create Date: 2014-08-04 20:34:36.697866 """ # revision identifiers, used by Alembic. revision = '32221e9f330c' down_revision = '235b7b9989be' from alembic import op import sqlalchemy as sa def upgrade(): op.drop_constraint( 'cycle_task_group_object_tasks_ibfk_4', table_name='cycle_task_group_object_tasks', type_='foreignkey' ) op.drop_constraint( 'cycle_task_group_objects_ibfk_4', table_name='cycle_task_group_objects', type_='foreignkey' ) def downgrade(): pass <commit_msg>Remove uniqueness constraints on Workflow and TaskGroup * Titles need not be unique anymore<commit_after>
"""Remove prohibitive foreign key constraints Revision ID: 32221e9f330c Revises: 235b7b9989be Create Date: 2014-08-04 20:34:36.697866 """ # revision identifiers, used by Alembic. revision = '32221e9f330c' down_revision = '235b7b9989be' from alembic import op import sqlalchemy as sa def upgrade(): op.drop_constraint( 'uq_t_workflows', table_name='workflows', type_='unique') op.drop_constraint( 'uq_t_task_groups', table_name='task_groups', type_='unique') op.drop_constraint( 'cycle_task_group_object_tasks_ibfk_4', table_name='cycle_task_group_object_tasks', type_='foreignkey' ) op.drop_constraint( 'cycle_task_group_objects_ibfk_4', table_name='cycle_task_group_objects', type_='foreignkey' ) def downgrade(): pass
"""Remove prohibitive foreign key constraints Revision ID: 32221e9f330c Revises: 235b7b9989be Create Date: 2014-08-04 20:34:36.697866 """ # revision identifiers, used by Alembic. revision = '32221e9f330c' down_revision = '235b7b9989be' from alembic import op import sqlalchemy as sa def upgrade(): op.drop_constraint( 'cycle_task_group_object_tasks_ibfk_4', table_name='cycle_task_group_object_tasks', type_='foreignkey' ) op.drop_constraint( 'cycle_task_group_objects_ibfk_4', table_name='cycle_task_group_objects', type_='foreignkey' ) def downgrade(): pass Remove uniqueness constraints on Workflow and TaskGroup * Titles need not be unique anymore """Remove prohibitive foreign key constraints Revision ID: 32221e9f330c Revises: 235b7b9989be Create Date: 2014-08-04 20:34:36.697866 """ # revision identifiers, used by Alembic. revision = '32221e9f330c' down_revision = '235b7b9989be' from alembic import op import sqlalchemy as sa def upgrade(): op.drop_constraint( 'uq_t_workflows', table_name='workflows', type_='unique') op.drop_constraint( 'uq_t_task_groups', table_name='task_groups', type_='unique') op.drop_constraint( 'cycle_task_group_object_tasks_ibfk_4', table_name='cycle_task_group_object_tasks', type_='foreignkey' ) op.drop_constraint( 'cycle_task_group_objects_ibfk_4', table_name='cycle_task_group_objects', type_='foreignkey' ) def downgrade(): pass
<commit_before> """Remove prohibitive foreign key constraints Revision ID: 32221e9f330c Revises: 235b7b9989be Create Date: 2014-08-04 20:34:36.697866 """ # revision identifiers, used by Alembic. revision = '32221e9f330c' down_revision = '235b7b9989be' from alembic import op import sqlalchemy as sa def upgrade(): op.drop_constraint( 'cycle_task_group_object_tasks_ibfk_4', table_name='cycle_task_group_object_tasks', type_='foreignkey' ) op.drop_constraint( 'cycle_task_group_objects_ibfk_4', table_name='cycle_task_group_objects', type_='foreignkey' ) def downgrade(): pass <commit_msg>Remove uniqueness constraints on Workflow and TaskGroup * Titles need not be unique anymore<commit_after> """Remove prohibitive foreign key constraints Revision ID: 32221e9f330c Revises: 235b7b9989be Create Date: 2014-08-04 20:34:36.697866 """ # revision identifiers, used by Alembic. revision = '32221e9f330c' down_revision = '235b7b9989be' from alembic import op import sqlalchemy as sa def upgrade(): op.drop_constraint( 'uq_t_workflows', table_name='workflows', type_='unique') op.drop_constraint( 'uq_t_task_groups', table_name='task_groups', type_='unique') op.drop_constraint( 'cycle_task_group_object_tasks_ibfk_4', table_name='cycle_task_group_object_tasks', type_='foreignkey' ) op.drop_constraint( 'cycle_task_group_objects_ibfk_4', table_name='cycle_task_group_objects', type_='foreignkey' ) def downgrade(): pass
a462cca21e8c6456927727cead09f006e63fed16
src/ggrc/models/section.py
src/ggrc/models/section.py
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: # Maintained By: from ggrc import db from .associationproxy import association_proxy from .mixins import BusinessObject, Hierarchical class Section(Hierarchical, BusinessObject, db.Model): __tablename__ = 'sections' directive_id = db.Column(db.Integer, db.ForeignKey('directives.id')) na = db.Column(db.Boolean, default=False, nullable=False) notes = db.Column(db.Text) control_sections = db.relationship('ControlSection', backref='section') controls = association_proxy('control_sections', 'control', 'ControlSection') _publish_attrs = [ 'directive', 'na', 'notes', 'control_sections', 'controls', ] _update_attrs = [ 'directive', 'na', 'notes', 'controls', ]
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: # Maintained By: from ggrc import db from .associationproxy import association_proxy from .mixins import BusinessObject, Hierarchical class Section(Hierarchical, BusinessObject, db.Model): __tablename__ = 'sections' directive_id = db.Column(db.Integer, db.ForeignKey('directives.id')) na = db.Column(db.Boolean, default=False, nullable=False) notes = db.Column(db.Text) control_sections = db.relationship('ControlSection', backref='section') controls = association_proxy('control_sections', 'control', 'ControlSection') _publish_attrs = [ 'directive', 'na', 'notes', 'control_sections', 'controls', ] _update_attrs = [ 'directive', 'na', 'notes', 'controls', ] @classmethod def eager_query(cls): from sqlalchemy import orm query = super(Section, cls).eager_query() return query.options( orm.joinedload('directive'), orm.subqueryload_all('control_sections.control'))
Add eager-loaded links for Section
Add eager-loaded links for Section
Python
apache-2.0
hamyuan/ggrc-self-test,j0gurt/ggrc-core,vladan-m/ggrc-core,ankit-collective/ggrc-core,selahssea/ggrc-core,VinnieJohns/ggrc-core,vladan-m/ggrc-core,hyperNURb/ggrc-core,andrei-karalionak/ggrc-core,AleksNeStu/ggrc-core,hyperNURb/ggrc-core,2947721120/sagacious-capsicum,plamut/ggrc-core,kr41/ggrc-core,vladan-m/ggrc-core,edofic/ggrc-core,edofic/ggrc-core,andrei-karalionak/ggrc-core,2947721120/sagacious-capsicum,josthkko/ggrc-core,hasanalom/ggrc-core,kr41/ggrc-core,edofic/ggrc-core,hasanalom/ggrc-core,uskudnik/ggrc-core,hamyuan/ggrc-self-test,kr41/ggrc-core,NejcZupec/ggrc-core,kr41/ggrc-core,hasanalom/ggrc-core,hamyuan/ggrc-self-test,prasannav7/ggrc-core,andrei-karalionak/ggrc-core,NejcZupec/ggrc-core,2947721120/sagacious-capsicum,uskudnik/ggrc-core,j0gurt/ggrc-core,j0gurt/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,j0gurt/ggrc-core,jmakov/ggrc-core,prasannav7/ggrc-core,NejcZupec/ggrc-core,selahssea/ggrc-core,vladan-m/ggrc-core,ankit-collective/ggrc-core,uskudnik/ggrc-core,2947721120/sagacious-capsicum,ankit-collective/ggrc-core,josthkko/ggrc-core,selahssea/ggrc-core,2947721120/sagacious-capsicum,hasanalom/ggrc-core,edofic/ggrc-core,selahssea/ggrc-core,hyperNURb/ggrc-core,VinnieJohns/ggrc-core,ankit-collective/ggrc-core,vladan-m/ggrc-core,plamut/ggrc-core,hamyuan/ggrc-self-test,andrei-karalionak/ggrc-core,hyperNURb/ggrc-core,hasanalom/ggrc-core,uskudnik/ggrc-core,VinnieJohns/ggrc-core,prasannav7/ggrc-core,josthkko/ggrc-core,jmakov/ggrc-core,hyperNURb/ggrc-core,AleksNeStu/ggrc-core,prasannav7/ggrc-core,jmakov/ggrc-core,ankit-collective/ggrc-core,plamut/ggrc-core,hamyuan/ggrc-self-test,VinnieJohns/ggrc-core,AleksNeStu/ggrc-core,josthkko/ggrc-core,uskudnik/ggrc-core,NejcZupec/ggrc-core,jmakov/ggrc-core,jmakov/ggrc-core
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: # Maintained By: from ggrc import db from .associationproxy import association_proxy from .mixins import BusinessObject, Hierarchical class Section(Hierarchical, BusinessObject, db.Model): __tablename__ = 'sections' directive_id = db.Column(db.Integer, db.ForeignKey('directives.id')) na = db.Column(db.Boolean, default=False, nullable=False) notes = db.Column(db.Text) control_sections = db.relationship('ControlSection', backref='section') controls = association_proxy('control_sections', 'control', 'ControlSection') _publish_attrs = [ 'directive', 'na', 'notes', 'control_sections', 'controls', ] _update_attrs = [ 'directive', 'na', 'notes', 'controls', ]Add eager-loaded links for Section
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: # Maintained By: from ggrc import db from .associationproxy import association_proxy from .mixins import BusinessObject, Hierarchical class Section(Hierarchical, BusinessObject, db.Model): __tablename__ = 'sections' directive_id = db.Column(db.Integer, db.ForeignKey('directives.id')) na = db.Column(db.Boolean, default=False, nullable=False) notes = db.Column(db.Text) control_sections = db.relationship('ControlSection', backref='section') controls = association_proxy('control_sections', 'control', 'ControlSection') _publish_attrs = [ 'directive', 'na', 'notes', 'control_sections', 'controls', ] _update_attrs = [ 'directive', 'na', 'notes', 'controls', ] @classmethod def eager_query(cls): from sqlalchemy import orm query = super(Section, cls).eager_query() return query.options( orm.joinedload('directive'), orm.subqueryload_all('control_sections.control'))
<commit_before> # Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: # Maintained By: from ggrc import db from .associationproxy import association_proxy from .mixins import BusinessObject, Hierarchical class Section(Hierarchical, BusinessObject, db.Model): __tablename__ = 'sections' directive_id = db.Column(db.Integer, db.ForeignKey('directives.id')) na = db.Column(db.Boolean, default=False, nullable=False) notes = db.Column(db.Text) control_sections = db.relationship('ControlSection', backref='section') controls = association_proxy('control_sections', 'control', 'ControlSection') _publish_attrs = [ 'directive', 'na', 'notes', 'control_sections', 'controls', ] _update_attrs = [ 'directive', 'na', 'notes', 'controls', ]<commit_msg>Add eager-loaded links for Section<commit_after>
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: # Maintained By: from ggrc import db from .associationproxy import association_proxy from .mixins import BusinessObject, Hierarchical class Section(Hierarchical, BusinessObject, db.Model): __tablename__ = 'sections' directive_id = db.Column(db.Integer, db.ForeignKey('directives.id')) na = db.Column(db.Boolean, default=False, nullable=False) notes = db.Column(db.Text) control_sections = db.relationship('ControlSection', backref='section') controls = association_proxy('control_sections', 'control', 'ControlSection') _publish_attrs = [ 'directive', 'na', 'notes', 'control_sections', 'controls', ] _update_attrs = [ 'directive', 'na', 'notes', 'controls', ] @classmethod def eager_query(cls): from sqlalchemy import orm query = super(Section, cls).eager_query() return query.options( orm.joinedload('directive'), orm.subqueryload_all('control_sections.control'))
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: # Maintained By: from ggrc import db from .associationproxy import association_proxy from .mixins import BusinessObject, Hierarchical class Section(Hierarchical, BusinessObject, db.Model): __tablename__ = 'sections' directive_id = db.Column(db.Integer, db.ForeignKey('directives.id')) na = db.Column(db.Boolean, default=False, nullable=False) notes = db.Column(db.Text) control_sections = db.relationship('ControlSection', backref='section') controls = association_proxy('control_sections', 'control', 'ControlSection') _publish_attrs = [ 'directive', 'na', 'notes', 'control_sections', 'controls', ] _update_attrs = [ 'directive', 'na', 'notes', 'controls', ]Add eager-loaded links for Section # Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: # Maintained By: from ggrc import db from .associationproxy import association_proxy from .mixins import BusinessObject, Hierarchical class Section(Hierarchical, BusinessObject, db.Model): __tablename__ = 'sections' directive_id = db.Column(db.Integer, db.ForeignKey('directives.id')) na = db.Column(db.Boolean, default=False, nullable=False) notes = db.Column(db.Text) control_sections = db.relationship('ControlSection', backref='section') controls = association_proxy('control_sections', 'control', 'ControlSection') _publish_attrs = [ 'directive', 'na', 'notes', 'control_sections', 'controls', ] _update_attrs = [ 'directive', 'na', 'notes', 'controls', ] @classmethod def eager_query(cls): from sqlalchemy import orm query = super(Section, cls).eager_query() return query.options( orm.joinedload('directive'), orm.subqueryload_all('control_sections.control'))
<commit_before> # Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: # Maintained By: from ggrc import db from .associationproxy import association_proxy from .mixins import BusinessObject, Hierarchical class Section(Hierarchical, BusinessObject, db.Model): __tablename__ = 'sections' directive_id = db.Column(db.Integer, db.ForeignKey('directives.id')) na = db.Column(db.Boolean, default=False, nullable=False) notes = db.Column(db.Text) control_sections = db.relationship('ControlSection', backref='section') controls = association_proxy('control_sections', 'control', 'ControlSection') _publish_attrs = [ 'directive', 'na', 'notes', 'control_sections', 'controls', ] _update_attrs = [ 'directive', 'na', 'notes', 'controls', ]<commit_msg>Add eager-loaded links for Section<commit_after> # Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: # Maintained By: from ggrc import db from .associationproxy import association_proxy from .mixins import BusinessObject, Hierarchical class Section(Hierarchical, BusinessObject, db.Model): __tablename__ = 'sections' directive_id = db.Column(db.Integer, db.ForeignKey('directives.id')) na = db.Column(db.Boolean, default=False, nullable=False) notes = db.Column(db.Text) control_sections = db.relationship('ControlSection', backref='section') controls = association_proxy('control_sections', 'control', 'ControlSection') _publish_attrs = [ 'directive', 'na', 'notes', 'control_sections', 'controls', ] _update_attrs = [ 'directive', 'na', 'notes', 'controls', ] @classmethod def eager_query(cls): from sqlalchemy import orm query = super(Section, cls).eager_query() return query.options( orm.joinedload('directive'), orm.subqueryload_all('control_sections.control'))
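The eager_query override in the record above pairs a joined load of the directive relationship with a subquery load across control_sections into control. Worth knowing when reading it today: the subqueryload_all helper was later removed from SQLAlchemy in favor of chained loader options, and string paths were in turn dropped in the 2.0 series. A sketch of the chained equivalent on a 1.x release:

    from sqlalchemy import orm

    query = query.options(
        orm.joinedload('directive'),
        orm.subqueryload('control_sections').subqueryload('control'),
    )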
8f1d28a5e7f698bf177412eef92529eb5b360301
keeper/logutils.py
keeper/logutils.py
"""Logging helpers and utilities.""" import uuid from functools import wraps from timeit import default_timer as timer from typing import Any, Callable, TypeVar import structlog from flask import make_response, request __all__ = ["log_route"] F = TypeVar("F", bound=Callable[..., Any]) def log_route() -> Callable[[F], F]: """Route decorator to initialize a thread-local logger for a route.""" def decorator(f): # type: ignore @wraps(f) def decorated_function(*args, **kwargs): # type: ignore # Initialize a timer to capture the response time # This is for convenience, in addition to route monitoring. start_time = timer() # Initialize a new thread-local logger and add a unique request # ID to its context. # http://www.structlog.org/en/stable/examples.html logger = structlog.get_logger() log = logger.new( request_id=str(uuid.uuid4()), path=request.path, method=request.method, ) # Pass through route response = f(*args, **kwargs) response = make_response(response) # Close out the logger end_time = timer() log.info( status=response.status_code, response_time=end_time - start_time, ) return response return decorated_function return decorator
"""Logging helpers and utilities.""" import uuid from functools import wraps from timeit import default_timer as timer from typing import Any, Callable, TypeVar import structlog from flask import make_response, request __all__ = ["log_route"] F = TypeVar("F", bound=Callable[..., Any]) def log_route() -> Callable[[F], F]: """Route decorator to initialize a thread-local logger for a route.""" def decorator(f): # type: ignore @wraps(f) def decorated_function(*args, **kwargs): # type: ignore # Initialize a timer to capture the response time # This is for convenience, in addition to route monitoring. start_time = timer() # Initialize a new thread-local logger and add a unique request # ID to its context. # http://www.structlog.org/en/stable/examples.html logger = structlog.get_logger() log = logger.new( request_id=str(uuid.uuid4()), path=request.path, method=request.method, ) # Pass through route response = f(*args, **kwargs) response = make_response(response) # Close out the logger end_time = timer() log.info( "Closing endpoint handler", status=response.status_code, response_time=end_time - start_time, ) return response return decorated_function return decorator
Include an event with the response log message
Include an event with the response log message This is now required by structlog
Python
mit
lsst-sqre/ltd-keeper,lsst-sqre/ltd-keeper
"""Logging helpers and utilities.""" import uuid from functools import wraps from timeit import default_timer as timer from typing import Any, Callable, TypeVar import structlog from flask import make_response, request __all__ = ["log_route"] F = TypeVar("F", bound=Callable[..., Any]) def log_route() -> Callable[[F], F]: """Route decorator to initialize a thread-local logger for a route.""" def decorator(f): # type: ignore @wraps(f) def decorated_function(*args, **kwargs): # type: ignore # Initialize a timer to capture the response time # This is for convenience, in addition to route monitoring. start_time = timer() # Initialize a new thread-local logger and add a unique request # ID to its context. # http://www.structlog.org/en/stable/examples.html logger = structlog.get_logger() log = logger.new( request_id=str(uuid.uuid4()), path=request.path, method=request.method, ) # Pass through route response = f(*args, **kwargs) response = make_response(response) # Close out the logger end_time = timer() log.info( status=response.status_code, response_time=end_time - start_time, ) return response return decorated_function return decorator Include an event with the response log message This is now required by structlog
"""Logging helpers and utilities.""" import uuid from functools import wraps from timeit import default_timer as timer from typing import Any, Callable, TypeVar import structlog from flask import make_response, request __all__ = ["log_route"] F = TypeVar("F", bound=Callable[..., Any]) def log_route() -> Callable[[F], F]: """Route decorator to initialize a thread-local logger for a route.""" def decorator(f): # type: ignore @wraps(f) def decorated_function(*args, **kwargs): # type: ignore # Initialize a timer to capture the response time # This is for convenience, in addition to route monitoring. start_time = timer() # Initialize a new thread-local logger and add a unique request # ID to its context. # http://www.structlog.org/en/stable/examples.html logger = structlog.get_logger() log = logger.new( request_id=str(uuid.uuid4()), path=request.path, method=request.method, ) # Pass through route response = f(*args, **kwargs) response = make_response(response) # Close out the logger end_time = timer() log.info( "Closing endpoint handler", status=response.status_code, response_time=end_time - start_time, ) return response return decorated_function return decorator
<commit_before>"""Logging helpers and utilities.""" import uuid from functools import wraps from timeit import default_timer as timer from typing import Any, Callable, TypeVar import structlog from flask import make_response, request __all__ = ["log_route"] F = TypeVar("F", bound=Callable[..., Any]) def log_route() -> Callable[[F], F]: """Route decorator to initialize a thread-local logger for a route.""" def decorator(f): # type: ignore @wraps(f) def decorated_function(*args, **kwargs): # type: ignore # Initialize a timer to capture the response time # This is for convenience, in addition to route monitoring. start_time = timer() # Initialize a new thread-local logger and add a unique request # ID to its context. # http://www.structlog.org/en/stable/examples.html logger = structlog.get_logger() log = logger.new( request_id=str(uuid.uuid4()), path=request.path, method=request.method, ) # Pass through route response = f(*args, **kwargs) response = make_response(response) # Close out the logger end_time = timer() log.info( status=response.status_code, response_time=end_time - start_time, ) return response return decorated_function return decorator <commit_msg>Include an event with the response log message This is now required by structlog<commit_after>
"""Logging helpers and utilities.""" import uuid from functools import wraps from timeit import default_timer as timer from typing import Any, Callable, TypeVar import structlog from flask import make_response, request __all__ = ["log_route"] F = TypeVar("F", bound=Callable[..., Any]) def log_route() -> Callable[[F], F]: """Route decorator to initialize a thread-local logger for a route.""" def decorator(f): # type: ignore @wraps(f) def decorated_function(*args, **kwargs): # type: ignore # Initialize a timer to capture the response time # This is for convenience, in addition to route monitoring. start_time = timer() # Initialize a new thread-local logger and add a unique request # ID to its context. # http://www.structlog.org/en/stable/examples.html logger = structlog.get_logger() log = logger.new( request_id=str(uuid.uuid4()), path=request.path, method=request.method, ) # Pass through route response = f(*args, **kwargs) response = make_response(response) # Close out the logger end_time = timer() log.info( "Closing endpoint handler", status=response.status_code, response_time=end_time - start_time, ) return response return decorated_function return decorator
"""Logging helpers and utilities.""" import uuid from functools import wraps from timeit import default_timer as timer from typing import Any, Callable, TypeVar import structlog from flask import make_response, request __all__ = ["log_route"] F = TypeVar("F", bound=Callable[..., Any]) def log_route() -> Callable[[F], F]: """Route decorator to initialize a thread-local logger for a route.""" def decorator(f): # type: ignore @wraps(f) def decorated_function(*args, **kwargs): # type: ignore # Initialize a timer to capture the response time # This is for convenience, in addition to route monitoring. start_time = timer() # Initialize a new thread-local logger and add a unique request # ID to its context. # http://www.structlog.org/en/stable/examples.html logger = structlog.get_logger() log = logger.new( request_id=str(uuid.uuid4()), path=request.path, method=request.method, ) # Pass through route response = f(*args, **kwargs) response = make_response(response) # Close out the logger end_time = timer() log.info( status=response.status_code, response_time=end_time - start_time, ) return response return decorated_function return decorator Include an event with the response log message This is now required by structlog"""Logging helpers and utilities.""" import uuid from functools import wraps from timeit import default_timer as timer from typing import Any, Callable, TypeVar import structlog from flask import make_response, request __all__ = ["log_route"] F = TypeVar("F", bound=Callable[..., Any]) def log_route() -> Callable[[F], F]: """Route decorator to initialize a thread-local logger for a route.""" def decorator(f): # type: ignore @wraps(f) def decorated_function(*args, **kwargs): # type: ignore # Initialize a timer to capture the response time # This is for convenience, in addition to route monitoring. start_time = timer() # Initialize a new thread-local logger and add a unique request # ID to its context. # http://www.structlog.org/en/stable/examples.html logger = structlog.get_logger() log = logger.new( request_id=str(uuid.uuid4()), path=request.path, method=request.method, ) # Pass through route response = f(*args, **kwargs) response = make_response(response) # Close out the logger end_time = timer() log.info( "Closing endpoint handler", status=response.status_code, response_time=end_time - start_time, ) return response return decorated_function return decorator
<commit_before>"""Logging helpers and utilities.""" import uuid from functools import wraps from timeit import default_timer as timer from typing import Any, Callable, TypeVar import structlog from flask import make_response, request __all__ = ["log_route"] F = TypeVar("F", bound=Callable[..., Any]) def log_route() -> Callable[[F], F]: """Route decorator to initialize a thread-local logger for a route.""" def decorator(f): # type: ignore @wraps(f) def decorated_function(*args, **kwargs): # type: ignore # Initialize a timer to capture the response time # This is for convenience, in addition to route monitoring. start_time = timer() # Initialize a new thread-local logger and add a unique request # ID to its context. # http://www.structlog.org/en/stable/examples.html logger = structlog.get_logger() log = logger.new( request_id=str(uuid.uuid4()), path=request.path, method=request.method, ) # Pass through route response = f(*args, **kwargs) response = make_response(response) # Close out the logger end_time = timer() log.info( status=response.status_code, response_time=end_time - start_time, ) return response return decorated_function return decorator <commit_msg>Include an event with the response log message This is now required by structlog<commit_after>"""Logging helpers and utilities.""" import uuid from functools import wraps from timeit import default_timer as timer from typing import Any, Callable, TypeVar import structlog from flask import make_response, request __all__ = ["log_route"] F = TypeVar("F", bound=Callable[..., Any]) def log_route() -> Callable[[F], F]: """Route decorator to initialize a thread-local logger for a route.""" def decorator(f): # type: ignore @wraps(f) def decorated_function(*args, **kwargs): # type: ignore # Initialize a timer to capture the response time # This is for convenience, in addition to route monitoring. start_time = timer() # Initialize a new thread-local logger and add a unique request # ID to its context. # http://www.structlog.org/en/stable/examples.html logger = structlog.get_logger() log = logger.new( request_id=str(uuid.uuid4()), path=request.path, method=request.method, ) # Pass through route response = f(*args, **kwargs) response = make_response(response) # Close out the logger end_time = timer() log.info( "Closing endpoint handler", status=response.status_code, response_time=end_time - start_time, ) return response return decorated_function return decorator
41efafdd229c549636d8feda458c914653d96d4d
setup.py
setup.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from setuptools import setup, find_packages from os.path import dirname, join def main(): base_dir = dirname(__file__) setup( name='genty', version='0.0.1', description='Allows you to run a test with multiple data sets', long_description=open(join(base_dir, 'README.rst')).read(), author='Box', author_email='peter.potrebic@gmail.com', url='https://github.com/box/genty', license=open(join(base_dir, 'LICENSE')).read(), packages=find_packages(exclude=['test']), namespace_packages=[b'box'], test_suite='test', zip_safe=False, ) if __name__ == '__main__': main()
# -*- coding: utf-8 -*- from __future__ import unicode_literals from setuptools import setup, find_packages from os.path import dirname, join def main(): base_dir = dirname(__file__) setup( name='genty', version='0.0.1', description='Allows you to run a test with multiple data sets', long_description=open(join(base_dir, 'README.rst')).read(), author='Box', author_email='oss@box.com', url='https://github.com/box/genty', license=open(join(base_dir, 'LICENSE')).read(), packages=find_packages(exclude=['test']), namespace_packages=[b'box', b'box.test'], test_suite='test', zip_safe=False, ) if __name__ == '__main__': main()
Add box.test namespace. Also switch to oss@box.com for email addr.
Add box.test namespace. Also switch to oss@box.com for email addr. Fixes #7
Python
apache-2.0
box/genty,box/genty
# -*- coding: utf-8 -*- from __future__ import unicode_literals from setuptools import setup, find_packages from os.path import dirname, join def main(): base_dir = dirname(__file__) setup( name='genty', version='0.0.1', description='Allows you to run a test with multiple data sets', long_description=open(join(base_dir, 'README.rst')).read(), author='Box', author_email='peter.potrebic@gmail.com', url='https://github.com/box/genty', license=open(join(base_dir, 'LICENSE')).read(), packages=find_packages(exclude=['test']), namespace_packages=[b'box'], test_suite='test', zip_safe=False, ) if __name__ == '__main__': main() Add box.test namespace. Also switch to oss@box.com for email addr. Fixes #7
# -*- coding: utf-8 -*- from __future__ import unicode_literals from setuptools import setup, find_packages from os.path import dirname, join def main(): base_dir = dirname(__file__) setup( name='genty', version='0.0.1', description='Allows you to run a test with multiple data sets', long_description=open(join(base_dir, 'README.rst')).read(), author='Box', author_email='oss@box.com', url='https://github.com/box/genty', license=open(join(base_dir, 'LICENSE')).read(), packages=find_packages(exclude=['test']), namespace_packages=[b'box', b'box.test'], test_suite='test', zip_safe=False, ) if __name__ == '__main__': main()
<commit_before># -*- coding: utf-8 -*- from __future__ import unicode_literals from setuptools import setup, find_packages from os.path import dirname, join def main(): base_dir = dirname(__file__) setup( name='genty', version='0.0.1', description='Allows you to run a test with multiple data sets', long_description=open(join(base_dir, 'README.rst')).read(), author='Box', author_email='peter.potrebic@gmail.com', url='https://github.com/box/genty', license=open(join(base_dir, 'LICENSE')).read(), packages=find_packages(exclude=['test']), namespace_packages=[b'box'], test_suite='test', zip_safe=False, ) if __name__ == '__main__': main() <commit_msg>Add box.test namespace. Also switch to oss@box.com for email addr. Fixes #7<commit_after>
# -*- coding: utf-8 -*- from __future__ import unicode_literals from setuptools import setup, find_packages from os.path import dirname, join def main(): base_dir = dirname(__file__) setup( name='genty', version='0.0.1', description='Allows you to run a test with multiple data sets', long_description=open(join(base_dir, 'README.rst')).read(), author='Box', author_email='oss@box.com', url='https://github.com/box/genty', license=open(join(base_dir, 'LICENSE')).read(), packages=find_packages(exclude=['test']), namespace_packages=[b'box', b'box.test'], test_suite='test', zip_safe=False, ) if __name__ == '__main__': main()
# -*- coding: utf-8 -*- from __future__ import unicode_literals from setuptools import setup, find_packages from os.path import dirname, join def main(): base_dir = dirname(__file__) setup( name='genty', version='0.0.1', description='Allows you to run a test with multiple data sets', long_description=open(join(base_dir, 'README.rst')).read(), author='Box', author_email='peter.potrebic@gmail.com', url='https://github.com/box/genty', license=open(join(base_dir, 'LICENSE')).read(), packages=find_packages(exclude=['test']), namespace_packages=[b'box'], test_suite='test', zip_safe=False, ) if __name__ == '__main__': main() Add box.test namespace. Also switch to oss@box.com for email addr. Fixes #7# -*- coding: utf-8 -*- from __future__ import unicode_literals from setuptools import setup, find_packages from os.path import dirname, join def main(): base_dir = dirname(__file__) setup( name='genty', version='0.0.1', description='Allows you to run a test with multiple data sets', long_description=open(join(base_dir, 'README.rst')).read(), author='Box', author_email='oss@box.com', url='https://github.com/box/genty', license=open(join(base_dir, 'LICENSE')).read(), packages=find_packages(exclude=['test']), namespace_packages=[b'box', b'box.test'], test_suite='test', zip_safe=False, ) if __name__ == '__main__': main()
<commit_before># -*- coding: utf-8 -*- from __future__ import unicode_literals from setuptools import setup, find_packages from os.path import dirname, join def main(): base_dir = dirname(__file__) setup( name='genty', version='0.0.1', description='Allows you to run a test with multiple data sets', long_description=open(join(base_dir, 'README.rst')).read(), author='Box', author_email='peter.potrebic@gmail.com', url='https://github.com/box/genty', license=open(join(base_dir, 'LICENSE')).read(), packages=find_packages(exclude=['test']), namespace_packages=[b'box'], test_suite='test', zip_safe=False, ) if __name__ == '__main__': main() <commit_msg>Add box.test namespace. Also switch to oss@box.com for email addr. Fixes #7<commit_after># -*- coding: utf-8 -*- from __future__ import unicode_literals from setuptools import setup, find_packages from os.path import dirname, join def main(): base_dir = dirname(__file__) setup( name='genty', version='0.0.1', description='Allows you to run a test with multiple data sets', long_description=open(join(base_dir, 'README.rst')).read(), author='Box', author_email='oss@box.com', url='https://github.com/box/genty', license=open(join(base_dir, 'LICENSE')).read(), packages=find_packages(exclude=['test']), namespace_packages=[b'box', b'box.test'], test_suite='test', zip_safe=False, ) if __name__ == '__main__': main()
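Declaring b'box' and b'box.test' in namespace_packages, as the record above does, only works if each namespace level also ships a pkg_resources-style __init__.py; otherwise distributions sharing the box namespace shadow one another. The conventional one-line declaration for those files:

    # box/__init__.py and box/test/__init__.py
    __import__('pkg_resources').declare_namespace(__name__)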
1a46149806a66d7f493cf104913ebebde7e6ba5d
chatterbot/adapters/io/tts.py
chatterbot/adapters/io/tts.py
from chatterbot.adapters.io import IOAdapter from chatterbot.utils.read_input import input_function import os import platform import subprocess class MacOSXTTS(IOAdapter): def process_input(self): """ Read the user's input from the terminal. """ user_input = input_function() return user_input def process_response(self, statement): """ Speak the response. """ cmd = ['say', str(statement.text)] subprocess.call(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) return statement.text
from chatterbot.adapters.io import IOAdapter from chatterbot.utils.read_input import input_function import os import platform import subprocess class MacOSXTTS(IOAdapter): def process_input(self): """ Read the user's input from the terminal. """ user_input = input_function() return user_input def process_response(self, statement): """ Speak the response. """ cmd = ['say', str(statement.text)] if platform.system().lower() == 'darwin': subprocess.call(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) return statement.text
Make sure only Mac computers use the MacOSXTTS io adapter
Make sure only Mac computers use the MacOSXTTS io adapter
Python
bsd-3-clause
Gustavo6046/ChatterBot,maclogan/VirtualPenPal,Reinaesaya/OUIRL-ChatBot,vkosuri/ChatterBot,gunthercox/ChatterBot,davizucon/ChatterBot,Reinaesaya/OUIRL-ChatBot,DarkmatterVale/ChatterBot
from chatterbot.adapters.io import IOAdapter from chatterbot.utils.read_input import input_function import os import platform import subprocess class MacOSXTTS(IOAdapter): def process_input(self): """ Read the user's input from the terminal. """ user_input = input_function() return user_input def process_response(self, statement): """ Speak the response. """ cmd = ['say', str(statement.text)] subprocess.call(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) return statement.text Make sure only Mac computers use the MacOSXTTS io adapter
from chatterbot.adapters.io import IOAdapter from chatterbot.utils.read_input import input_function import os import platform import subprocess class MacOSXTTS(IOAdapter): def process_input(self): """ Read the user's input from the terminal. """ user_input = input_function() return user_input def process_response(self, statement): """ Speak the response. """ cmd = ['say', str(statement.text)] if platform.system().lower() == 'darwin': subprocess.call(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) return statement.text
<commit_before>from chatterbot.adapters.io import IOAdapter from chatterbot.utils.read_input import input_function import os import platform import subprocess class MacOSXTTS(IOAdapter): def process_input(self): """ Read the user's input from the terminal. """ user_input = input_function() return user_input def process_response(self, statement): """ Speak the response. """ cmd = ['say', str(statement.text)] subprocess.call(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) return statement.text <commit_msg>Make sure only Mac computers use the MacOSXTTS io adapter<commit_after>
from chatterbot.adapters.io import IOAdapter from chatterbot.utils.read_input import input_function import os import platform import subprocess class MacOSXTTS(IOAdapter): def process_input(self): """ Read the user's input from the terminal. """ user_input = input_function() return user_input def process_response(self, statement): """ Speak the response. """ cmd = ['say', str(statement.text)] if platform.system().lower() == 'darwin': subprocess.call(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) return statement.text
from chatterbot.adapters.io import IOAdapter from chatterbot.utils.read_input import input_function import os import platform import subprocess class MacOSXTTS(IOAdapter): def process_input(self): """ Read the user's input from the terminal. """ user_input = input_function() return user_input def process_response(self, statement): """ Speak the response. """ cmd = ['say', str(statement.text)] subprocess.call(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) return statement.text Make sure only Mac computers use the MacOSXTTS io adapterfrom chatterbot.adapters.io import IOAdapter from chatterbot.utils.read_input import input_function import os import platform import subprocess class MacOSXTTS(IOAdapter): def process_input(self): """ Read the user's input from the terminal. """ user_input = input_function() return user_input def process_response(self, statement): """ Speak the response. """ cmd = ['say', str(statement.text)] if platform.system().lower() == 'darwin': subprocess.call(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) return statement.text
<commit_before>from chatterbot.adapters.io import IOAdapter from chatterbot.utils.read_input import input_function import os import platform import subprocess class MacOSXTTS(IOAdapter): def process_input(self): """ Read the user's input from the terminal. """ user_input = input_function() return user_input def process_response(self, statement): """ Speak the response. """ cmd = ['say', str(statement.text)] subprocess.call(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) return statement.text <commit_msg>Make sure only Mac computers use the MacOSXTTS io adapter<commit_after>from chatterbot.adapters.io import IOAdapter from chatterbot.utils.read_input import input_function import os import platform import subprocess class MacOSXTTS(IOAdapter): def process_input(self): """ Read the user's input from the terminal. """ user_input = input_function() return user_input def process_response(self, statement): """ Speak the response. """ cmd = ['say', str(statement.text)] if platform.system().lower() == 'darwin': subprocess.call(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) return statement.text
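The guard added in the record above keys on platform.system(), which returns 'Darwin' on macOS. A hedged sketch of lifting the check into a reusable helper that degrades to a no-op on platforms without a known TTS command (the espeak branch assumes that binary is installed, which is not guaranteed):

    import platform
    import subprocess

    def speak(text):
        # Speak text via an OS-specific TTS command, if one is known.
        system = platform.system().lower()
        if system == 'darwin':      # macOS ships the 'say' command
            subprocess.call(['say', text])
        elif system == 'linux':     # assumes espeak is present
            subprocess.call(['espeak', text])
        # other platforms: silently skip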
11d3075ba9d1881526ce90d01ae3b3d5728740fa
setup.py
setup.py
#!/usr/bin/env python3 from distutils.core import setup setup( name='dg', version='HEAD', description='A programming language for the CPython VM', author='pyos', author_email='pyos100500@gmail.com', url='https://github.com/pyos/dg.git', packages=['dg'], package_dir={'dg': '.'}, package_data={'dg': ['*.dg', 'core/*', 'bundles/*']} )
#!/usr/bin/env python3 from distutils.core import setup setup( name='dg', version='HEAD', description='A programming language for the CPython VM', author='pyos', author_email='pyos100500@gmail.com', url='https://github.com/pyos/dg.git', packages=['dg'], package_dir={'dg': '.'}, package_data={'dg': ['*.dg', 'addon/*', 'core/*', 'bundles/*']} )
Install stuff from /addon, too.
Install stuff from /addon, too.
Python
mit
pyos/dg
#!/usr/bin/env python3 from distutils.core import setup setup( name='dg', version='HEAD', description='A programming language for the CPython VM', author='pyos', author_email='pyos100500@gmail.com', url='https://github.com/pyos/dg.git', packages=['dg'], package_dir={'dg': '.'}, package_data={'dg': ['*.dg', 'core/*', 'bundles/*']} ) Install stuff from /addon, too.
#!/usr/bin/env python3 from distutils.core import setup setup( name='dg', version='HEAD', description='A programming language for the CPython VM', author='pyos', author_email='pyos100500@gmail.com', url='https://github.com/pyos/dg.git', packages=['dg'], package_dir={'dg': '.'}, package_data={'dg': ['*.dg', 'addon/*', 'core/*', 'bundles/*']} )
<commit_before>#!/usr/bin/env python3 from distutils.core import setup setup( name='dg', version='HEAD', description='A programming language for the CPython VM', author='pyos', author_email='pyos100500@gmail.com', url='https://github.com/pyos/dg.git', packages=['dg'], package_dir={'dg': '.'}, package_data={'dg': ['*.dg', 'core/*', 'bundles/*']} ) <commit_msg>Install stuff from /addon, too.<commit_after>
#!/usr/bin/env python3 from distutils.core import setup setup( name='dg', version='HEAD', description='A programming language for the CPython VM', author='pyos', author_email='pyos100500@gmail.com', url='https://github.com/pyos/dg.git', packages=['dg'], package_dir={'dg': '.'}, package_data={'dg': ['*.dg', 'addon/*', 'core/*', 'bundles/*']} )
#!/usr/bin/env python3 from distutils.core import setup setup( name='dg', version='HEAD', description='A programming language for the CPython VM', author='pyos', author_email='pyos100500@gmail.com', url='https://github.com/pyos/dg.git', packages=['dg'], package_dir={'dg': '.'}, package_data={'dg': ['*.dg', 'core/*', 'bundles/*']} ) Install stuff from /addon, too.#!/usr/bin/env python3 from distutils.core import setup setup( name='dg', version='HEAD', description='A programming language for the CPython VM', author='pyos', author_email='pyos100500@gmail.com', url='https://github.com/pyos/dg.git', packages=['dg'], package_dir={'dg': '.'}, package_data={'dg': ['*.dg', 'addon/*', 'core/*', 'bundles/*']} )
<commit_before>#!/usr/bin/env python3 from distutils.core import setup setup( name='dg', version='HEAD', description='A programming language for the CPython VM', author='pyos', author_email='pyos100500@gmail.com', url='https://github.com/pyos/dg.git', packages=['dg'], package_dir={'dg': '.'}, package_data={'dg': ['*.dg', 'core/*', 'bundles/*']} ) <commit_msg>Install stuff from /addon, too.<commit_after>#!/usr/bin/env python3 from distutils.core import setup setup( name='dg', version='HEAD', description='A programming language for the CPython VM', author='pyos', author_email='pyos100500@gmail.com', url='https://github.com/pyos/dg.git', packages=['dg'], package_dir={'dg': '.'}, package_data={'dg': ['*.dg', 'addon/*', 'core/*', 'bundles/*']} )
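One caveat with the package_data change in the record above: distutils glob patterns do not recurse, so 'addon/*' matches files directly under addon/ but nothing in nested directories. If addon/ ever gained subdirectories, each level would need its own pattern, for example:

    package_data={'dg': ['*.dg', 'addon/*', 'addon/*/*', 'core/*', 'bundles/*']}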
4e66b44075cf8274204f5e6678d7ba6e57aadddf
setup.py
setup.py
from setuptools import setup, find_packages setup(name='syse', version='1.0', description='A syntactic sentence extraction program.', url='https://github.com/NathanWycoff/SySE', author='Nathan Wycoff', author_email='nathanbrwycoff@gmail.com', packages=['syse'], license = 'MIT', install_requires = [ 'pandas', 'numpy'], package_data = { # If any package contains *.txt or *.rst files, include them: '': ['default','README','LICENSE'] }, include_package_data = True, dependency_links = ['https://github.com/emilmont/pyStatParser/tarball/master'])
from setuptools import setup, find_packages setup(name='syse', version='1.0', description='A syntactic sentence extraction program.', url='https://github.com/NathanWycoff/SySE', author='Nathan Wycoff', author_email='nathanbrwycoff@gmail.com', packages=['syse'], license = 'MIT', install_requires = [ 'pandas', 'numpy'], package_data = { # If any package contains *.txt or *.rst files, include them: '/home/johnmadden/syse/': ['default','README','LICENSE'] }, include_package_data = True, dependency_links = ['https://github.com/emilmont/pyStatParser/tarball/master'])
Change is the Law of Life (6)
Change is the Law of Life (6)
Python
mit
NathanWycoff/SySE
from setuptools import setup, find_packages setup(name='syse', version='1.0', description='A syntactic sentence extraction program.', url='https://github.com/NathanWycoff/SySE', author='Nathan Wycoff', author_email='nathanbrwycoff@gmail.com', packages=['syse'], license = 'MIT', install_requires = [ 'pandas', 'numpy'], package_data = { # If any package contains *.txt or *.rst files, include them: '': ['default','README','LICENSE'] }, include_package_data = True, dependency_links = ['https://github.com/emilmont/pyStatParser/tarball/master']) Change is the Law of Life (6)
from setuptools import setup, find_packages setup(name='syse', version='1.0', description='A syntactic sentence extraction program.', url='https://github.com/NathanWycoff/SySE', author='Nathan Wycoff', author_email='nathanbrwycoff@gmail.com', packages=['syse'], license = 'MIT', install_requires = [ 'pandas', 'numpy'], package_data = { # If any package contains *.txt or *.rst files, include them: '/home/johnmadden/syse/': ['default','README','LICENSE'] }, include_package_data = True, dependency_links = ['https://github.com/emilmont/pyStatParser/tarball/master'])
<commit_before>from setuptools import setup, find_packages setup(name='syse', version='1.0', description='A syntactic sentence extraction program.', url='https://github.com/NathanWycoff/SySE', author='Nathan Wycoff', author_email='nathanbrwycoff@gmail.com', packages=['syse'], license = 'MIT', install_requires = [ 'pandas', 'numpy'], package_data = { # If any package contains *.txt or *.rst files, include them: '': ['default','README','LICENSE'] }, include_package_data = True, dependency_links = ['https://github.com/emilmont/pyStatParser/tarball/master']) <commit_msg>Change is the Law of Life (6)<commit_after>
from setuptools import setup, find_packages setup(name='syse', version='1.0', description='A syntactic sentence extraction program.', url='https://github.com/NathanWycoff/SySE', author='Nathan Wycoff', author_email='nathanbrwycoff@gmail.com', packages=['syse'], license = 'MIT', install_requires = [ 'pandas', 'numpy'], package_data = { # If any package contains *.txt or *.rst files, include them: '/home/johnmadden/syse/': ['default','README','LICENSE'] }, include_package_data = True, dependency_links = ['https://github.com/emilmont/pyStatParser/tarball/master'])
from setuptools import setup, find_packages setup(name='syse', version='1.0', description='A syntactic sentence extraction program.', url='https://github.com/NathanWycoff/SySE', author='Nathan Wycoff', author_email='nathanbrwycoff@gmail.com', packages=['syse'], license = 'MIT', install_requires = [ 'pandas', 'numpy'], package_data = { # If any package contains *.txt or *.rst files, include them: '': ['default','README','LICENSE'] }, include_package_data = True, dependency_links = ['https://github.com/emilmont/pyStatParser/tarball/master']) Change is the Law of Life (6)from setuptools import setup, find_packages setup(name='syse', version='1.0', description='A syntactic sentence extraction program.', url='https://github.com/NathanWycoff/SySE', author='Nathan Wycoff', author_email='nathanbrwycoff@gmail.com', packages=['syse'], license = 'MIT', install_requires = [ 'pandas', 'numpy'], package_data = { # If any package contains *.txt or *.rst files, include them: '/home/johnmadden/syse/': ['default','README','LICENSE'] }, include_package_data = True, dependency_links = ['https://github.com/emilmont/pyStatParser/tarball/master'])
<commit_before>from setuptools import setup, find_packages setup(name='syse', version='1.0', description='A syntactic sentence extraction program.', url='https://github.com/NathanWycoff/SySE', author='Nathan Wycoff', author_email='nathanbrwycoff@gmail.com', packages=['syse'], license = 'MIT', install_requires = [ 'pandas', 'numpy'], package_data = { # If any package contains *.txt or *.rst files, include them: '': ['default','README','LICENSE'] }, include_package_data = True, dependency_links = ['https://github.com/emilmont/pyStatParser/tarball/master']) <commit_msg>Change is the Law of Life (6)<commit_after>from setuptools import setup, find_packages setup(name='syse', version='1.0', description='A syntactic sentence extraction program.', url='https://github.com/NathanWycoff/SySE', author='Nathan Wycoff', author_email='nathanbrwycoff@gmail.com', packages=['syse'], license = 'MIT', install_requires = [ 'pandas', 'numpy'], package_data = { # If any package contains *.txt or *.rst files, include them: '/home/johnmadden/syse/': ['default','README','LICENSE'] }, include_package_data = True, dependency_links = ['https://github.com/emilmont/pyStatParser/tarball/master'])
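The record above swaps the package_data key from '' to an absolute filesystem path, but package_data keys are package names ('' meaning every package), so an absolute path key can never match and the listed files silently stop being packaged. The conventional form for this project would be:

    package_data={'syse': ['default', 'README', 'LICENSE']}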
96421cfe9711c77fb27a028d8e942bffd3059dd3
project/api/urls.py
project/api/urls.py
from project.api.views import ChannelViewSet, MessageViewSet, UserViewSet from django.conf.urls import url, include from rest_framework.authtoken import views from rest_framework.routers import DefaultRouter from rest_framework.schemas import get_schema_view schema_view = get_schema_view(title='Grailed API') # Create a router and register our viewsets with it. router = DefaultRouter() router.register(r'users', UserViewSet) router.register(r'channels', ChannelViewSet) router.register(r'messages', MessageViewSet) # The API URLs are now determined automatically by the router. # Additionally, we include the login URLs for the browsable API. urlpatterns = [ url(r'^schema/$', schema_view), url(r'^', include(router.urls)), url(r'^', include('rest_auth.urls')), url(r'^registration/$', include('rest_auth.registration.urls')), url(r'^api-token-auth/', views.obtain_auth_token), # fetch token with username and password ]
from project.api.views import ChannelViewSet, MessageViewSet, UserViewSet from django.conf.urls import url, include from rest_framework.authtoken import views from rest_framework.routers import DefaultRouter from rest_framework.schemas import get_schema_view schema_view = get_schema_view(title='Grailed API') # Create a router and register our viewsets with it. router = DefaultRouter() router.register(r'users', UserViewSet) router.register(r'channels', ChannelViewSet) router.register(r'messages', MessageViewSet) # The API URLs are now determined automatically by the router. # Additionally, we include the login URLs for the browsable API. urlpatterns = [ url(r'^schema/$', schema_view), url(r'^', include(router.urls)), url(r'^', include('rest_auth.urls')), url(r'^registration/', include('rest_auth.registration.urls')), url(r'^api-token-auth/', views.obtain_auth_token), # fetch token with username and password ]
Fix regex for registration url
Fix regex for registration url
Python
mit
djstein/messages-grailed
from project.api.views import ChannelViewSet, MessageViewSet, UserViewSet from django.conf.urls import url, include from rest_framework.authtoken import views from rest_framework.routers import DefaultRouter from rest_framework.schemas import get_schema_view schema_view = get_schema_view(title='Grailed API') # Create a router and register our viewsets with it. router = DefaultRouter() router.register(r'users', UserViewSet) router.register(r'channels', ChannelViewSet) router.register(r'messages', MessageViewSet) # The API URLs are now determined automatically by the router. # Additionally, we include the login URLs for the browsable API. urlpatterns = [ url(r'^schema/$', schema_view), url(r'^', include(router.urls)), url(r'^', include('rest_auth.urls')), url(r'^registration/$', include('rest_auth.registration.urls')), url(r'^api-token-auth/', views.obtain_auth_token), # fetch token with username and password ]Fix regex for registration url
from project.api.views import ChannelViewSet, MessageViewSet, UserViewSet from django.conf.urls import url, include from rest_framework.authtoken import views from rest_framework.routers import DefaultRouter from rest_framework.schemas import get_schema_view schema_view = get_schema_view(title='Grailed API') # Create a router and register our viewsets with it. router = DefaultRouter() router.register(r'users', UserViewSet) router.register(r'channels', ChannelViewSet) router.register(r'messages', MessageViewSet) # The API URLs are now determined automatically by the router. # Additionally, we include the login URLs for the browsable API. urlpatterns = [ url(r'^schema/$', schema_view), url(r'^', include(router.urls)), url(r'^', include('rest_auth.urls')), url(r'^registration/', include('rest_auth.registration.urls')), url(r'^api-token-auth/', views.obtain_auth_token), # fetch token with username and password ]
<commit_before>from project.api.views import ChannelViewSet, MessageViewSet, UserViewSet from django.conf.urls import url, include from rest_framework.authtoken import views from rest_framework.routers import DefaultRouter from rest_framework.schemas import get_schema_view schema_view = get_schema_view(title='Grailed API') # Create a router and register our viewsets with it. router = DefaultRouter() router.register(r'users', UserViewSet) router.register(r'channels', ChannelViewSet) router.register(r'messages', MessageViewSet) # The API URLs are now determined automatically by the router. # Additionally, we include the login URLs for the browsable API. urlpatterns = [ url(r'^schema/$', schema_view), url(r'^', include(router.urls)), url(r'^', include('rest_auth.urls')), url(r'^registration/$', include('rest_auth.registration.urls')), url(r'^api-token-auth/', views.obtain_auth_token), # fetch token with username and password ]<commit_msg>Fix regex for registration url<commit_after>
from project.api.views import ChannelViewSet, MessageViewSet, UserViewSet from django.conf.urls import url, include from rest_framework.authtoken import views from rest_framework.routers import DefaultRouter from rest_framework.schemas import get_schema_view schema_view = get_schema_view(title='Grailed API') # Create a router and register our viewsets with it. router = DefaultRouter() router.register(r'users', UserViewSet) router.register(r'channels', ChannelViewSet) router.register(r'messages', MessageViewSet) # The API URLs are now determined automatically by the router. # Additionally, we include the login URLs for the browsable API. urlpatterns = [ url(r'^schema/$', schema_view), url(r'^', include(router.urls)), url(r'^', include('rest_auth.urls')), url(r'^registration/', include('rest_auth.registration.urls')), url(r'^api-token-auth/', views.obtain_auth_token), # fetch token with username and password ]
from project.api.views import ChannelViewSet, MessageViewSet, UserViewSet from django.conf.urls import url, include from rest_framework.authtoken import views from rest_framework.routers import DefaultRouter from rest_framework.schemas import get_schema_view schema_view = get_schema_view(title='Grailed API') # Create a router and register our viewsets with it. router = DefaultRouter() router.register(r'users', UserViewSet) router.register(r'channels', ChannelViewSet) router.register(r'messages', MessageViewSet) # The API URLs are now determined automatically by the router. # Additionally, we include the login URLs for the browsable API. urlpatterns = [ url(r'^schema/$', schema_view), url(r'^', include(router.urls)), url(r'^', include('rest_auth.urls')), url(r'^registration/$', include('rest_auth.registration.urls')), url(r'^api-token-auth/', views.obtain_auth_token), # fetch token with username and password ]Fix regex for registration urlfrom project.api.views import ChannelViewSet, MessageViewSet, UserViewSet from django.conf.urls import url, include from rest_framework.authtoken import views from rest_framework.routers import DefaultRouter from rest_framework.schemas import get_schema_view schema_view = get_schema_view(title='Grailed API') # Create a router and register our viewsets with it. router = DefaultRouter() router.register(r'users', UserViewSet) router.register(r'channels', ChannelViewSet) router.register(r'messages', MessageViewSet) # The API URLs are now determined automatically by the router. # Additionally, we include the login URLs for the browsable API. urlpatterns = [ url(r'^schema/$', schema_view), url(r'^', include(router.urls)), url(r'^', include('rest_auth.urls')), url(r'^registration/', include('rest_auth.registration.urls')), url(r'^api-token-auth/', views.obtain_auth_token), # fetch token with username and password ]
<commit_before>from project.api.views import ChannelViewSet, MessageViewSet, UserViewSet from django.conf.urls import url, include from rest_framework.authtoken import views from rest_framework.routers import DefaultRouter from rest_framework.schemas import get_schema_view schema_view = get_schema_view(title='Grailed API') # Create a router and register our viewsets with it. router = DefaultRouter() router.register(r'users', UserViewSet) router.register(r'channels', ChannelViewSet) router.register(r'messages', MessageViewSet) # The API URLs are now determined automatically by the router. # Additionally, we include the login URLs for the browsable API. urlpatterns = [ url(r'^schema/$', schema_view), url(r'^', include(router.urls)), url(r'^', include('rest_auth.urls')), url(r'^registration/$', include('rest_auth.registration.urls')), url(r'^api-token-auth/', views.obtain_auth_token), # fetch token with username and password ]<commit_msg>Fix regex for registration url<commit_after>from project.api.views import ChannelViewSet, MessageViewSet, UserViewSet from django.conf.urls import url, include from rest_framework.authtoken import views from rest_framework.routers import DefaultRouter from rest_framework.schemas import get_schema_view schema_view = get_schema_view(title='Grailed API') # Create a router and register our viewsets with it. router = DefaultRouter() router.register(r'users', UserViewSet) router.register(r'channels', ChannelViewSet) router.register(r'messages', MessageViewSet) # The API URLs are now determined automatically by the router. # Additionally, we include the login URLs for the browsable API. urlpatterns = [ url(r'^schema/$', schema_view), url(r'^', include(router.urls)), url(r'^', include('rest_auth.urls')), url(r'^registration/', include('rest_auth.registration.urls')), url(r'^api-token-auth/', views.obtain_auth_token), # fetch token with username and password ]
0df7044bf2c697fe87ea82e4e82ae8895c7fa4a6
wsme/restjson.py
wsme/restjson.py
import base64 from wsme.rest import RestProtocol from wsme.controller import register_protocol import wsme.types try: import simplejson as json except ImportError: import json def prepare_encode(value, datatype): if datatype in wsme.types.pod_types: return value if wsme.types.isstructured(datatype): d = dict() for name, attr in wsme.types.list_attributes(datatype): d[name] = prepare_encode(getattr(value, name), attr.datatype) return d if datatype in wsme.types.dt_types: return value.isoformat() if datatype is wsme.types.binary: return base64.encode() class RestJsonProtocol(RestProtocol): name = 'REST+Json' dataformat = 'json' content_types = ['application/json', 'text/json', None] def decode_args(self, req, arguments): kw = json.loads(req.body) return kw def encode_result(self, result, return_type): return json.dumps({'result': prepare_encode(result, return_type)}) def encode_error(self, errordetail): return json.dumps(errordetail) register_protocol(RestJsonProtocol)
import base64 import datetime from simplegeneric import generic from wsme.rest import RestProtocol from wsme.controller import register_protocol import wsme.types try: import simplejson as json except ImportError: import json @generic def tojson(datatype, value): if wsme.types.isstructured(datatype): d = dict() for name, attr in wsme.types.list_attributes(datatype): d[name] = tojson(attr.datatype, getattr(value, name)) return d return value @tojson.when_object(datetime.date) def date_tojson(datatype, value): return value.isoformat() @tojson.when_object(datetime.time) def time_tojson(datatype, value): return value.isoformat() @tojson.when_object(datetime.datetime) def datetime_tojson(datatype, value): return value.isoformat() @tojson.when_object(wsme.types.binary) def datetime_tojson(datatype, value): return base64.encode(value) class RestJsonProtocol(RestProtocol): name = 'REST+Json' dataformat = 'json' content_types = ['application/json', 'text/json', None] def decode_args(self, req, arguments): kw = json.loads(req.body) return kw def encode_result(self, result, return_type): return json.dumps({'result': tojson(return_type, result)}) def encode_error(self, errordetail): return json.dumps(errordetail) register_protocol(RestJsonProtocol)
Use generic to prepare the json output so that non-structured custom types can be added
Use generic to prepare the json output so that non-structured custom types can be added
Python
mit
stackforge/wsme
import base64 from wsme.rest import RestProtocol from wsme.controller import register_protocol import wsme.types try: import simplejson as json except ImportError: import json def prepare_encode(value, datatype): if datatype in wsme.types.pod_types: return value if wsme.types.isstructured(datatype): d = dict() for name, attr in wsme.types.list_attributes(datatype): d[name] = prepare_encode(getattr(value, name), attr.datatype) return d if datatype in wsme.types.dt_types: return value.isoformat() if datatype is wsme.types.binary: return base64.encode() class RestJsonProtocol(RestProtocol): name = 'REST+Json' dataformat = 'json' content_types = ['application/json', 'text/json', None] def decode_args(self, req, arguments): kw = json.loads(req.body) return kw def encode_result(self, result, return_type): return json.dumps({'result': prepare_encode(result, return_type)}) def encode_error(self, errordetail): return json.dumps(errordetail) register_protocol(RestJsonProtocol) Use generic to prepare the json output so that non-structured custom types can be added
import base64 import datetime from simplegeneric import generic from wsme.rest import RestProtocol from wsme.controller import register_protocol import wsme.types try: import simplejson as json except ImportError: import json @generic def tojson(datatype, value): if wsme.types.isstructured(datatype): d = dict() for name, attr in wsme.types.list_attributes(datatype): d[name] = tojson(attr.datatype, getattr(value, name)) return d return value @tojson.when_object(datetime.date) def date_tojson(datatype, value): return value.isoformat() @tojson.when_object(datetime.time) def time_tojson(datatype, value): return value.isoformat() @tojson.when_object(datetime.datetime) def datetime_tojson(datatype, value): return value.isoformat() @tojson.when_object(wsme.types.binary) def datetime_tojson(datatype, value): return base64.encode(value) class RestJsonProtocol(RestProtocol): name = 'REST+Json' dataformat = 'json' content_types = ['application/json', 'text/json', None] def decode_args(self, req, arguments): kw = json.loads(req.body) return kw def encode_result(self, result, return_type): return json.dumps({'result': tojson(return_type, result)}) def encode_error(self, errordetail): return json.dumps(errordetail) register_protocol(RestJsonProtocol)
<commit_before>import base64 from wsme.rest import RestProtocol from wsme.controller import register_protocol import wsme.types try: import simplejson as json except ImportError: import json def prepare_encode(value, datatype): if datatype in wsme.types.pod_types: return value if wsme.types.isstructured(datatype): d = dict() for name, attr in wsme.types.list_attributes(datatype): d[name] = prepare_encode(getattr(value, name), attr.datatype) return d if datatype in wsme.types.dt_types: return value.isoformat() if datatype is wsme.types.binary: return base64.encode() class RestJsonProtocol(RestProtocol): name = 'REST+Json' dataformat = 'json' content_types = ['application/json', 'text/json', None] def decode_args(self, req, arguments): kw = json.loads(req.body) return kw def encode_result(self, result, return_type): return json.dumps({'result': prepare_encode(result, return_type)}) def encode_error(self, errordetail): return json.dumps(errordetail) register_protocol(RestJsonProtocol) <commit_msg>Use generic to prepare the json output so that non-structured custom types can be added<commit_after>
import base64 import datetime from simplegeneric import generic from wsme.rest import RestProtocol from wsme.controller import register_protocol import wsme.types try: import simplejson as json except ImportError: import json @generic def tojson(datatype, value): if wsme.types.isstructured(datatype): d = dict() for name, attr in wsme.types.list_attributes(datatype): d[name] = tojson(attr.datatype, getattr(value, name)) return d return value @tojson.when_object(datetime.date) def date_tojson(datatype, value): return value.isoformat() @tojson.when_object(datetime.time) def time_tojson(datatype, value): return value.isoformat() @tojson.when_object(datetime.datetime) def datetime_tojson(datatype, value): return value.isoformat() @tojson.when_object(wsme.types.binary) def datetime_tojson(datatype, value): return base64.encode(value) class RestJsonProtocol(RestProtocol): name = 'REST+Json' dataformat = 'json' content_types = ['application/json', 'text/json', None] def decode_args(self, req, arguments): kw = json.loads(req.body) return kw def encode_result(self, result, return_type): return json.dumps({'result': tojson(return_type, result)}) def encode_error(self, errordetail): return json.dumps(errordetail) register_protocol(RestJsonProtocol)
import base64 from wsme.rest import RestProtocol from wsme.controller import register_protocol import wsme.types try: import simplejson as json except ImportError: import json def prepare_encode(value, datatype): if datatype in wsme.types.pod_types: return value if wsme.types.isstructured(datatype): d = dict() for name, attr in wsme.types.list_attributes(datatype): d[name] = prepare_encode(getattr(value, name), attr.datatype) return d if datatype in wsme.types.dt_types: return value.isoformat() if datatype is wsme.types.binary: return base64.encode() class RestJsonProtocol(RestProtocol): name = 'REST+Json' dataformat = 'json' content_types = ['application/json', 'text/json', None] def decode_args(self, req, arguments): kw = json.loads(req.body) return kw def encode_result(self, result, return_type): return json.dumps({'result': prepare_encode(result, return_type)}) def encode_error(self, errordetail): return json.dumps(errordetail) register_protocol(RestJsonProtocol) Use generic to prepare the json output so that non-structured custom types can be addedimport base64 import datetime from simplegeneric import generic from wsme.rest import RestProtocol from wsme.controller import register_protocol import wsme.types try: import simplejson as json except ImportError: import json @generic def tojson(datatype, value): if wsme.types.isstructured(datatype): d = dict() for name, attr in wsme.types.list_attributes(datatype): d[name] = tojson(attr.datatype, getattr(value, name)) return d return value @tojson.when_object(datetime.date) def date_tojson(datatype, value): return value.isoformat() @tojson.when_object(datetime.time) def time_tojson(datatype, value): return value.isoformat() @tojson.when_object(datetime.datetime) def datetime_tojson(datatype, value): return value.isoformat() @tojson.when_object(wsme.types.binary) def datetime_tojson(datatype, value): return base64.encode(value) class RestJsonProtocol(RestProtocol): name = 'REST+Json' dataformat = 'json' content_types = ['application/json', 'text/json', None] def decode_args(self, req, arguments): kw = json.loads(req.body) return kw def encode_result(self, result, return_type): return json.dumps({'result': tojson(return_type, result)}) def encode_error(self, errordetail): return json.dumps(errordetail) register_protocol(RestJsonProtocol)
<commit_before>import base64 from wsme.rest import RestProtocol from wsme.controller import register_protocol import wsme.types try: import simplejson as json except ImportError: import json def prepare_encode(value, datatype): if datatype in wsme.types.pod_types: return value if wsme.types.isstructured(datatype): d = dict() for name, attr in wsme.types.list_attributes(datatype): d[name] = prepare_encode(getattr(value, name), attr.datatype) return d if datatype in wsme.types.dt_types: return value.isoformat() if datatype is wsme.types.binary: return base64.encode() class RestJsonProtocol(RestProtocol): name = 'REST+Json' dataformat = 'json' content_types = ['application/json', 'text/json', None] def decode_args(self, req, arguments): kw = json.loads(req.body) return kw def encode_result(self, result, return_type): return json.dumps({'result': prepare_encode(result, return_type)}) def encode_error(self, errordetail): return json.dumps(errordetail) register_protocol(RestJsonProtocol) <commit_msg>Use generic to prepare the json output so that non-structured custom types can be added<commit_after>import base64 import datetime from simplegeneric import generic from wsme.rest import RestProtocol from wsme.controller import register_protocol import wsme.types try: import simplejson as json except ImportError: import json @generic def tojson(datatype, value): if wsme.types.isstructured(datatype): d = dict() for name, attr in wsme.types.list_attributes(datatype): d[name] = tojson(attr.datatype, getattr(value, name)) return d return value @tojson.when_object(datetime.date) def date_tojson(datatype, value): return value.isoformat() @tojson.when_object(datetime.time) def time_tojson(datatype, value): return value.isoformat() @tojson.when_object(datetime.datetime) def datetime_tojson(datatype, value): return value.isoformat() @tojson.when_object(wsme.types.binary) def datetime_tojson(datatype, value): return base64.encode(value) class RestJsonProtocol(RestProtocol): name = 'REST+Json' dataformat = 'json' content_types = ['application/json', 'text/json', None] def decode_args(self, req, arguments): kw = json.loads(req.body) return kw def encode_result(self, result, return_type): return json.dumps({'result': tojson(return_type, result)}) def encode_error(self, errordetail): return json.dumps(errordetail) register_protocol(RestJsonProtocol)
026621ae88fc545de32d47d92d34af5049e48704
setup.py
setup.py
#! /usr/bin/env python # coding: utf-8 from setuptools import find_packages, setup setup(name='egoio', author='NEXT ENERGY, Reiner Lemoine Institut gGmbH, ZNES', author_email='ulf.p.mueller@hs-flensburg.de', description='ego input/output repository', version='0.4.5', url='https://github.com/openego/ego.io', packages=find_packages(), license='GNU Affero General Public License v3.0', install_requires=[ 'geoalchemy2 >= 0.3.0, <= 0.4.1', 'sqlalchemy >= 1.0.11, <= 1.2.0', 'keyring >= 4.0', 'keyrings.alt', 'psycopg2'], extras_require={ "sqlalchemy": 'postgresql'}, package_data={'tools': 'sqlacodegen_oedb.sh'} )
#! /usr/bin/env python # coding: utf-8 from setuptools import find_packages, setup setup(name='egoio', author='NEXT ENERGY, Reiner Lemoine Institut gGmbH, ZNES', author_email='ulf.p.mueller@hs-flensburg.de', description='ego input/output repository', version='0.4.5', url='https://github.com/openego/ego.io', packages=find_packages(), license='GNU Affero General Public License v3.0', install_requires=[ 'geoalchemy2 >= 0.3.0, <= 0.4.1', 'sqlalchemy >= 1.2.0', 'keyring >= 4.0', 'keyrings.alt', 'psycopg2'], extras_require={ "sqlalchemy": 'postgresql'}, package_data={'tools': 'sqlacodegen_oedb.sh'} )
Remove upper bound for packages
Remove upper bound for packages oedialect requires sqlalchemy >= 1.2.0 which would make it incompatible with ego.io for no reason
Python
agpl-3.0
openego/ego.io,openego/ego.io
#! /usr/bin/env python # coding: utf-8 from setuptools import find_packages, setup setup(name='egoio', author='NEXT ENERGY, Reiner Lemoine Institut gGmbH, ZNES', author_email='ulf.p.mueller@hs-flensburg.de', description='ego input/output repository', version='0.4.5', url='https://github.com/openego/ego.io', packages=find_packages(), license='GNU Affero General Public License v3.0', install_requires=[ 'geoalchemy2 >= 0.3.0, <= 0.4.1', 'sqlalchemy >= 1.0.11, <= 1.2.0', 'keyring >= 4.0', 'keyrings.alt', 'psycopg2'], extras_require={ "sqlalchemy": 'postgresql'}, package_data={'tools': 'sqlacodegen_oedb.sh'} ) Remove upper bound for packages oedialect requires sqlalchemy >= 1.2.0 which would make it incompatible with ego.io for no reason
#! /usr/bin/env python # coding: utf-8 from setuptools import find_packages, setup setup(name='egoio', author='NEXT ENERGY, Reiner Lemoine Institut gGmbH, ZNES', author_email='ulf.p.mueller@hs-flensburg.de', description='ego input/output repository', version='0.4.5', url='https://github.com/openego/ego.io', packages=find_packages(), license='GNU Affero General Public License v3.0', install_requires=[ 'geoalchemy2 >= 0.3.0, <= 0.4.1', 'sqlalchemy >= 1.2.0', 'keyring >= 4.0', 'keyrings.alt', 'psycopg2'], extras_require={ "sqlalchemy": 'postgresql'}, package_data={'tools': 'sqlacodegen_oedb.sh'} )
<commit_before>#! /usr/bin/env python # coding: utf-8 from setuptools import find_packages, setup setup(name='egoio', author='NEXT ENERGY, Reiner Lemoine Institut gGmbH, ZNES', author_email='ulf.p.mueller@hs-flensburg.de', description='ego input/output repository', version='0.4.5', url='https://github.com/openego/ego.io', packages=find_packages(), license='GNU Affero General Public License v3.0', install_requires=[ 'geoalchemy2 >= 0.3.0, <= 0.4.1', 'sqlalchemy >= 1.0.11, <= 1.2.0', 'keyring >= 4.0', 'keyrings.alt', 'psycopg2'], extras_require={ "sqlalchemy": 'postgresql'}, package_data={'tools': 'sqlacodegen_oedb.sh'} ) <commit_msg>Remove upper bound for packages oedialect requires sqlalchemy >= 1.2.0 which would make it incompatible with ego.io for no reason<commit_after>
#! /usr/bin/env python # coding: utf-8 from setuptools import find_packages, setup setup(name='egoio', author='NEXT ENERGY, Reiner Lemoine Institut gGmbH, ZNES', author_email='ulf.p.mueller@hs-flensburg.de', description='ego input/output repository', version='0.4.5', url='https://github.com/openego/ego.io', packages=find_packages(), license='GNU Affero General Public License v3.0', install_requires=[ 'geoalchemy2 >= 0.3.0, <= 0.4.1', 'sqlalchemy >= 1.2.0', 'keyring >= 4.0', 'keyrings.alt', 'psycopg2'], extras_require={ "sqlalchemy": 'postgresql'}, package_data={'tools': 'sqlacodegen_oedb.sh'} )
#! /usr/bin/env python # coding: utf-8 from setuptools import find_packages, setup setup(name='egoio', author='NEXT ENERGY, Reiner Lemoine Institut gGmbH, ZNES', author_email='ulf.p.mueller@hs-flensburg.de', description='ego input/output repository', version='0.4.5', url='https://github.com/openego/ego.io', packages=find_packages(), license='GNU Affero General Public License v3.0', install_requires=[ 'geoalchemy2 >= 0.3.0, <= 0.4.1', 'sqlalchemy >= 1.0.11, <= 1.2.0', 'keyring >= 4.0', 'keyrings.alt', 'psycopg2'], extras_require={ "sqlalchemy": 'postgresql'}, package_data={'tools': 'sqlacodegen_oedb.sh'} ) Remove upper bound for packages oedialect requires sqlalchemy >= 1.2.0 which would make it incompatible with ego.io for no reason#! /usr/bin/env python # coding: utf-8 from setuptools import find_packages, setup setup(name='egoio', author='NEXT ENERGY, Reiner Lemoine Institut gGmbH, ZNES', author_email='ulf.p.mueller@hs-flensburg.de', description='ego input/output repository', version='0.4.5', url='https://github.com/openego/ego.io', packages=find_packages(), license='GNU Affero General Public License v3.0', install_requires=[ 'geoalchemy2 >= 0.3.0, <= 0.4.1', 'sqlalchemy >= 1.2.0', 'keyring >= 4.0', 'keyrings.alt', 'psycopg2'], extras_require={ "sqlalchemy": 'postgresql'}, package_data={'tools': 'sqlacodegen_oedb.sh'} )
<commit_before>#! /usr/bin/env python # coding: utf-8 from setuptools import find_packages, setup setup(name='egoio', author='NEXT ENERGY, Reiner Lemoine Institut gGmbH, ZNES', author_email='ulf.p.mueller@hs-flensburg.de', description='ego input/output repository', version='0.4.5', url='https://github.com/openego/ego.io', packages=find_packages(), license='GNU Affero General Public License v3.0', install_requires=[ 'geoalchemy2 >= 0.3.0, <= 0.4.1', 'sqlalchemy >= 1.0.11, <= 1.2.0', 'keyring >= 4.0', 'keyrings.alt', 'psycopg2'], extras_require={ "sqlalchemy": 'postgresql'}, package_data={'tools': 'sqlacodegen_oedb.sh'} ) <commit_msg>Remove upper bound for packages oedialect requires sqlalchemy >= 1.2.0 which would make it incompatible with ego.io for no reason<commit_after>#! /usr/bin/env python # coding: utf-8 from setuptools import find_packages, setup setup(name='egoio', author='NEXT ENERGY, Reiner Lemoine Institut gGmbH, ZNES', author_email='ulf.p.mueller@hs-flensburg.de', description='ego input/output repository', version='0.4.5', url='https://github.com/openego/ego.io', packages=find_packages(), license='GNU Affero General Public License v3.0', install_requires=[ 'geoalchemy2 >= 0.3.0, <= 0.4.1', 'sqlalchemy >= 1.2.0', 'keyring >= 4.0', 'keyrings.alt', 'psycopg2'], extras_require={ "sqlalchemy": 'postgresql'}, package_data={'tools': 'sqlacodegen_oedb.sh'} )
6e6a5cfb39ae3f6ee9d0cfb30a6a33be06839bfa
setup.py
setup.py
#!/usr/bin/env python from distutils.core import setup, Extension setup( name='mapcode', ext_modules=[Extension('mapcode', sources=['mapcodemodule.c', 'mapcodelib/mapcoder.c'], include_dirs=['mapcodelib'] )], version='0.3', description='A Python module to do mapcode encoding and decoding. See http://www.mapcode.com for more information.', author='Erik Bos', author_email='erik@xs4all.nl', url='https://github.com/mapcode-foundation/mapcode-python', download_url='https://github.com/mapcode-foundation/mapcode-python/tarball/v0.3', license='Apache License 2.0', classifiers=[ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: Apache Software License' ], )
#!/usr/bin/env python from distutils.core import setup, Extension setup( name='mapcode', ext_modules=[Extension('mapcode', sources=['mapcodemodule.c', 'mapcodelib/mapcoder.c'], include_dirs=['mapcodelib'] )], # version number format is clibrary - python version='0.4', description='A Python module to do mapcode encoding and decoding. See http://www.mapcode.com for more information.', author='Erik Bos', author_email='erik@xs4all.nl', url='https://github.com/mapcode-foundation/mapcode-python', download_url='https://github.com/mapcode-foundation/mapcode-python/tarball/v0.4', license='Apache License 2.0', classifiers=[ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: Apache Software License' ], )
Update module version and tag as v0.4
Update module version and tag as v0.4
Python
apache-2.0
mapcode-foundation/mapcode-python,mapcode-foundation/mapcode-python
#!/usr/bin/env python from distutils.core import setup, Extension setup( name='mapcode', ext_modules=[Extension('mapcode', sources=['mapcodemodule.c', 'mapcodelib/mapcoder.c'], include_dirs=['mapcodelib'] )], version='0.3', description='A Python module to do mapcode encoding and decoding. See http://www.mapcode.com for more information.', author='Erik Bos', author_email='erik@xs4all.nl', url='https://github.com/mapcode-foundation/mapcode-python', download_url='https://github.com/mapcode-foundation/mapcode-python/tarball/v0.3', license='Apache License 2.0', classifiers=[ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: Apache Software License' ], ) Update module version and tag as v0.4
#!/usr/bin/env python from distutils.core import setup, Extension setup( name='mapcode', ext_modules=[Extension('mapcode', sources=['mapcodemodule.c', 'mapcodelib/mapcoder.c'], include_dirs=['mapcodelib'] )], # version number format is clibrary - python version='0.4', description='A Python module to do mapcode encoding and decoding. See http://www.mapcode.com for more information.', author='Erik Bos', author_email='erik@xs4all.nl', url='https://github.com/mapcode-foundation/mapcode-python', download_url='https://github.com/mapcode-foundation/mapcode-python/tarball/v0.4', license='Apache License 2.0', classifiers=[ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: Apache Software License' ], )
<commit_before>#!/usr/bin/env python from distutils.core import setup, Extension setup( name='mapcode', ext_modules=[Extension('mapcode', sources=['mapcodemodule.c', 'mapcodelib/mapcoder.c'], include_dirs=['mapcodelib'] )], version='0.3', description='A Python module to do mapcode encoding and decoding. See http://www.mapcode.com for more information.', author='Erik Bos', author_email='erik@xs4all.nl', url='https://github.com/mapcode-foundation/mapcode-python', download_url='https://github.com/mapcode-foundation/mapcode-python/tarball/v0.3', license='Apache License 2.0', classifiers=[ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: Apache Software License' ], ) <commit_msg>Update module version and tag as v0.4<commit_after>
#!/usr/bin/env python from distutils.core import setup, Extension setup( name='mapcode', ext_modules=[Extension('mapcode', sources=['mapcodemodule.c', 'mapcodelib/mapcoder.c'], include_dirs=['mapcodelib'] )], # version number format is clibrary - python version='0.4', description='A Python module to do mapcode encoding and decoding. See http://www.mapcode.com for more information.', author='Erik Bos', author_email='erik@xs4all.nl', url='https://github.com/mapcode-foundation/mapcode-python', download_url='https://github.com/mapcode-foundation/mapcode-python/tarball/v0.4', license='Apache License 2.0', classifiers=[ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: Apache Software License' ], )
#!/usr/bin/env python from distutils.core import setup, Extension setup( name='mapcode', ext_modules=[Extension('mapcode', sources=['mapcodemodule.c', 'mapcodelib/mapcoder.c'], include_dirs=['mapcodelib'] )], version='0.3', description='A Python module to do mapcode encoding and decoding. See http://www.mapcode.com for more information.', author='Erik Bos', author_email='erik@xs4all.nl', url='https://github.com/mapcode-foundation/mapcode-python', download_url='https://github.com/mapcode-foundation/mapcode-python/tarball/v0.3', license='Apache License 2.0', classifiers=[ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: Apache Software License' ], ) Update module version and tag as v0.4#!/usr/bin/env python from distutils.core import setup, Extension setup( name='mapcode', ext_modules=[Extension('mapcode', sources=['mapcodemodule.c', 'mapcodelib/mapcoder.c'], include_dirs=['mapcodelib'] )], # version number format is clibrary - python version='0.4', description='A Python module to do mapcode encoding and decoding. See http://www.mapcode.com for more information.', author='Erik Bos', author_email='erik@xs4all.nl', url='https://github.com/mapcode-foundation/mapcode-python', download_url='https://github.com/mapcode-foundation/mapcode-python/tarball/v0.4', license='Apache License 2.0', classifiers=[ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: Apache Software License' ], )
<commit_before>#!/usr/bin/env python from distutils.core import setup, Extension setup( name='mapcode', ext_modules=[Extension('mapcode', sources=['mapcodemodule.c', 'mapcodelib/mapcoder.c'], include_dirs=['mapcodelib'] )], version='0.3', description='A Python module to do mapcode encoding and decoding. See http://www.mapcode.com for more information.', author='Erik Bos', author_email='erik@xs4all.nl', url='https://github.com/mapcode-foundation/mapcode-python', download_url='https://github.com/mapcode-foundation/mapcode-python/tarball/v0.3', license='Apache License 2.0', classifiers=[ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: Apache Software License' ], ) <commit_msg>Update module version and tag as v0.4<commit_after>#!/usr/bin/env python from distutils.core import setup, Extension setup( name='mapcode', ext_modules=[Extension('mapcode', sources=['mapcodemodule.c', 'mapcodelib/mapcoder.c'], include_dirs=['mapcodelib'] )], # version number format is clibrary - python version='0.4', description='A Python module to do mapcode encoding and decoding. See http://www.mapcode.com for more information.', author='Erik Bos', author_email='erik@xs4all.nl', url='https://github.com/mapcode-foundation/mapcode-python', download_url='https://github.com/mapcode-foundation/mapcode-python/tarball/v0.4', license='Apache License 2.0', classifiers=[ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: Apache Software License' ], )
7891cf254bb98b65503675a20ed6b013385328cf
setup.py
setup.py
# coding=utf-8 import setuptools def package_data_dirs(source, sub_folders): import os dirs = [] for d in sub_folders: for dirname, _, files in os.walk(os.path.join(source, d)): dirname = os.path.relpath(dirname, source) for f in files: dirs.append(os.path.join(dirname, f)) return dirs def params(): name = "OctoPrint-Netconnectd" version = "0.1" description = "Client for netconnectd that allows configuration of netconnectd through OctoPrint's settings dialog. It's only available for Linux right now." author = "Gina Häußge" author_email = "osd@foosel.net" url = "http://octoprint.org" license = "AGPLv3" packages = ["octoprint_netconnectd"] package_data = {"octoprint": package_data_dirs('octoprint_netconnectd', ['static', 'templates'])} include_package_data = True zip_safe = False install_requires = open("requirements.txt").read().split("\n") entry_points = { "octoprint.plugin": [ "netconnectd = octoprint_netconnectd" ] } return locals() setuptools.setup(**params())
# coding=utf-8 import setuptools def package_data_dirs(source, sub_folders): import os dirs = [] for d in sub_folders: for dirname, _, files in os.walk(os.path.join(source, d)): dirname = os.path.relpath(dirname, source) for f in files: dirs.append(os.path.join(dirname, f)) return dirs def params(): name = "OctoPrint-Netconnectd" version = "0.1" description = "Client for netconnectd that allows configuration of netconnectd through OctoPrint's settings dialog. It's only available for Linux right now." author = "Gina Häußge" author_email = "osd@foosel.net" url = "http://octoprint.org" license = "AGPLv3" packages = ["octoprint_netconnectd"] package_data = {"octoprint_netconnectd": package_data_dirs('octoprint_netconnectd', ['static', 'templates'])} include_package_data = True zip_safe = False install_requires = open("requirements.txt").read().split("\n") entry_points = { "octoprint.plugin": [ "netconnectd = octoprint_netconnectd" ] } return locals() setuptools.setup(**params())
Copy paste error leading to static and template folders not being properly installed alongside the package
Copy paste error leading to static and template folders not being properly installed alongside the package
Python
agpl-3.0
OctoPrint/OctoPrint-Netconnectd,mrbeam/OctoPrint-Netconnectd,mrbeam/OctoPrint-Netconnectd,OctoPrint/OctoPrint-Netconnectd,mrbeam/OctoPrint-Netconnectd
# coding=utf-8 import setuptools def package_data_dirs(source, sub_folders): import os dirs = [] for d in sub_folders: for dirname, _, files in os.walk(os.path.join(source, d)): dirname = os.path.relpath(dirname, source) for f in files: dirs.append(os.path.join(dirname, f)) return dirs def params(): name = "OctoPrint-Netconnectd" version = "0.1" description = "Client for netconnectd that allows configuration of netconnectd through OctoPrint's settings dialog. It's only available for Linux right now." author = "Gina Häußge" author_email = "osd@foosel.net" url = "http://octoprint.org" license = "AGPLv3" packages = ["octoprint_netconnectd"] package_data = {"octoprint": package_data_dirs('octoprint_netconnectd', ['static', 'templates'])} include_package_data = True zip_safe = False install_requires = open("requirements.txt").read().split("\n") entry_points = { "octoprint.plugin": [ "netconnectd = octoprint_netconnectd" ] } return locals() setuptools.setup(**params()) Copy paste error leading to static and template folders not being properly installed alongside the package
# coding=utf-8 import setuptools def package_data_dirs(source, sub_folders): import os dirs = [] for d in sub_folders: for dirname, _, files in os.walk(os.path.join(source, d)): dirname = os.path.relpath(dirname, source) for f in files: dirs.append(os.path.join(dirname, f)) return dirs def params(): name = "OctoPrint-Netconnectd" version = "0.1" description = "Client for netconnectd that allows configuration of netconnectd through OctoPrint's settings dialog. It's only available for Linux right now." author = "Gina Häußge" author_email = "osd@foosel.net" url = "http://octoprint.org" license = "AGPLv3" packages = ["octoprint_netconnectd"] package_data = {"octoprint_netconnectd": package_data_dirs('octoprint_netconnectd', ['static', 'templates'])} include_package_data = True zip_safe = False install_requires = open("requirements.txt").read().split("\n") entry_points = { "octoprint.plugin": [ "netconnectd = octoprint_netconnectd" ] } return locals() setuptools.setup(**params())
<commit_before># coding=utf-8 import setuptools def package_data_dirs(source, sub_folders): import os dirs = [] for d in sub_folders: for dirname, _, files in os.walk(os.path.join(source, d)): dirname = os.path.relpath(dirname, source) for f in files: dirs.append(os.path.join(dirname, f)) return dirs def params(): name = "OctoPrint-Netconnectd" version = "0.1" description = "Client for netconnectd that allows configuration of netconnectd through OctoPrint's settings dialog. It's only available for Linux right now." author = "Gina Häußge" author_email = "osd@foosel.net" url = "http://octoprint.org" license = "AGPLv3" packages = ["octoprint_netconnectd"] package_data = {"octoprint": package_data_dirs('octoprint_netconnectd', ['static', 'templates'])} include_package_data = True zip_safe = False install_requires = open("requirements.txt").read().split("\n") entry_points = { "octoprint.plugin": [ "netconnectd = octoprint_netconnectd" ] } return locals() setuptools.setup(**params()) <commit_msg>Copy paste error leading to static and template folders not being properly installed alongside the package<commit_after>
# coding=utf-8 import setuptools def package_data_dirs(source, sub_folders): import os dirs = [] for d in sub_folders: for dirname, _, files in os.walk(os.path.join(source, d)): dirname = os.path.relpath(dirname, source) for f in files: dirs.append(os.path.join(dirname, f)) return dirs def params(): name = "OctoPrint-Netconnectd" version = "0.1" description = "Client for netconnectd that allows configuration of netconnectd through OctoPrint's settings dialog. It's only available for Linux right now." author = "Gina Häußge" author_email = "osd@foosel.net" url = "http://octoprint.org" license = "AGPLv3" packages = ["octoprint_netconnectd"] package_data = {"octoprint_netconnectd": package_data_dirs('octoprint_netconnectd', ['static', 'templates'])} include_package_data = True zip_safe = False install_requires = open("requirements.txt").read().split("\n") entry_points = { "octoprint.plugin": [ "netconnectd = octoprint_netconnectd" ] } return locals() setuptools.setup(**params())
# coding=utf-8 import setuptools def package_data_dirs(source, sub_folders): import os dirs = [] for d in sub_folders: for dirname, _, files in os.walk(os.path.join(source, d)): dirname = os.path.relpath(dirname, source) for f in files: dirs.append(os.path.join(dirname, f)) return dirs def params(): name = "OctoPrint-Netconnectd" version = "0.1" description = "Client for netconnectd that allows configuration of netconnectd through OctoPrint's settings dialog. It's only available for Linux right now." author = "Gina Häußge" author_email = "osd@foosel.net" url = "http://octoprint.org" license = "AGPLv3" packages = ["octoprint_netconnectd"] package_data = {"octoprint": package_data_dirs('octoprint_netconnectd', ['static', 'templates'])} include_package_data = True zip_safe = False install_requires = open("requirements.txt").read().split("\n") entry_points = { "octoprint.plugin": [ "netconnectd = octoprint_netconnectd" ] } return locals() setuptools.setup(**params()) Copy paste error leading to static and template folders not being properly installed alongside the package# coding=utf-8 import setuptools def package_data_dirs(source, sub_folders): import os dirs = [] for d in sub_folders: for dirname, _, files in os.walk(os.path.join(source, d)): dirname = os.path.relpath(dirname, source) for f in files: dirs.append(os.path.join(dirname, f)) return dirs def params(): name = "OctoPrint-Netconnectd" version = "0.1" description = "Client for netconnectd that allows configuration of netconnectd through OctoPrint's settings dialog. It's only available for Linux right now." author = "Gina Häußge" author_email = "osd@foosel.net" url = "http://octoprint.org" license = "AGPLv3" packages = ["octoprint_netconnectd"] package_data = {"octoprint_netconnectd": package_data_dirs('octoprint_netconnectd', ['static', 'templates'])} include_package_data = True zip_safe = False install_requires = open("requirements.txt").read().split("\n") entry_points = { "octoprint.plugin": [ "netconnectd = octoprint_netconnectd" ] } return locals() setuptools.setup(**params())
<commit_before># coding=utf-8 import setuptools def package_data_dirs(source, sub_folders): import os dirs = [] for d in sub_folders: for dirname, _, files in os.walk(os.path.join(source, d)): dirname = os.path.relpath(dirname, source) for f in files: dirs.append(os.path.join(dirname, f)) return dirs def params(): name = "OctoPrint-Netconnectd" version = "0.1" description = "Client for netconnectd that allows configuration of netconnectd through OctoPrint's settings dialog. It's only available for Linux right now." author = "Gina Häußge" author_email = "osd@foosel.net" url = "http://octoprint.org" license = "AGPLv3" packages = ["octoprint_netconnectd"] package_data = {"octoprint": package_data_dirs('octoprint_netconnectd', ['static', 'templates'])} include_package_data = True zip_safe = False install_requires = open("requirements.txt").read().split("\n") entry_points = { "octoprint.plugin": [ "netconnectd = octoprint_netconnectd" ] } return locals() setuptools.setup(**params()) <commit_msg>Copy paste error leading to static and template folders not being properly installed alongside the package<commit_after># coding=utf-8 import setuptools def package_data_dirs(source, sub_folders): import os dirs = [] for d in sub_folders: for dirname, _, files in os.walk(os.path.join(source, d)): dirname = os.path.relpath(dirname, source) for f in files: dirs.append(os.path.join(dirname, f)) return dirs def params(): name = "OctoPrint-Netconnectd" version = "0.1" description = "Client for netconnectd that allows configuration of netconnectd through OctoPrint's settings dialog. It's only available for Linux right now." author = "Gina Häußge" author_email = "osd@foosel.net" url = "http://octoprint.org" license = "AGPLv3" packages = ["octoprint_netconnectd"] package_data = {"octoprint_netconnectd": package_data_dirs('octoprint_netconnectd', ['static', 'templates'])} include_package_data = True zip_safe = False install_requires = open("requirements.txt").read().split("\n") entry_points = { "octoprint.plugin": [ "netconnectd = octoprint_netconnectd" ] } return locals() setuptools.setup(**params())
afb40cdfc52a68947433cb732aa6124d6a90f2df
snippet/example/python/project/project/main.py
snippet/example/python/project/project/main.py
#!/usr/bin/python # encoding: utf8 from __future__ import absolute_import, print_function, unicode_literals import eventlet from oslo_config import cfg from oslo_log import log # from {PROJECT}.db import api CONF = cfg.CONF __VERSION__ = "0.1" def main(project="example"): log.register_options(CONF) # log.set_defaults(default_log_levels=None) CONF(project=project, version=__VERSION__) log.setup(CONF, project, __VERSION__) eventlet.monkey_patch(all=True) # (TODO) pass if __name__ == '__main__': main()
#!/usr/bin/python # encoding: utf8 from __future__ import absolute_import, print_function, unicode_literals import eventlet from oslo_config import cfg from oslo_log import log # from {PROJECT}.db import api CONF = cfg.CONF __VERSION__ = "0.1" project_opts = [ cfg.StrOpt("logging_config_file", default="", help="The configuration file of logging for the {PROJECT}"), ] CONF.register_cli_options(project_opts, group="{PROJECT}") def main(project="example"): log.register_options(CONF) # log.set_defaults(default_log_levels=None) CONF(project=project, version=__VERSION__) # (TODO) Daemon eventlet.monkey_patch(all=True) if CONF.{PROJECT}.logging_config_file: log._load_log_config(CONF.{PROJECT}.logging_config_file) else: log.setup(CONF, project, __VERSION__) # (TODO) pass if __name__ == '__main__': main()
Modify the logging configuration in Python Example
Modify the logging configuration in Python Example
Python
mit
xgfone/snippet,xgfone/snippet,xgfone/snippet,xgfone/snippet,xgfone/snippet,xgfone/snippet,xgfone/snippet
#!/usr/bin/python # encoding: utf8 from __future__ import absolute_import, print_function, unicode_literals import eventlet from oslo_config import cfg from oslo_log import log # from {PROJECT}.db import api CONF = cfg.CONF __VERSION__ = "0.1" def main(project="example"): log.register_options(CONF) # log.set_defaults(default_log_levels=None) CONF(project=project, version=__VERSION__) log.setup(CONF, project, __VERSION__) eventlet.monkey_patch(all=True) # (TODO) pass if __name__ == '__main__': main() Modify the logging configuration in Python Example
#!/usr/bin/python # encoding: utf8 from __future__ import absolute_import, print_function, unicode_literals import eventlet from oslo_config import cfg from oslo_log import log # from {PROJECT}.db import api CONF = cfg.CONF __VERSION__ = "0.1" project_opts = [ cfg.StrOpt("logging_config_file", default="", help="The configuration file of logging for the {PROJECT}"), ] CONF.register_cli_options(project_opts, group="{PROJECT}") def main(project="example"): log.register_options(CONF) # log.set_defaults(default_log_levels=None) CONF(project=project, version=__VERSION__) # (TODO) Daemon eventlet.monkey_patch(all=True) if CONF.{PROJECT}.logging_config_file: log._load_log_config(CONF.{PROJECT}.logging_config_file) else: log.setup(CONF, project, __VERSION__) # (TODO) pass if __name__ == '__main__': main()
<commit_before>#!/usr/bin/python # encoding: utf8 from __future__ import absolute_import, print_function, unicode_literals import eventlet from oslo_config import cfg from oslo_log import log # from {PROJECT}.db import api CONF = cfg.CONF __VERSION__ = "0.1" def main(project="example"): log.register_options(CONF) # log.set_defaults(default_log_levels=None) CONF(project=project, version=__VERSION__) log.setup(CONF, project, __VERSION__) eventlet.monkey_patch(all=True) # (TODO) pass if __name__ == '__main__': main() <commit_msg>Modify the logging configuration in Python Example<commit_after>
#!/usr/bin/python # encoding: utf8 from __future__ import absolute_import, print_function, unicode_literals import eventlet from oslo_config import cfg from oslo_log import log # from {PROJECT}.db import api CONF = cfg.CONF __VERSION__ = "0.1" project_opts = [ cfg.StrOpt("logging_config_file", default="", help="The configuration file of logging for the {PROJECT}"), ] CONF.register_cli_options(project_opts, group="{PROJECT}") def main(project="example"): log.register_options(CONF) # log.set_defaults(default_log_levels=None) CONF(project=project, version=__VERSION__) # (TODO) Daemon eventlet.monkey_patch(all=True) if CONF.{PROJECT}.logging_config_file: log._load_log_config(CONF.{PROJECT}.logging_config_file) else: log.setup(CONF, project, __VERSION__) # (TODO) pass if __name__ == '__main__': main()
#!/usr/bin/python # encoding: utf8 from __future__ import absolute_import, print_function, unicode_literals import eventlet from oslo_config import cfg from oslo_log import log # from {PROJECT}.db import api CONF = cfg.CONF __VERSION__ = "0.1" def main(project="example"): log.register_options(CONF) # log.set_defaults(default_log_levels=None) CONF(project=project, version=__VERSION__) log.setup(CONF, project, __VERSION__) eventlet.monkey_patch(all=True) # (TODO) pass if __name__ == '__main__': main() Modify the logging configuration in Python Example#!/usr/bin/python # encoding: utf8 from __future__ import absolute_import, print_function, unicode_literals import eventlet from oslo_config import cfg from oslo_log import log # from {PROJECT}.db import api CONF = cfg.CONF __VERSION__ = "0.1" project_opts = [ cfg.StrOpt("logging_config_file", default="", help="The configuration file of logging for the {PROJECT}"), ] CONF.register_cli_options(project_opts, group="{PROJECT}") def main(project="example"): log.register_options(CONF) # log.set_defaults(default_log_levels=None) CONF(project=project, version=__VERSION__) # (TODO) Daemon eventlet.monkey_patch(all=True) if CONF.{PROJECT}.logging_config_file: log._load_log_config(CONF.{PROJECT}.logging_config_file) else: log.setup(CONF, project, __VERSION__) # (TODO) pass if __name__ == '__main__': main()
<commit_before>#!/usr/bin/python # encoding: utf8 from __future__ import absolute_import, print_function, unicode_literals import eventlet from oslo_config import cfg from oslo_log import log # from {PROJECT}.db import api CONF = cfg.CONF __VERSION__ = "0.1" def main(project="example"): log.register_options(CONF) # log.set_defaults(default_log_levels=None) CONF(project=project, version=__VERSION__) log.setup(CONF, project, __VERSION__) eventlet.monkey_patch(all=True) # (TODO) pass if __name__ == '__main__': main() <commit_msg>Modify the logging configuration in Python Example<commit_after>#!/usr/bin/python # encoding: utf8 from __future__ import absolute_import, print_function, unicode_literals import eventlet from oslo_config import cfg from oslo_log import log # from {PROJECT}.db import api CONF = cfg.CONF __VERSION__ = "0.1" project_opts = [ cfg.StrOpt("logging_config_file", default="", help="The configuration file of logging for the {PROJECT}"), ] CONF.register_cli_options(project_opts, group="{PROJECT}") def main(project="example"): log.register_options(CONF) # log.set_defaults(default_log_levels=None) CONF(project=project, version=__VERSION__) # (TODO) Daemon eventlet.monkey_patch(all=True) if CONF.{PROJECT}.logging_config_file: log._load_log_config(CONF.{PROJECT}.logging_config_file) else: log.setup(CONF, project, __VERSION__) # (TODO) pass if __name__ == '__main__': main()
101e4832b8fa9fa9da0b447c4c52fb3bd0e3c6a9
ratechecker/migrations/0002_remove_fee_loader.py
ratechecker/migrations/0002_remove_fee_loader.py
# -*- coding: utf-8 -*- # Generated by Django 1.11.22 on 2019-10-31 16:33 from __future__ import unicode_literals from django.db import migrations, OperationalError, ProgrammingError def fix_fee_product_index(apps, schema_editor): try: schema_editor.execute( 'DROP INDEX idx_16977_product_id;' 'ALTER TABLE cfpb.ratechecker_fee ' 'DROP CONSTRAINT IF EXISTS idx_16977_product_id;' 'ALTER TABLE cfpb.ratechecker_fee ' 'ADD CONSTRAINT idx_16977_product_id ' 'UNIQUE (product_id, state_id, lender, single_family, condo, coop);' ) except (ProgrammingError, OperationalError): pass class Migration(migrations.Migration): dependencies = [ ('ratechecker', '0001_initial'), ] operations = [ #migrations.RunPython(fix_fee_product_index), migrations.AlterUniqueTogether( name='fee', unique_together=set([]), ), migrations.RemoveField( model_name='fee', name='plan', ), migrations.DeleteModel( name='Fee', ), ]
# -*- coding: utf-8 -*- # Generated by Django 1.11.22 on 2019-10-31 16:33 from __future__ import unicode_literals from django.db import migrations, OperationalError, ProgrammingError def fix_fee_product_index(apps, schema_editor): try: schema_editor.execute( 'ALTER TABLE IF EXISTS cfpb.ratechecker_fee ' 'DROP CONSTRAINT IF EXISTS idx_16977_product_id;' 'ALTER TABLE IF EXISTS cfpb.ratechecker_fee ' 'ADD CONSTRAINT idx_16977_product_id ' 'UNIQUE (product_id, state_id, lender, single_family, condo, coop);' ) except (ProgrammingError, OperationalError): pass class Migration(migrations.Migration): dependencies = [ ('ratechecker', '0001_initial'), ] operations = [ migrations.RunPython(fix_fee_product_index), migrations.AlterUniqueTogether( name='fee', unique_together=set([]), ), migrations.RemoveField( model_name='fee', name='plan', ), migrations.DeleteModel( name='Fee', ), ]
Remove DROP INDEX from fix_fee_product_index
Remove DROP INDEX from fix_fee_product_index
Python
cc0-1.0
cfpb/owning-a-home-api
# -*- coding: utf-8 -*- # Generated by Django 1.11.22 on 2019-10-31 16:33 from __future__ import unicode_literals from django.db import migrations, OperationalError, ProgrammingError def fix_fee_product_index(apps, schema_editor): try: schema_editor.execute( 'DROP INDEX idx_16977_product_id;' 'ALTER TABLE cfpb.ratechecker_fee ' 'DROP CONSTRAINT IF EXISTS idx_16977_product_id;' 'ALTER TABLE cfpb.ratechecker_fee ' 'ADD CONSTRAINT idx_16977_product_id ' 'UNIQUE (product_id, state_id, lender, single_family, condo, coop);' ) except (ProgrammingError, OperationalError): pass class Migration(migrations.Migration): dependencies = [ ('ratechecker', '0001_initial'), ] operations = [ #migrations.RunPython(fix_fee_product_index), migrations.AlterUniqueTogether( name='fee', unique_together=set([]), ), migrations.RemoveField( model_name='fee', name='plan', ), migrations.DeleteModel( name='Fee', ), ] Remove DROP INDEX from fix_fee_product_index
# -*- coding: utf-8 -*- # Generated by Django 1.11.22 on 2019-10-31 16:33 from __future__ import unicode_literals from django.db import migrations, OperationalError, ProgrammingError def fix_fee_product_index(apps, schema_editor): try: schema_editor.execute( 'ALTER TABLE IF EXISTS cfpb.ratechecker_fee ' 'DROP CONSTRAINT IF EXISTS idx_16977_product_id;' 'ALTER TABLE IF EXISTS cfpb.ratechecker_fee ' 'ADD CONSTRAINT idx_16977_product_id ' 'UNIQUE (product_id, state_id, lender, single_family, condo, coop);' ) except (ProgrammingError, OperationalError): pass class Migration(migrations.Migration): dependencies = [ ('ratechecker', '0001_initial'), ] operations = [ migrations.RunPython(fix_fee_product_index), migrations.AlterUniqueTogether( name='fee', unique_together=set([]), ), migrations.RemoveField( model_name='fee', name='plan', ), migrations.DeleteModel( name='Fee', ), ]
<commit_before># -*- coding: utf-8 -*- # Generated by Django 1.11.22 on 2019-10-31 16:33 from __future__ import unicode_literals from django.db import migrations, OperationalError, ProgrammingError def fix_fee_product_index(apps, schema_editor): try: schema_editor.execute( 'DROP INDEX idx_16977_product_id;' 'ALTER TABLE cfpb.ratechecker_fee ' 'DROP CONSTRAINT IF EXISTS idx_16977_product_id;' 'ALTER TABLE cfpb.ratechecker_fee ' 'ADD CONSTRAINT idx_16977_product_id ' 'UNIQUE (product_id, state_id, lender, single_family, condo, coop);' ) except (ProgrammingError, OperationalError): pass class Migration(migrations.Migration): dependencies = [ ('ratechecker', '0001_initial'), ] operations = [ #migrations.RunPython(fix_fee_product_index), migrations.AlterUniqueTogether( name='fee', unique_together=set([]), ), migrations.RemoveField( model_name='fee', name='plan', ), migrations.DeleteModel( name='Fee', ), ] <commit_msg>Remove DROP INDEX from fix_fee_product_index<commit_after>
# -*- coding: utf-8 -*- # Generated by Django 1.11.22 on 2019-10-31 16:33 from __future__ import unicode_literals from django.db import migrations, OperationalError, ProgrammingError def fix_fee_product_index(apps, schema_editor): try: schema_editor.execute( 'ALTER TABLE IF EXISTS cfpb.ratechecker_fee ' 'DROP CONSTRAINT IF EXISTS idx_16977_product_id;' 'ALTER TABLE IF EXISTS cfpb.ratechecker_fee ' 'ADD CONSTRAINT idx_16977_product_id ' 'UNIQUE (product_id, state_id, lender, single_family, condo, coop);' ) except (ProgrammingError, OperationalError): pass class Migration(migrations.Migration): dependencies = [ ('ratechecker', '0001_initial'), ] operations = [ migrations.RunPython(fix_fee_product_index), migrations.AlterUniqueTogether( name='fee', unique_together=set([]), ), migrations.RemoveField( model_name='fee', name='plan', ), migrations.DeleteModel( name='Fee', ), ]
# -*- coding: utf-8 -*- # Generated by Django 1.11.22 on 2019-10-31 16:33 from __future__ import unicode_literals from django.db import migrations, OperationalError, ProgrammingError def fix_fee_product_index(apps, schema_editor): try: schema_editor.execute( 'DROP INDEX idx_16977_product_id;' 'ALTER TABLE cfpb.ratechecker_fee ' 'DROP CONSTRAINT IF EXISTS idx_16977_product_id;' 'ALTER TABLE cfpb.ratechecker_fee ' 'ADD CONSTRAINT idx_16977_product_id ' 'UNIQUE (product_id, state_id, lender, single_family, condo, coop);' ) except (ProgrammingError, OperationalError): pass class Migration(migrations.Migration): dependencies = [ ('ratechecker', '0001_initial'), ] operations = [ #migrations.RunPython(fix_fee_product_index), migrations.AlterUniqueTogether( name='fee', unique_together=set([]), ), migrations.RemoveField( model_name='fee', name='plan', ), migrations.DeleteModel( name='Fee', ), ] Remove DROP INDEX from fix_fee_product_index# -*- coding: utf-8 -*- # Generated by Django 1.11.22 on 2019-10-31 16:33 from __future__ import unicode_literals from django.db import migrations, OperationalError, ProgrammingError def fix_fee_product_index(apps, schema_editor): try: schema_editor.execute( 'ALTER TABLE IF EXISTS cfpb.ratechecker_fee ' 'DROP CONSTRAINT IF EXISTS idx_16977_product_id;' 'ALTER TABLE IF EXISTS cfpb.ratechecker_fee ' 'ADD CONSTRAINT idx_16977_product_id ' 'UNIQUE (product_id, state_id, lender, single_family, condo, coop);' ) except (ProgrammingError, OperationalError): pass class Migration(migrations.Migration): dependencies = [ ('ratechecker', '0001_initial'), ] operations = [ migrations.RunPython(fix_fee_product_index), migrations.AlterUniqueTogether( name='fee', unique_together=set([]), ), migrations.RemoveField( model_name='fee', name='plan', ), migrations.DeleteModel( name='Fee', ), ]
<commit_before># -*- coding: utf-8 -*- # Generated by Django 1.11.22 on 2019-10-31 16:33 from __future__ import unicode_literals from django.db import migrations, OperationalError, ProgrammingError def fix_fee_product_index(apps, schema_editor): try: schema_editor.execute( 'DROP INDEX idx_16977_product_id;' 'ALTER TABLE cfpb.ratechecker_fee ' 'DROP CONSTRAINT IF EXISTS idx_16977_product_id;' 'ALTER TABLE cfpb.ratechecker_fee ' 'ADD CONSTRAINT idx_16977_product_id ' 'UNIQUE (product_id, state_id, lender, single_family, condo, coop);' ) except (ProgrammingError, OperationalError): pass class Migration(migrations.Migration): dependencies = [ ('ratechecker', '0001_initial'), ] operations = [ #migrations.RunPython(fix_fee_product_index), migrations.AlterUniqueTogether( name='fee', unique_together=set([]), ), migrations.RemoveField( model_name='fee', name='plan', ), migrations.DeleteModel( name='Fee', ), ] <commit_msg>Remove DROP INDEX from fix_fee_product_index<commit_after># -*- coding: utf-8 -*- # Generated by Django 1.11.22 on 2019-10-31 16:33 from __future__ import unicode_literals from django.db import migrations, OperationalError, ProgrammingError def fix_fee_product_index(apps, schema_editor): try: schema_editor.execute( 'ALTER TABLE IF EXISTS cfpb.ratechecker_fee ' 'DROP CONSTRAINT IF EXISTS idx_16977_product_id;' 'ALTER TABLE IF EXISTS cfpb.ratechecker_fee ' 'ADD CONSTRAINT idx_16977_product_id ' 'UNIQUE (product_id, state_id, lender, single_family, condo, coop);' ) except (ProgrammingError, OperationalError): pass class Migration(migrations.Migration): dependencies = [ ('ratechecker', '0001_initial'), ] operations = [ migrations.RunPython(fix_fee_product_index), migrations.AlterUniqueTogether( name='fee', unique_together=set([]), ), migrations.RemoveField( model_name='fee', name='plan', ), migrations.DeleteModel( name='Fee', ), ]
5eebd015d8a3d7c562f49df7adf31712ecea8ef0
scripts/scrape-ecdc-list.py
scripts/scrape-ecdc-list.py
#!/usr/bin/env python import requests import lxml.html import pandas as pd import sys URL = "http://ecdc.europa.eu/en/healthtopics/zika_virus_infection/zika-outbreak/Pages/Zika-countries-with-transmission.aspx" columns_old = [ "country", "affected_past_nine_months", "affected_past_two_months" ] columns_new = [ "country", "affected_past_two_months", "affected_past_nine_months" ] def scrape(): html = requests.get(URL).content dom = lxml.html.fromstring(html) table = dom.cssselect(".ms-rteTable-1")[0] rows = table.cssselect("tr")[1:] data = [ [ td.text_content() for td in tr.cssselect("td, th") ] for tr in rows ] df = pd.DataFrame(data, columns=columns_new)[columns_old] return df if __name__ == "__main__": df = scrape() df.to_csv(sys.stdout, index=False, encoding="utf-8")
#!/usr/bin/env python import requests import lxml.html import pandas as pd import sys URL = "http://ecdc.europa.eu/en/healthtopics/zika_virus_infection/zika-outbreak/Pages/Zika-countries-with-transmission.aspx" columns_old = [ "country", "affected_past_nine_months", "affected_past_two_months" ] columns_new = [ "country", "affected_past_two_months", "affected_past_nine_months" ] def scrape(): html = requests.get(URL).content dom = lxml.html.fromstring(html) table = dom.cssselect(".ms-rteTable-1")[0] rows = table.cssselect("tr")[1:] data = [ [ td.text_content().strip() for td in tr.cssselect("td, th") ] for tr in rows ] df = pd.DataFrame(data, columns=columns_new)[columns_old] return df if __name__ == "__main__": df = scrape() df.to_csv(sys.stdout, index=False, encoding="utf-8")
Tweak ECDC scraper to strip whitespace
Tweak ECDC scraper to strip whitespace
Python
mit
BuzzFeedNews/zika-data
#!/usr/bin/env python
import requests
import lxml.html
import pandas as pd
import sys

URL = "http://ecdc.europa.eu/en/healthtopics/zika_virus_infection/zika-outbreak/Pages/Zika-countries-with-transmission.aspx"

columns_old = [
    "country",
    "affected_past_nine_months",
    "affected_past_two_months"
]

columns_new = [
    "country",
    "affected_past_two_months",
    "affected_past_nine_months"
]

def scrape():
    html = requests.get(URL).content
    dom = lxml.html.fromstring(html)
    table = dom.cssselect(".ms-rteTable-1")[0]
    rows = table.cssselect("tr")[1:]
    data = [
        [ td.text_content() for td in tr.cssselect("td, th") ]
        for tr in rows
    ]
    df = pd.DataFrame(data, columns=columns_new)[columns_old]
    return df

if __name__ == "__main__":
    df = scrape()
    df.to_csv(sys.stdout, index=False, encoding="utf-8")
Tweak ECDC scraper to strip whitespace
#!/usr/bin/env python
import requests
import lxml.html
import pandas as pd
import sys

URL = "http://ecdc.europa.eu/en/healthtopics/zika_virus_infection/zika-outbreak/Pages/Zika-countries-with-transmission.aspx"

columns_old = [
    "country",
    "affected_past_nine_months",
    "affected_past_two_months"
]

columns_new = [
    "country",
    "affected_past_two_months",
    "affected_past_nine_months"
]

def scrape():
    html = requests.get(URL).content
    dom = lxml.html.fromstring(html)
    table = dom.cssselect(".ms-rteTable-1")[0]
    rows = table.cssselect("tr")[1:]
    data = [
        [ td.text_content().strip() for td in tr.cssselect("td, th") ]
        for tr in rows
    ]
    df = pd.DataFrame(data, columns=columns_new)[columns_old]
    return df

if __name__ == "__main__":
    df = scrape()
    df.to_csv(sys.stdout, index=False, encoding="utf-8")
<commit_before>#!/usr/bin/env python
import requests
import lxml.html
import pandas as pd
import sys

URL = "http://ecdc.europa.eu/en/healthtopics/zika_virus_infection/zika-outbreak/Pages/Zika-countries-with-transmission.aspx"

columns_old = [
    "country",
    "affected_past_nine_months",
    "affected_past_two_months"
]

columns_new = [
    "country",
    "affected_past_two_months",
    "affected_past_nine_months"
]

def scrape():
    html = requests.get(URL).content
    dom = lxml.html.fromstring(html)
    table = dom.cssselect(".ms-rteTable-1")[0]
    rows = table.cssselect("tr")[1:]
    data = [
        [ td.text_content() for td in tr.cssselect("td, th") ]
        for tr in rows
    ]
    df = pd.DataFrame(data, columns=columns_new)[columns_old]
    return df

if __name__ == "__main__":
    df = scrape()
    df.to_csv(sys.stdout, index=False, encoding="utf-8")
<commit_msg>Tweak ECDC scraper to strip whitespace<commit_after>
#!/usr/bin/env python
import requests
import lxml.html
import pandas as pd
import sys

URL = "http://ecdc.europa.eu/en/healthtopics/zika_virus_infection/zika-outbreak/Pages/Zika-countries-with-transmission.aspx"

columns_old = [
    "country",
    "affected_past_nine_months",
    "affected_past_two_months"
]

columns_new = [
    "country",
    "affected_past_two_months",
    "affected_past_nine_months"
]

def scrape():
    html = requests.get(URL).content
    dom = lxml.html.fromstring(html)
    table = dom.cssselect(".ms-rteTable-1")[0]
    rows = table.cssselect("tr")[1:]
    data = [
        [ td.text_content().strip() for td in tr.cssselect("td, th") ]
        for tr in rows
    ]
    df = pd.DataFrame(data, columns=columns_new)[columns_old]
    return df

if __name__ == "__main__":
    df = scrape()
    df.to_csv(sys.stdout, index=False, encoding="utf-8")
#!/usr/bin/env python
import requests
import lxml.html
import pandas as pd
import sys

URL = "http://ecdc.europa.eu/en/healthtopics/zika_virus_infection/zika-outbreak/Pages/Zika-countries-with-transmission.aspx"

columns_old = [
    "country",
    "affected_past_nine_months",
    "affected_past_two_months"
]

columns_new = [
    "country",
    "affected_past_two_months",
    "affected_past_nine_months"
]

def scrape():
    html = requests.get(URL).content
    dom = lxml.html.fromstring(html)
    table = dom.cssselect(".ms-rteTable-1")[0]
    rows = table.cssselect("tr")[1:]
    data = [
        [ td.text_content() for td in tr.cssselect("td, th") ]
        for tr in rows
    ]
    df = pd.DataFrame(data, columns=columns_new)[columns_old]
    return df

if __name__ == "__main__":
    df = scrape()
    df.to_csv(sys.stdout, index=False, encoding="utf-8")
Tweak ECDC scraper to strip whitespace
#!/usr/bin/env python
import requests
import lxml.html
import pandas as pd
import sys

URL = "http://ecdc.europa.eu/en/healthtopics/zika_virus_infection/zika-outbreak/Pages/Zika-countries-with-transmission.aspx"

columns_old = [
    "country",
    "affected_past_nine_months",
    "affected_past_two_months"
]

columns_new = [
    "country",
    "affected_past_two_months",
    "affected_past_nine_months"
]

def scrape():
    html = requests.get(URL).content
    dom = lxml.html.fromstring(html)
    table = dom.cssselect(".ms-rteTable-1")[0]
    rows = table.cssselect("tr")[1:]
    data = [
        [ td.text_content().strip() for td in tr.cssselect("td, th") ]
        for tr in rows
    ]
    df = pd.DataFrame(data, columns=columns_new)[columns_old]
    return df

if __name__ == "__main__":
    df = scrape()
    df.to_csv(sys.stdout, index=False, encoding="utf-8")
<commit_before>#!/usr/bin/env python
import requests
import lxml.html
import pandas as pd
import sys

URL = "http://ecdc.europa.eu/en/healthtopics/zika_virus_infection/zika-outbreak/Pages/Zika-countries-with-transmission.aspx"

columns_old = [
    "country",
    "affected_past_nine_months",
    "affected_past_two_months"
]

columns_new = [
    "country",
    "affected_past_two_months",
    "affected_past_nine_months"
]

def scrape():
    html = requests.get(URL).content
    dom = lxml.html.fromstring(html)
    table = dom.cssselect(".ms-rteTable-1")[0]
    rows = table.cssselect("tr")[1:]
    data = [
        [ td.text_content() for td in tr.cssselect("td, th") ]
        for tr in rows
    ]
    df = pd.DataFrame(data, columns=columns_new)[columns_old]
    return df

if __name__ == "__main__":
    df = scrape()
    df.to_csv(sys.stdout, index=False, encoding="utf-8")
<commit_msg>Tweak ECDC scraper to strip whitespace<commit_after>#!/usr/bin/env python
import requests
import lxml.html
import pandas as pd
import sys

URL = "http://ecdc.europa.eu/en/healthtopics/zika_virus_infection/zika-outbreak/Pages/Zika-countries-with-transmission.aspx"

columns_old = [
    "country",
    "affected_past_nine_months",
    "affected_past_two_months"
]

columns_new = [
    "country",
    "affected_past_two_months",
    "affected_past_nine_months"
]

def scrape():
    html = requests.get(URL).content
    dom = lxml.html.fromstring(html)
    table = dom.cssselect(".ms-rteTable-1")[0]
    rows = table.cssselect("tr")[1:]
    data = [
        [ td.text_content().strip() for td in tr.cssselect("td, th") ]
        for tr in rows
    ]
    df = pd.DataFrame(data, columns=columns_new)[columns_old]
    return df

if __name__ == "__main__":
    df = scrape()
    df.to_csv(sys.stdout, index=False, encoding="utf-8")